@@ -0,0 +1,18 b'' | |||
|
1 | diff -rup Beaker-1.9.1-orig/beaker/session.py Beaker-1.9.1/beaker/session.py | |
|
2 | --- Beaker-1.9.1-orig/beaker/session.py 2020-04-10 10:23:04.000000000 +0200 | |
|
3 | +++ Beaker-1.9.1/beaker/session.py 2020-04-10 10:23:34.000000000 +0200 | |
|
4 | @@ -156,6 +156,14 @@ def __init__(self, request, id=None, invalidate_corrupt=False, | |
|
5 | if timeout and not save_accessed_time: | |
|
6 | raise BeakerException("timeout requires save_accessed_time") | |
|
7 | self.timeout = timeout | |
|
8 | + # We want to pass timeout param to redis backend to support expiration of keys | |
|
9 | + # In future, I believe, we can use this param for memcached and mongo as well | |
|
10 | + if self.timeout is not None and self.type == 'ext:redis': | |
|
11 | + # The backend expiration should always be a bit longer (I decided to use 2 minutes) than the | |
|
12 | + # session expiration itself to prevent the case where the backend data expires while | |
|
13 | + # the session is being read (PR#153) | |
|
14 | + self.namespace_args['timeout'] = self.timeout + 60 * 2 | |
|
15 | + | |
|
16 | self.save_atime = save_accessed_time | |
|
17 | self.use_cookies = use_cookies | |
|
18 | self.cookie_expires = cookie_expires No newline at end of file |
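
The hunk above only computes a slightly larger backend timeout and stashes it in self.namespace_args. Below is a minimal sketch of the resulting behaviour, assuming Beaker's usual session.* configuration keys and a one-hour session timeout (both illustrative, not taken from the commit):

    # Sketch only: shows the value the patched Session would forward to the
    # Redis namespace manager; the config keys and numbers are assumptions.
    session_timeout = 3600                      # session.timeout in seconds
    backend_timeout = session_timeout + 60 * 2  # Redis keys outlive the session by 2 minutes

    session_opts = {
        'session.type': 'ext:redis',
        'session.url': 'redis://127.0.0.1:6379/0',
        'session.timeout': session_timeout,
        # with the patch applied, namespace_args['timeout'] becomes backend_timeout
    }

The extra two minutes keep the backend data alive long enough that a read started just before session expiry does not race against Redis dropping the key (the PR#153 case mentioned in the comment).
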
@@ -0,0 +1,26 b'' | |||
|
1 | diff -rup Beaker-1.9.1-orig/beaker/ext/redisnm.py Beaker-1.9.1/beaker/ext/redisnm.py | |
|
2 | --- Beaker-1.9.1-orig/beaker/ext/redisnm.py 2018-04-10 10:23:04.000000000 +0200 | |
|
3 | +++ Beaker-1.9.1/beaker/ext/redisnm.py 2018-04-10 10:23:34.000000000 +0200 | |
|
4 | @@ -30,9 +30,10 @@ class RedisNamespaceManager(NamespaceManager): | |
|
5 | ||
|
6 | clients = SyncDict() | |
|
7 | ||
|
8 | - def __init__(self, namespace, url, **kw): | |
|
9 | + def __init__(self, namespace, url, timeout=None, **kw): | |
|
10 | super(RedisNamespaceManager, self).__init__(namespace) | |
|
11 | self.lock_dir = None # Redis uses redis itself for locking. | |
|
12 | + self.timeout = timeout | |
|
13 | ||
|
14 | if redis is None: | |
|
15 | raise RuntimeError('redis is not available') | |
|
16 | @@ -68,6 +69,8 @@ def has_key(self, key): | |
|
17 | ||
|
18 | def set_value(self, key, value, expiretime=None): | |
|
19 | value = pickle.dumps(value) | |
|
20 | + if expiretime is None and self.timeout is not None: | |
|
21 | + expiretime = self.timeout | |
|
22 | if expiretime is not None: | |
|
23 | self.client.setex(self._format_key(key), int(expiretime), value) | |
|
24 | else: | |
|
25 | ||
|
26 |
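
The second hunk makes set_value fall back to the namespace-level timeout whenever the caller does not pass an explicit expiretime. A standalone sketch of that logic, assuming redis-py's setex(name, time, value) and set(name, value) signatures and a plain pickle payload as in the original module (the else branch shown here is an assumption, since the diff is cut off before it):

    import pickle

    def set_value(client, key, value, expiretime=None, namespace_timeout=None):
        # Sketch of the patched fallback, not the full RedisNamespaceManager.
        payload = pickle.dumps(value)
        if expiretime is None and namespace_timeout is not None:
            expiretime = namespace_timeout               # inherit the session timeout (+2 min buffer)
        if expiretime is not None:
            client.setex(key, int(expiretime), payload)  # key gets a TTL in Redis
        else:
            client.set(key, payload)                     # previous behaviour: no TTL
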
|
1 | NO CONTENT: new file 100644 | |
|
1 | NO CONTENT: new file 100644 | |
|
1 | NO CONTENT: new file 100644 | |
@@ -1,6 +1,5 b'' | |||
|
1 | 1 | [bumpversion] |
|
2 | current_version = 4.2 | |

2 | current_version = 4.21.0 | |
|
3 | 3 | message = release: Bump version {current_version} to {new_version} |
|
4 | 4 | |
|
5 | 5 | [bumpversion:file:rhodecode/VERSION] |
|
6 |
@@ -1,33 +1,28 b'' | |||
|
1 | 1 | [DEFAULT] |
|
2 | 2 | done = false |
|
3 | 3 | |
|
4 | 4 | [task:bump_version] |
|
5 | 5 | done = true |
|
6 | 6 | |
|
7 | 7 | [task:rc_tools_pinned] |
|
8 | done = true | |
|
9 | 8 | |
|
10 | 9 | [task:fixes_on_stable] |
|
11 | done = true | |
|
12 | 10 | |
|
13 | 11 | [task:pip2nix_generated] |
|
14 | done = true | |
|
15 | 12 | |
|
16 | 13 | [task:changelog_updated] |
|
17 | done = true | |
|
18 | 14 | |
|
19 | 15 | [task:generate_api_docs] |
|
20 | done = true | |
|
16 | ||
|
17 | [task:updated_translation] | |
|
21 | 18 | |
|
22 | 19 | [release] |
|
23 |
state = |
|
|
24 |
version = 4.2 |
|
|
25 | ||
|
26 | [task:updated_translation] | |
|
20 | state = in_progress | |
|
21 | version = 4.21.0 | |
|
27 | 22 | |
|
28 | 23 | [task:generate_js_routes] |
|
29 | 24 | |
|
30 | 25 | [task:updated_trial_license] |
|
31 | 26 | |
|
32 | 27 | [task:generate_oss_licenses] |
|
33 | 28 |
@@ -1,193 +1,194 b'' | |||
|
1 | 1 | Nginx Configuration Example |
|
2 | 2 | --------------------------- |
|
3 | 3 | |
|
4 | 4 | Use the following example to configure Nginx as your web server.
|
5 | 5 | |
|
6 | 6 | |
|
7 | 7 | .. code-block:: nginx |
|
8 | 8 | |
|
9 | 9 | ## Rate limiter for certain pages to prevent brute force attacks |
|
10 | 10 | limit_req_zone $binary_remote_addr zone=req_limit:10m rate=1r/s; |
|
11 | 11 | |
|
12 | 12 | ## cache zone |
|
13 | 13 | proxy_cache_path /etc/nginx/nginx_cache levels=1:2 use_temp_path=off keys_zone=cache_zone:10m inactive=720h max_size=10g; |
|
14 | 14 | |
|
15 | 15 | ## Custom log format |
|
16 | 16 | log_format log_custom '$remote_addr - $remote_user [$time_local] ' |
|
17 | 17 | '"$request" $status $body_bytes_sent ' |
|
18 | 18 | '"$http_referer" "$http_user_agent" ' |
|
19 | 19 | '$request_time $upstream_response_time $pipe'; |
|
20 | 20 | |
|
21 | 21 | ## Define one or more upstreams (local RhodeCode instance) to connect to |
|
22 | 22 | upstream rc { |
|
23 | 23 | # Url to running RhodeCode instance. |
|
24 | 24 | # This is shown as `- URL: <host>` in output from rccontrol status. |
|
25 | 25 | server 127.0.0.1:10002; |
|
26 | 26 | |
|
27 | 27 | # add more instances for load balancing |
|
28 | 28 | # server 127.0.0.1:10003; |
|
29 | 29 | # server 127.0.0.1:10004; |
|
30 | 30 | } |
|
31 | 31 | |
|
32 | 32 | ## HTTP to HTTPS rewrite |
|
33 | 33 | server { |
|
34 | 34 | listen 80; |
|
35 | 35 | server_name rhodecode.myserver.com; |
|
36 | 36 | |
|
37 | 37 | if ($http_host = rhodecode.myserver.com) { |
|
38 | 38 | rewrite (.*) https://rhodecode.myserver.com$1 permanent; |
|
39 | 39 | } |
|
40 | 40 | } |
|
41 | 41 | |
|
42 | 42 | ## Optional gist alias server, for serving nicer GIST urls. |
|
43 | 43 | server { |
|
44 | 44 | listen 443; |
|
45 | 45 | server_name gist.myserver.com; |
|
46 | 46 | access_log /var/log/nginx/gist.access.log log_custom; |
|
47 | 47 | error_log /var/log/nginx/gist.error.log; |
|
48 | 48 | |
|
49 | 49 | ssl on; |
|
50 | 50 | ssl_certificate gist.rhodecode.myserver.com.crt; |
|
51 | 51 | ssl_certificate_key gist.rhodecode.myserver.com.key; |
|
52 | 52 | |
|
53 | 53 | ssl_session_timeout 5m; |
|
54 | 54 | |
|
55 | 55 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; |
|
56 | 56 | ssl_prefer_server_ciphers on; |
|
57 | 57 | ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA'; |
|
58 | 58 | |
|
59 | 59 | ## Strict http prevents from https -> http downgrade |
|
60 | 60 | add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;"; |
|
61 | 61 | |
|
62 | 62 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits |
|
63 | 63 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; |
|
64 | 64 | |
|
65 | 65 | rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1; |
|
66 | 66 | rewrite (.*) https://rhodecode.myserver.com/_admin/gists; |
|
67 | 67 | } |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | ## MAIN SSL enabled server |
|
71 | 71 | server { |
|
72 | 72 | listen 443 ssl http2; |
|
73 | 73 | server_name rhodecode.myserver.com; |
|
74 | 74 | |
|
75 | 75 | access_log /var/log/nginx/rhodecode.access.log log_custom; |
|
76 | 76 | error_log /var/log/nginx/rhodecode.error.log; |
|
77 | 77 | |
|
78 | 78 | ssl_certificate rhodecode.myserver.com.crt; |
|
79 | 79 | ssl_certificate_key rhodecode.myserver.com.key; |
|
80 | 80 | |
|
81 | 81 | # enable session resumption to improve https performance |
|
82 | 82 | # http://vincent.bernat.im/en/blog/2011-ssl-session-reuse-rfc5077.html |
|
83 | 83 | ssl_session_cache shared:SSL:50m; |
|
84 | 84 | ssl_session_timeout 5m; |
|
85 | 85 | |
|
86 | 86 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits |
|
87 | 87 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; |
|
88 | 88 | |
|
89 | 89 | # enables server-side protection from BEAST attacks |
|
90 | 90 | # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html |
|
91 | 91 | ssl_prefer_server_ciphers on; |
|
92 | 92 | |
|
93 | 93 | # disable SSLv3 (enabled by default since nginx 0.8.19) since it's less secure than TLS http://en.wikipedia.org/wiki/Secure_Sockets_Layer#SSL_3.0
|
94 | 94 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; |
|
95 | 95 | |
|
96 | 96 | # ciphers chosen for forward secrecy and compatibility |
|
97 | 97 | # http://blog.ivanristic.com/2013/08/configuring-apache-nginx-and-openssl-for-forward-secrecy.html |
|
98 | 98 | ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4"; |
|
99 | 99 | |
|
100 | 100 | client_body_buffer_size 128k; |
|
101 | 101 | # maximum number and size of buffers for large headers to read from client request |
|
102 | 102 | large_client_header_buffers 16 256k; |
|
103 | 103 | |
|
104 | 104 | ## uncomment to serve static files by Nginx, recommended for performance |
|
105 | 105 | # location /_static/rhodecode { |
|
106 | 106 | # gzip on; |
|
107 | 107 | # gzip_min_length 500; |
|
108 | 108 | # gzip_proxied any; |
|
109 | 109 | # gzip_comp_level 4; |
|
110 | 110 | # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml; |
|
111 | 111 | # gzip_vary on; |
|
112 | 112 | # gzip_disable "msie6"; |
|
113 | 113 | # expires 60d; |
|
114 | 114 | # alias /path/to/.rccontrol/community-1/static; |
|
115 | 115 | # alias /path/to/.rccontrol/enterprise-1/static; |
|
116 | 116 | # } |
|
117 | 117 | |
|
118 | 118 | ## channelstream location handler, if channelstream live chat and notifications |
|
119 | 119 | ## are enabled, this will proxy the requests to the channelstream websocket server
|
120 | 120 | location /_channelstream { |
|
121 | 121 | rewrite /_channelstream/(.*) /$1 break; |
|
122 | 122 | gzip off; |
|
123 | 123 | tcp_nodelay off; |
|
124 | 124 | |
|
125 | 125 | proxy_connect_timeout 10; |
|
126 | 126 | proxy_send_timeout 10m; |
|
127 | 127 | proxy_read_timeout 10m; |
|
128 | 128 | |
|
129 | 129 | proxy_set_header Host $host; |
|
130 | 130 | proxy_set_header X-Real-IP $remote_addr; |
|
131 | 131 | proxy_set_header X-Url-Scheme $scheme; |
|
132 | 132 | proxy_set_header X-Forwarded-Proto $scheme; |
|
133 | 133 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; |
|
134 | 134 | |
|
135 | 135 | proxy_http_version 1.1; |
|
136 | 136 | proxy_set_header Upgrade $http_upgrade; |
|
137 | 137 | proxy_set_header Connection "upgrade"; |
|
138 | 138 | |
|
139 | 139 | proxy_pass http://127.0.0.1:9800; |
|
140 | 140 | } |
|
141 | 141 | |
|
142 | 142 | ## rate limit this endpoint to prevent login page brute-force attacks |
|
143 | 143 | location /_admin/login { |
|
144 | 144 | limit_req zone=req_limit burst=10 nodelay; |
|
145 | 145 | try_files $uri @rhodecode_http; |
|
146 | 146 | } |
|
147 | 147 | |
|
148 | 148 | ## Special Cache for file store, make sure you enable this intentionally as |
|
149 | 149 | ## it could bypass upload files permissions |
|
150 | # location /_file_store/download { | |
|
150 | # location /_file_store/download/gravatars { | |
|
151 | 151 | # |
|
152 | 152 | # proxy_cache cache_zone; |
|
153 | 153 | # # ignore Set-Cookie |
|
154 | 154 | # proxy_ignore_headers Set-Cookie; |
|
155 | # | |

155 | # # ignore cache-control | |
|
156 | # proxy_ignore_headers Cache-Control; | |
|
156 | 157 | # |
|
157 | 158 | # proxy_cache_key $host$uri$is_args$args; |
|
158 | 159 | # proxy_cache_methods GET; |
|
159 | 160 | # |
|
160 | 161 | # proxy_cache_bypass $http_cache_control; |
|
161 | 162 | # proxy_cache_valid 200 302 720h; |
|
162 | 163 | # |
|
163 | 164 | # proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504; |
|
164 | 165 | # |
|
165 | 166 | # # returns cache status in headers |
|
166 | 167 | # add_header X-Proxy-Cache $upstream_cache_status; |
|
167 | 168 | # add_header Cache-Control "public"; |
|
168 | 169 | # |
|
169 | 170 | # proxy_cache_lock on; |
|
170 | 171 | # proxy_cache_lock_age 5m; |
|
171 | 172 | # |
|
172 | 173 | # proxy_pass http://rc; |
|
173 | 174 | # |
|
174 | 175 | # } |
|
175 | 176 | |
|
176 | 177 | location / { |
|
177 | 178 | try_files $uri @rhodecode_http; |
|
178 | 179 | } |
|
179 | 180 | |
|
180 | 181 | location @rhodecode_http { |
|
181 | 182 | # example of proxy.conf can be found in our docs. |
|
182 | 183 | include /etc/nginx/proxy.conf; |
|
183 | 184 | proxy_pass http://rc; |
|
184 | 185 | } |
|
185 | 186 | |
|
186 | 187 | ## Custom 502 error page. |
|
187 | 188 | ## Will be displayed while RhodeCode server is turned off |
|
188 | 189 | error_page 502 /502.html; |
|
189 | 190 | location = /502.html { |
|
190 | 191 | #root /path/to/.rccontrol/community-1/static; |
|
191 | 192 | root /path/to/.rccontrol/enterprise-1/static; |
|
192 | 193 | } |
|
193 | 194 | } No newline at end of file |
@@ -1,279 +1,281 b'' | |||
|
1 | 1 | # Overrides for the generated python-packages.nix |
|
2 | 2 | # |
|
3 | 3 | # This function is intended to be used as an extension to the generated file |
|
4 | 4 | # python-packages.nix. The main objective is to add needed dependencies of C |
|
5 | 5 | # libraries and tweak the build instructions where needed. |
|
6 | 6 | |
|
7 | 7 | { pkgs |
|
8 | 8 | , basePythonPackages |
|
9 | 9 | }: |
|
10 | 10 | |
|
11 | 11 | let |
|
12 | 12 | sed = "sed -i"; |
|
13 | 13 | |
|
14 | 14 | localLicenses = { |
|
15 | 15 | repoze = { |
|
16 | 16 | fullName = "Repoze License"; |
|
17 | 17 | url = http://www.repoze.org/LICENSE.txt; |
|
18 | 18 | }; |
|
19 | 19 | }; |
|
20 | 20 | |
|
21 | 21 | in |
|
22 | 22 | |
|
23 | 23 | self: super: { |
|
24 | 24 | |
|
25 | 25 | "appenlight-client" = super."appenlight-client".override (attrs: { |
|
26 | 26 | meta = { |
|
27 | 27 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
28 | 28 | }; |
|
29 | 29 | }); |
|
30 | 30 | |
|
31 | 31 | "beaker" = super."beaker".override (attrs: { |
|
32 | 32 | patches = [ |
|
33 | 33 | ./patches/beaker/patch-beaker-lock-func-debug.diff |
|
34 | 34 | ./patches/beaker/patch-beaker-metadata-reuse.diff |
|
35 | ./patches/beaker/patch-beaker-improved-redis.diff | |
|
36 | ./patches/beaker/patch-beaker-improved-redis-2.diff | |
|
35 | 37 | ]; |
|
36 | 38 | }); |
|
37 | 39 | |
|
38 | 40 | "cffi" = super."cffi".override (attrs: { |
|
39 | 41 | buildInputs = [ |
|
40 | 42 | pkgs.libffi |
|
41 | 43 | ]; |
|
42 | 44 | }); |
|
43 | 45 | |
|
44 | 46 | "cryptography" = super."cryptography".override (attrs: { |
|
45 | 47 | buildInputs = [ |
|
46 | 48 | pkgs.openssl |
|
47 | 49 | ]; |
|
48 | 50 | }); |
|
49 | 51 | |
|
50 | 52 | "gevent" = super."gevent".override (attrs: { |
|
51 | 53 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ |
|
52 | 54 | # NOTE: (marcink) odd requirements from gevent aren't set properly, |
|
53 | 55 | # thus we need to inject psutil manually |
|
54 | 56 | self."psutil" |
|
55 | 57 | ]; |
|
56 | 58 | }); |
|
57 | 59 | |
|
58 | 60 | "future" = super."future".override (attrs: { |
|
59 | 61 | meta = { |
|
60 | 62 | license = [ pkgs.lib.licenses.mit ]; |
|
61 | 63 | }; |
|
62 | 64 | }); |
|
63 | 65 | |
|
64 | 66 | "testpath" = super."testpath".override (attrs: { |
|
65 | 67 | meta = { |
|
66 | 68 | license = [ pkgs.lib.licenses.mit ]; |
|
67 | 69 | }; |
|
68 | 70 | }); |
|
69 | 71 | |
|
70 | 72 | "gnureadline" = super."gnureadline".override (attrs: { |
|
71 | 73 | buildInputs = [ |
|
72 | 74 | pkgs.ncurses |
|
73 | 75 | ]; |
|
74 | 76 | patchPhase = '' |
|
75 | 77 | substituteInPlace setup.py --replace "/bin/bash" "${pkgs.bash}/bin/bash" |
|
76 | 78 | ''; |
|
77 | 79 | }); |
|
78 | 80 | |
|
79 | 81 | "gunicorn" = super."gunicorn".override (attrs: { |
|
80 | 82 | propagatedBuildInputs = [ |
|
81 | 83 | # johbo: futures is needed as long as we are on Python 2, otherwise |
|
82 | 84 | # gunicorn explodes if used with multiple threads per worker. |
|
83 | 85 | self."futures" |
|
84 | 86 | ]; |
|
85 | 87 | }); |
|
86 | 88 | |
|
87 | 89 | "nbconvert" = super."nbconvert".override (attrs: { |
|
88 | 90 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ |
|
89 | 91 | # marcink: plug in jupyter-client for notebook rendering |
|
90 | 92 | self."jupyter-client" |
|
91 | 93 | ]; |
|
92 | 94 | }); |
|
93 | 95 | |
|
94 | 96 | "ipython" = super."ipython".override (attrs: { |
|
95 | 97 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ |
|
96 | 98 | self."gnureadline" |
|
97 | 99 | ]; |
|
98 | 100 | }); |
|
99 | 101 | |
|
100 | 102 | "lxml" = super."lxml".override (attrs: { |
|
101 | 103 | buildInputs = [ |
|
102 | 104 | pkgs.libxml2 |
|
103 | 105 | pkgs.libxslt |
|
104 | 106 | ]; |
|
105 | 107 | propagatedBuildInputs = [ |
|
106 | 108 | # Needed, so that "setup.py bdist_wheel" does work |
|
107 | 109 | self."wheel" |
|
108 | 110 | ]; |
|
109 | 111 | }); |
|
110 | 112 | |
|
111 | 113 | "mysql-python" = super."mysql-python".override (attrs: { |
|
112 | 114 | buildInputs = [ |
|
113 | 115 | pkgs.openssl |
|
114 | 116 | ]; |
|
115 | 117 | propagatedBuildInputs = [ |
|
116 | 118 | pkgs.libmysql |
|
117 | 119 | pkgs.zlib |
|
118 | 120 | ]; |
|
119 | 121 | }); |
|
120 | 122 | |
|
121 | 123 | "psycopg2" = super."psycopg2".override (attrs: { |
|
122 | 124 | propagatedBuildInputs = [ |
|
123 | 125 | pkgs.postgresql |
|
124 | 126 | ]; |
|
125 | 127 | meta = { |
|
126 | 128 | license = pkgs.lib.licenses.lgpl3Plus; |
|
127 | 129 | }; |
|
128 | 130 | }); |
|
129 | 131 | |
|
130 | 132 | "pycurl" = super."pycurl".override (attrs: { |
|
131 | 133 | propagatedBuildInputs = [ |
|
132 | 134 | pkgs.curl |
|
133 | 135 | pkgs.openssl |
|
134 | 136 | ]; |
|
135 | 137 | |
|
136 | 138 | preConfigure = '' |
|
137 | 139 | substituteInPlace setup.py --replace '--static-libs' '--libs' |
|
138 | 140 | export PYCURL_SSL_LIBRARY=openssl |
|
139 | 141 | ''; |
|
140 | 142 | |
|
141 | 143 | meta = { |
|
142 | 144 | license = pkgs.lib.licenses.mit; |
|
143 | 145 | }; |
|
144 | 146 | }); |
|
145 | 147 | |
|
146 | 148 | "pyramid" = super."pyramid".override (attrs: { |
|
147 | 149 | meta = { |
|
148 | 150 | license = localLicenses.repoze; |
|
149 | 151 | }; |
|
150 | 152 | }); |
|
151 | 153 | |
|
152 | 154 | "pyramid-debugtoolbar" = super."pyramid-debugtoolbar".override (attrs: { |
|
153 | 155 | meta = { |
|
154 | 156 | license = [ pkgs.lib.licenses.bsdOriginal localLicenses.repoze ]; |
|
155 | 157 | }; |
|
156 | 158 | }); |
|
157 | 159 | |
|
158 | 160 | "pysqlite" = super."pysqlite".override (attrs: { |
|
159 | 161 | propagatedBuildInputs = [ |
|
160 | 162 | pkgs.sqlite |
|
161 | 163 | ]; |
|
162 | 164 | meta = { |
|
163 | 165 | license = [ pkgs.lib.licenses.zlib pkgs.lib.licenses.libpng ]; |
|
164 | 166 | }; |
|
165 | 167 | }); |
|
166 | 168 | |
|
167 | 169 | "python-ldap" = super."python-ldap".override (attrs: { |
|
168 | 170 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ |
|
169 | 171 | pkgs.openldap |
|
170 | 172 | pkgs.cyrus_sasl |
|
171 | 173 | pkgs.openssl |
|
172 | 174 | ]; |
|
173 | 175 | }); |
|
174 | 176 | |
|
175 | 177 | "python-pam" = super."python-pam".override (attrs: { |
|
176 | 178 | propagatedBuildInputs = [ |
|
177 | 179 | pkgs.pam |
|
178 | 180 | ]; |
|
179 | 181 | |
|
180 | 182 | # TODO: johbo: Check if this can be avoided, or transform into |
|
181 | 183 | # a real patch |
|
182 | 184 | patchPhase = '' |
|
183 | 185 | substituteInPlace pam.py \ |
|
184 | 186 | --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"' |
|
185 | 187 | ''; |
|
186 | 188 | |
|
187 | 189 | }); |
|
188 | 190 | |
|
189 | 191 | "python-saml" = super."python-saml".override (attrs: { |
|
190 | 192 | buildInputs = [ |
|
191 | 193 | pkgs.libxml2 |
|
192 | 194 | pkgs.libxslt |
|
193 | 195 | ]; |
|
194 | 196 | }); |
|
195 | 197 | |
|
196 | 198 | "dm.xmlsec.binding" = super."dm.xmlsec.binding".override (attrs: { |
|
197 | 199 | buildInputs = [ |
|
198 | 200 | pkgs.libxml2 |
|
199 | 201 | pkgs.libxslt |
|
200 | 202 | pkgs.xmlsec |
|
201 | 203 | pkgs.libtool |
|
202 | 204 | ]; |
|
203 | 205 | }); |
|
204 | 206 | |
|
205 | 207 | "pyzmq" = super."pyzmq".override (attrs: { |
|
206 | 208 | buildInputs = [ |
|
207 | 209 | pkgs.czmq |
|
208 | 210 | ]; |
|
209 | 211 | }); |
|
210 | 212 | |
|
211 | 213 | "urlobject" = super."urlobject".override (attrs: { |
|
212 | 214 | meta = { |
|
213 | 215 | license = { |
|
214 | 216 | spdxId = "Unlicense"; |
|
215 | 217 | fullName = "The Unlicense"; |
|
216 | 218 | url = http://unlicense.org/; |
|
217 | 219 | }; |
|
218 | 220 | }; |
|
219 | 221 | }); |
|
220 | 222 | |
|
221 | 223 | "docutils" = super."docutils".override (attrs: { |
|
222 | 224 | meta = { |
|
223 | 225 | license = pkgs.lib.licenses.bsd2; |
|
224 | 226 | }; |
|
225 | 227 | }); |
|
226 | 228 | |
|
227 | 229 | "colander" = super."colander".override (attrs: { |
|
228 | 230 | meta = { |
|
229 | 231 | license = localLicenses.repoze; |
|
230 | 232 | }; |
|
231 | 233 | }); |
|
232 | 234 | |
|
233 | 235 | "pyramid-beaker" = super."pyramid-beaker".override (attrs: { |
|
234 | 236 | meta = { |
|
235 | 237 | license = localLicenses.repoze; |
|
236 | 238 | }; |
|
237 | 239 | }); |
|
238 | 240 | |
|
239 | 241 | "pyramid-mako" = super."pyramid-mako".override (attrs: { |
|
240 | 242 | meta = { |
|
241 | 243 | license = localLicenses.repoze; |
|
242 | 244 | }; |
|
243 | 245 | }); |
|
244 | 246 | |
|
245 | 247 | "repoze.lru" = super."repoze.lru".override (attrs: { |
|
246 | 248 | meta = { |
|
247 | 249 | license = localLicenses.repoze; |
|
248 | 250 | }; |
|
249 | 251 | }); |
|
250 | 252 | |
|
251 | 253 | "python-editor" = super."python-editor".override (attrs: { |
|
252 | 254 | meta = { |
|
253 | 255 | license = pkgs.lib.licenses.asl20; |
|
254 | 256 | }; |
|
255 | 257 | }); |
|
256 | 258 | |
|
257 | 259 | "translationstring" = super."translationstring".override (attrs: { |
|
258 | 260 | meta = { |
|
259 | 261 | license = localLicenses.repoze; |
|
260 | 262 | }; |
|
261 | 263 | }); |
|
262 | 264 | |
|
263 | 265 | "venusian" = super."venusian".override (attrs: { |
|
264 | 266 | meta = { |
|
265 | 267 | license = localLicenses.repoze; |
|
266 | 268 | }; |
|
267 | 269 | }); |
|
268 | 270 | |
|
269 | 271 | "supervisor" = super."supervisor".override (attrs: { |
|
270 | 272 | patches = [ |
|
271 | 273 | ./patches/supervisor/patch-rlimits-old-kernel.diff |
|
272 | 274 | ]; |
|
273 | 275 | }); |
|
274 | 276 | |
|
275 | 277 | # Avoid that base packages screw up the build process |
|
276 | 278 | inherit (basePythonPackages) |
|
277 | 279 | setuptools; |
|
278 | 280 | |
|
279 | 281 | } |
@@ -1,2444 +1,2497 b'' | |||
|
1 | 1 | # Generated by pip2nix 0.8.0.dev1 |
|
2 | 2 | # See https://github.com/johbo/pip2nix |
|
3 | 3 | |
|
4 | 4 | { pkgs, fetchurl, fetchgit, fetchhg }: |
|
5 | 5 | |
|
6 | 6 | self: super: { |
|
7 | 7 | "alembic" = super.buildPythonPackage { |
|
8 | 8 | name = "alembic-1.4.2"; |
|
9 | 9 | doCheck = false; |
|
10 | 10 | propagatedBuildInputs = [ |
|
11 | 11 | self."sqlalchemy" |
|
12 | 12 | self."mako" |
|
13 | 13 | self."python-editor" |
|
14 | 14 | self."python-dateutil" |
|
15 | 15 | ]; |
|
16 | 16 | src = fetchurl { |
|
17 | 17 | url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz"; |
|
18 | 18 | sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3"; |
|
19 | 19 | }; |
|
20 | 20 | meta = { |
|
21 | 21 | license = [ pkgs.lib.licenses.mit ]; |
|
22 | 22 | }; |
|
23 | 23 | }; |
|
24 | 24 | "amqp" = super.buildPythonPackage { |
|
25 | 25 | name = "amqp-2.5.2"; |
|
26 | 26 | doCheck = false; |
|
27 | 27 | propagatedBuildInputs = [ |
|
28 | 28 | self."vine" |
|
29 | 29 | ]; |
|
30 | 30 | src = fetchurl { |
|
31 | 31 | url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz"; |
|
32 | 32 | sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp"; |
|
33 | 33 | }; |
|
34 | 34 | meta = { |
|
35 | 35 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
36 | 36 | }; |
|
37 | 37 | }; |
|
38 | "apispec" = super.buildPythonPackage { | |
|
39 | name = "apispec-1.0.0"; | |
|
40 | doCheck = false; | |
|
41 | propagatedBuildInputs = [ | |
|
42 | self."PyYAML" | |
|
43 | ]; | |
|
44 | src = fetchurl { | |
|
45 | url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz"; | |
|
46 | sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp"; | |
|
47 | }; | |
|
48 | meta = { | |
|
49 | license = [ pkgs.lib.licenses.mit ]; | |
|
50 | }; | |
|
51 | }; | |
|
38 | 52 | "appenlight-client" = super.buildPythonPackage { |
|
39 | 53 | name = "appenlight-client-0.6.26"; |
|
40 | 54 | doCheck = false; |
|
41 | 55 | propagatedBuildInputs = [ |
|
42 | 56 | self."webob" |
|
43 | 57 | self."requests" |
|
44 | 58 | self."six" |
|
45 | 59 | ]; |
|
46 | 60 | src = fetchurl { |
|
47 | 61 | url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz"; |
|
48 | 62 | sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712"; |
|
49 | 63 | }; |
|
50 | 64 | meta = { |
|
51 | 65 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
52 | 66 | }; |
|
53 | 67 | }; |
|
54 | 68 | "asn1crypto" = super.buildPythonPackage { |
|
55 | 69 | name = "asn1crypto-0.24.0"; |
|
56 | 70 | doCheck = false; |
|
57 | 71 | src = fetchurl { |
|
58 | 72 | url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"; |
|
59 | 73 | sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x"; |
|
60 | 74 | }; |
|
61 | 75 | meta = { |
|
62 | 76 | license = [ pkgs.lib.licenses.mit ]; |
|
63 | 77 | }; |
|
64 | 78 | }; |
|
65 | 79 | "atomicwrites" = super.buildPythonPackage { |
|
66 | 80 | name = "atomicwrites-1.3.0"; |
|
67 | 81 | doCheck = false; |
|
68 | 82 | src = fetchurl { |
|
69 | 83 | url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz"; |
|
70 | 84 | sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm"; |
|
71 | 85 | }; |
|
72 | 86 | meta = { |
|
73 | 87 | license = [ pkgs.lib.licenses.mit ]; |
|
74 | 88 | }; |
|
75 | 89 | }; |
|
76 | 90 | "attrs" = super.buildPythonPackage { |
|
77 | 91 | name = "attrs-19.3.0"; |
|
78 | 92 | doCheck = false; |
|
79 | 93 | src = fetchurl { |
|
80 | 94 | url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz"; |
|
81 | 95 | sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp"; |
|
82 | 96 | }; |
|
83 | 97 | meta = { |
|
84 | 98 | license = [ pkgs.lib.licenses.mit ]; |
|
85 | 99 | }; |
|
86 | 100 | }; |
|
87 | 101 | "babel" = super.buildPythonPackage { |
|
88 | 102 | name = "babel-1.3"; |
|
89 | 103 | doCheck = false; |
|
90 | 104 | propagatedBuildInputs = [ |
|
91 | 105 | self."pytz" |
|
92 | 106 | ]; |
|
93 | 107 | src = fetchurl { |
|
94 | 108 | url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz"; |
|
95 | 109 | sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz"; |
|
96 | 110 | }; |
|
97 | 111 | meta = { |
|
98 | 112 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
99 | 113 | }; |
|
100 | 114 | }; |
|
101 | 115 | "backports.shutil-get-terminal-size" = super.buildPythonPackage { |
|
102 | 116 | name = "backports.shutil-get-terminal-size-1.0.0"; |
|
103 | 117 | doCheck = false; |
|
104 | 118 | src = fetchurl { |
|
105 | 119 | url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz"; |
|
106 | 120 | sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki"; |
|
107 | 121 | }; |
|
108 | 122 | meta = { |
|
109 | 123 | license = [ pkgs.lib.licenses.mit ]; |
|
110 | 124 | }; |
|
111 | 125 | }; |
|
112 | 126 | "beaker" = super.buildPythonPackage { |
|
113 | 127 | name = "beaker-1.9.1"; |
|
114 | 128 | doCheck = false; |
|
115 | 129 | propagatedBuildInputs = [ |
|
116 | 130 | self."funcsigs" |
|
117 | 131 | ]; |
|
118 | 132 | src = fetchurl { |
|
119 | 133 | url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz"; |
|
120 | 134 | sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj"; |
|
121 | 135 | }; |
|
122 | 136 | meta = { |
|
123 | 137 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
124 | 138 | }; |
|
125 | 139 | }; |
|
126 | 140 | "beautifulsoup4" = super.buildPythonPackage { |
|
127 | 141 | name = "beautifulsoup4-4.6.3"; |
|
128 | 142 | doCheck = false; |
|
129 | 143 | src = fetchurl { |
|
130 | 144 | url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz"; |
|
131 | 145 | sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h"; |
|
132 | 146 | }; |
|
133 | 147 | meta = { |
|
134 | 148 | license = [ pkgs.lib.licenses.mit ]; |
|
135 | 149 | }; |
|
136 | 150 | }; |
|
137 | 151 | "billiard" = super.buildPythonPackage { |
|
138 | 152 | name = "billiard-3.6.1.0"; |
|
139 | 153 | doCheck = false; |
|
140 | 154 | src = fetchurl { |
|
141 | 155 | url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz"; |
|
142 | 156 | sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q"; |
|
143 | 157 | }; |
|
144 | 158 | meta = { |
|
145 | 159 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
146 | 160 | }; |
|
147 | 161 | }; |
|
148 | 162 | "bleach" = super.buildPythonPackage { |
|
149 | 163 | name = "bleach-3.1.3"; |
|
150 | 164 | doCheck = false; |
|
151 | 165 | propagatedBuildInputs = [ |
|
152 | 166 | self."six" |
|
153 | 167 | self."webencodings" |
|
154 | 168 | ]; |
|
155 | 169 | src = fetchurl { |
|
156 | 170 | url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz"; |
|
157 | 171 | sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq"; |
|
158 | 172 | }; |
|
159 | 173 | meta = { |
|
160 | 174 | license = [ pkgs.lib.licenses.asl20 ]; |
|
161 | 175 | }; |
|
162 | 176 | }; |
|
163 | 177 | "bumpversion" = super.buildPythonPackage { |
|
164 | 178 | name = "bumpversion-0.5.3"; |
|
165 | 179 | doCheck = false; |
|
166 | 180 | src = fetchurl { |
|
167 | 181 | url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz"; |
|
168 | 182 | sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37"; |
|
169 | 183 | }; |
|
170 | 184 | meta = { |
|
171 | 185 | license = [ pkgs.lib.licenses.mit ]; |
|
172 | 186 | }; |
|
173 | 187 | }; |
|
174 | 188 | "cachetools" = super.buildPythonPackage { |
|
175 | 189 | name = "cachetools-3.1.1"; |
|
176 | 190 | doCheck = false; |
|
177 | 191 | src = fetchurl { |
|
178 | 192 | url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz"; |
|
179 | 193 | sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf"; |
|
180 | 194 | }; |
|
181 | 195 | meta = { |
|
182 | 196 | license = [ pkgs.lib.licenses.mit ]; |
|
183 | 197 | }; |
|
184 | 198 | }; |
|
185 | 199 | "celery" = super.buildPythonPackage { |
|
186 | 200 | name = "celery-4.3.0"; |
|
187 | 201 | doCheck = false; |
|
188 | 202 | propagatedBuildInputs = [ |
|
189 | 203 | self."pytz" |
|
190 | 204 | self."billiard" |
|
191 | 205 | self."kombu" |
|
192 | 206 | self."vine" |
|
193 | 207 | ]; |
|
194 | 208 | src = fetchurl { |
|
195 | 209 | url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz"; |
|
196 | 210 | sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac"; |
|
197 | 211 | }; |
|
198 | 212 | meta = { |
|
199 | 213 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
200 | 214 | }; |
|
201 | 215 | }; |
|
202 | 216 | "certifi" = super.buildPythonPackage { |
|
203 | 217 | name = "certifi-2020.4.5.1"; |
|
204 | 218 | doCheck = false; |
|
205 | 219 | src = fetchurl { |
|
206 | 220 | url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz"; |
|
207 | 221 | sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i"; |
|
208 | 222 | }; |
|
209 | 223 | meta = { |
|
210 | 224 | license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ]; |
|
211 | 225 | }; |
|
212 | 226 | }; |
|
213 | 227 | "cffi" = super.buildPythonPackage { |
|
214 | 228 | name = "cffi-1.12.3"; |
|
215 | 229 | doCheck = false; |
|
216 | 230 | propagatedBuildInputs = [ |
|
217 | 231 | self."pycparser" |
|
218 | 232 | ]; |
|
219 | 233 | src = fetchurl { |
|
220 | 234 | url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz"; |
|
221 | 235 | sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704"; |
|
222 | 236 | }; |
|
223 | 237 | meta = { |
|
224 | 238 | license = [ pkgs.lib.licenses.mit ]; |
|
225 | 239 | }; |
|
226 | 240 | }; |
|
227 | 241 | "chameleon" = super.buildPythonPackage { |
|
228 | 242 | name = "chameleon-2.24"; |
|
229 | 243 | doCheck = false; |
|
230 | 244 | src = fetchurl { |
|
231 | 245 | url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz"; |
|
232 | 246 | sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5"; |
|
233 | 247 | }; |
|
234 | 248 | meta = { |
|
235 | 249 | license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ]; |
|
236 | 250 | }; |
|
237 | 251 | }; |
|
238 | 252 | "channelstream" = super.buildPythonPackage { |
|
239 | name = "channelstream-0. | |

253 | name = "channelstream-0.6.14"; | |
|
240 | 254 | doCheck = false; |
|
241 | 255 | propagatedBuildInputs = [ |
|
242 | 256 | self."gevent" |
|
243 | 257 | self."ws4py" |
|
258 | self."marshmallow" | |
|
259 | self."python-dateutil" | |
|
244 | 260 | self."pyramid" |
|
245 | 261 | self."pyramid-jinja2" |
|
262 | self."pyramid-apispec" | |
|
246 | 263 | self."itsdangerous" |
|
247 | 264 | self."requests" |
|
248 | 265 | self."six" |
|
249 | 266 | ]; |
|
250 | 267 | src = fetchurl { |
|
251 |
url = "https://files.pythonhosted.org/packages/ |
|
|
252 | sha256 = "1qbm4xdl5hfkja683x546bncg3rqq8qv79w1m1a1wd48cqqzb6rm"; | |
|
268 | url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz"; | |
|
269 | sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3"; | |
|
253 | 270 | }; |
|
254 | 271 | meta = { |
|
255 | 272 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
256 | 273 | }; |
|
257 | 274 | }; |
|
258 | 275 | "chardet" = super.buildPythonPackage { |
|
259 | 276 | name = "chardet-3.0.4"; |
|
260 | 277 | doCheck = false; |
|
261 | 278 | src = fetchurl { |
|
262 | 279 | url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"; |
|
263 | 280 | sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4"; |
|
264 | 281 | }; |
|
265 | 282 | meta = { |
|
266 | 283 | license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
267 | 284 | }; |
|
268 | 285 | }; |
|
269 | 286 | "click" = super.buildPythonPackage { |
|
270 | 287 | name = "click-7.0"; |
|
271 | 288 | doCheck = false; |
|
272 | 289 | src = fetchurl { |
|
273 | 290 | url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz"; |
|
274 | 291 | sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v"; |
|
275 | 292 | }; |
|
276 | 293 | meta = { |
|
277 | 294 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
278 | 295 | }; |
|
279 | 296 | }; |
|
280 | 297 | "colander" = super.buildPythonPackage { |
|
281 | 298 | name = "colander-1.7.0"; |
|
282 | 299 | doCheck = false; |
|
283 | 300 | propagatedBuildInputs = [ |
|
284 | 301 | self."translationstring" |
|
285 | 302 | self."iso8601" |
|
286 | 303 | self."enum34" |
|
287 | 304 | ]; |
|
288 | 305 | src = fetchurl { |
|
289 | 306 | url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz"; |
|
290 | 307 | sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p"; |
|
291 | 308 | }; |
|
292 | 309 | meta = { |
|
293 | 310 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
294 | 311 | }; |
|
295 | 312 | }; |
|
296 | 313 | "configobj" = super.buildPythonPackage { |
|
297 | 314 | name = "configobj-5.0.6"; |
|
298 | 315 | doCheck = false; |
|
299 | 316 | propagatedBuildInputs = [ |
|
300 | 317 | self."six" |
|
301 | 318 | ]; |
|
302 | 319 | src = fetchurl { |
|
303 | 320 | url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626"; |
|
304 | 321 | sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp"; |
|
305 | 322 | }; |
|
306 | 323 | meta = { |
|
307 | 324 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
308 | 325 | }; |
|
309 | 326 | }; |
|
310 | 327 | "configparser" = super.buildPythonPackage { |
|
311 | 328 | name = "configparser-4.0.2"; |
|
312 | 329 | doCheck = false; |
|
313 | 330 | src = fetchurl { |
|
314 | 331 | url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz"; |
|
315 | 332 | sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7"; |
|
316 | 333 | }; |
|
317 | 334 | meta = { |
|
318 | 335 | license = [ pkgs.lib.licenses.mit ]; |
|
319 | 336 | }; |
|
320 | 337 | }; |
|
321 | 338 | "contextlib2" = super.buildPythonPackage { |
|
322 | 339 | name = "contextlib2-0.6.0.post1"; |
|
323 | 340 | doCheck = false; |
|
324 | 341 | src = fetchurl { |
|
325 | 342 | url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz"; |
|
326 | 343 | sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01"; |
|
327 | 344 | }; |
|
328 | 345 | meta = { |
|
329 | 346 | license = [ pkgs.lib.licenses.psfl ]; |
|
330 | 347 | }; |
|
331 | 348 | }; |
|
332 | 349 | "cov-core" = super.buildPythonPackage { |
|
333 | 350 | name = "cov-core-1.15.0"; |
|
334 | 351 | doCheck = false; |
|
335 | 352 | propagatedBuildInputs = [ |
|
336 | 353 | self."coverage" |
|
337 | 354 | ]; |
|
338 | 355 | src = fetchurl { |
|
339 | 356 | url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz"; |
|
340 | 357 | sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a"; |
|
341 | 358 | }; |
|
342 | 359 | meta = { |
|
343 | 360 | license = [ pkgs.lib.licenses.mit ]; |
|
344 | 361 | }; |
|
345 | 362 | }; |
|
346 | 363 | "coverage" = super.buildPythonPackage { |
|
347 | 364 | name = "coverage-4.5.4"; |
|
348 | 365 | doCheck = false; |
|
349 | 366 | src = fetchurl { |
|
350 | 367 | url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz"; |
|
351 | 368 | sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0"; |
|
352 | 369 | }; |
|
353 | 370 | meta = { |
|
354 | 371 | license = [ pkgs.lib.licenses.asl20 ]; |
|
355 | 372 | }; |
|
356 | 373 | }; |
|
357 | 374 | "cryptography" = super.buildPythonPackage { |
|
358 | 375 | name = "cryptography-2.6.1"; |
|
359 | 376 | doCheck = false; |
|
360 | 377 | propagatedBuildInputs = [ |
|
361 | 378 | self."asn1crypto" |
|
362 | 379 | self."six" |
|
363 | 380 | self."cffi" |
|
364 | 381 | self."enum34" |
|
365 | 382 | self."ipaddress" |
|
366 | 383 | ]; |
|
367 | 384 | src = fetchurl { |
|
368 | 385 | url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz"; |
|
369 | 386 | sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16"; |
|
370 | 387 | }; |
|
371 | 388 | meta = { |
|
372 | 389 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ]; |
|
373 | 390 | }; |
|
374 | 391 | }; |
|
375 | 392 | "cssselect" = super.buildPythonPackage { |
|
376 | 393 | name = "cssselect-1.0.3"; |
|
377 | 394 | doCheck = false; |
|
378 | 395 | src = fetchurl { |
|
379 | 396 | url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz"; |
|
380 | 397 | sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86"; |
|
381 | 398 | }; |
|
382 | 399 | meta = { |
|
383 | 400 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
384 | 401 | }; |
|
385 | 402 | }; |
|
386 | 403 | "cssutils" = super.buildPythonPackage { |
|
387 | 404 | name = "cssutils-1.0.2"; |
|
388 | 405 | doCheck = false; |
|
389 | 406 | src = fetchurl { |
|
390 | 407 | url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz"; |
|
391 | 408 | sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52"; |
|
392 | 409 | }; |
|
393 | 410 | meta = { |
|
394 | 411 | license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ]; |
|
395 | 412 | }; |
|
396 | 413 | }; |
|
397 | 414 | "decorator" = super.buildPythonPackage { |
|
398 | 415 | name = "decorator-4.1.2"; |
|
399 | 416 | doCheck = false; |
|
400 | 417 | src = fetchurl { |
|
401 | 418 | url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz"; |
|
402 | 419 | sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw"; |
|
403 | 420 | }; |
|
404 | 421 | meta = { |
|
405 | 422 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ]; |
|
406 | 423 | }; |
|
407 | 424 | }; |
|
408 | 425 | "deform" = super.buildPythonPackage { |
|
409 | 426 | name = "deform-2.0.8"; |
|
410 | 427 | doCheck = false; |
|
411 | 428 | propagatedBuildInputs = [ |
|
412 | 429 | self."chameleon" |
|
413 | 430 | self."colander" |
|
414 | 431 | self."iso8601" |
|
415 | 432 | self."peppercorn" |
|
416 | 433 | self."translationstring" |
|
417 | 434 | self."zope.deprecation" |
|
418 | 435 | ]; |
|
419 | 436 | src = fetchurl { |
|
420 | 437 | url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz"; |
|
421 | 438 | sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9"; |
|
422 | 439 | }; |
|
423 | 440 | meta = { |
|
424 | 441 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
425 | 442 | }; |
|
426 | 443 | }; |
|
427 | 444 | "defusedxml" = super.buildPythonPackage { |
|
428 | 445 | name = "defusedxml-0.6.0"; |
|
429 | 446 | doCheck = false; |
|
430 | 447 | src = fetchurl { |
|
431 | 448 | url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz"; |
|
432 | 449 | sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n"; |
|
433 | 450 | }; |
|
434 | 451 | meta = { |
|
435 | 452 | license = [ pkgs.lib.licenses.psfl ]; |
|
436 | 453 | }; |
|
437 | 454 | }; |
|
438 | 455 | "dm.xmlsec.binding" = super.buildPythonPackage { |
|
439 | 456 | name = "dm.xmlsec.binding-1.3.7"; |
|
440 | 457 | doCheck = false; |
|
441 | 458 | propagatedBuildInputs = [ |
|
442 | 459 | self."setuptools" |
|
443 | 460 | self."lxml" |
|
444 | 461 | ]; |
|
445 | 462 | src = fetchurl { |
|
446 | 463 | url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz"; |
|
447 | 464 | sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3"; |
|
448 | 465 | }; |
|
449 | 466 | meta = { |
|
450 | 467 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
451 | 468 | }; |
|
452 | 469 | }; |
|
453 | 470 | "docutils" = super.buildPythonPackage { |
|
454 | 471 | name = "docutils-0.16"; |
|
455 | 472 | doCheck = false; |
|
456 | 473 | src = fetchurl { |
|
457 | 474 | url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz"; |
|
458 | 475 | sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2"; |
|
459 | 476 | }; |
|
460 | 477 | meta = { |
|
461 | 478 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ]; |
|
462 | 479 | }; |
|
463 | 480 | }; |
|
464 | 481 | "dogpile.cache" = super.buildPythonPackage { |
|
465 | 482 | name = "dogpile.cache-0.9.0"; |
|
466 | 483 | doCheck = false; |
|
467 | 484 | propagatedBuildInputs = [ |
|
468 | 485 | self."decorator" |
|
469 | 486 | ]; |
|
470 | 487 | src = fetchurl { |
|
471 | 488 | url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz"; |
|
472 | 489 | sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k"; |
|
473 | 490 | }; |
|
474 | 491 | meta = { |
|
475 | 492 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
476 | 493 | }; |
|
477 | 494 | }; |
|
478 | 495 | "dogpile.core" = super.buildPythonPackage { |
|
479 | 496 | name = "dogpile.core-0.4.1"; |
|
480 | 497 | doCheck = false; |
|
481 | 498 | src = fetchurl { |
|
482 | 499 | url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz"; |
|
483 | 500 | sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy"; |
|
484 | 501 | }; |
|
485 | 502 | meta = { |
|
486 | 503 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
487 | 504 | }; |
|
488 | 505 | }; |
|
489 | 506 | "ecdsa" = super.buildPythonPackage { |
|
490 | 507 | name = "ecdsa-0.13.2"; |
|
491 | 508 | doCheck = false; |
|
492 | 509 | src = fetchurl { |
|
493 | 510 | url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz"; |
|
494 | 511 | sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw"; |
|
495 | 512 | }; |
|
496 | 513 | meta = { |
|
497 | 514 | license = [ pkgs.lib.licenses.mit ]; |
|
498 | 515 | }; |
|
499 | 516 | }; |
|
500 | 517 | "elasticsearch" = super.buildPythonPackage { |
|
501 | 518 | name = "elasticsearch-6.3.1"; |
|
502 | 519 | doCheck = false; |
|
503 | 520 | propagatedBuildInputs = [ |
|
504 | 521 | self."urllib3" |
|
505 | 522 | ]; |
|
506 | 523 | src = fetchurl { |
|
507 | 524 | url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz"; |
|
508 | 525 | sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma"; |
|
509 | 526 | }; |
|
510 | 527 | meta = { |
|
511 | 528 | license = [ pkgs.lib.licenses.asl20 ]; |
|
512 | 529 | }; |
|
513 | 530 | }; |
|
514 | 531 | "elasticsearch-dsl" = super.buildPythonPackage { |
|
515 | 532 | name = "elasticsearch-dsl-6.3.1"; |
|
516 | 533 | doCheck = false; |
|
517 | 534 | propagatedBuildInputs = [ |
|
518 | 535 | self."six" |
|
519 | 536 | self."python-dateutil" |
|
520 | 537 | self."elasticsearch" |
|
521 | 538 | self."ipaddress" |
|
522 | 539 | ]; |
|
523 | 540 | src = fetchurl { |
|
524 | 541 | url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz"; |
|
525 | 542 | sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z"; |
|
526 | 543 | }; |
|
527 | 544 | meta = { |
|
528 | 545 | license = [ pkgs.lib.licenses.asl20 ]; |
|
529 | 546 | }; |
|
530 | 547 | }; |
|
531 | 548 | "elasticsearch1" = super.buildPythonPackage { |
|
532 | 549 | name = "elasticsearch1-1.10.0"; |
|
533 | 550 | doCheck = false; |
|
534 | 551 | propagatedBuildInputs = [ |
|
535 | 552 | self."urllib3" |
|
536 | 553 | ]; |
|
537 | 554 | src = fetchurl { |
|
538 | 555 | url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz"; |
|
539 | 556 | sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2"; |
|
540 | 557 | }; |
|
541 | 558 | meta = { |
|
542 | 559 | license = [ pkgs.lib.licenses.asl20 ]; |
|
543 | 560 | }; |
|
544 | 561 | }; |
|
545 | 562 | "elasticsearch1-dsl" = super.buildPythonPackage { |
|
546 | 563 | name = "elasticsearch1-dsl-0.0.12"; |
|
547 | 564 | doCheck = false; |
|
548 | 565 | propagatedBuildInputs = [ |
|
549 | 566 | self."six" |
|
550 | 567 | self."python-dateutil" |
|
551 | 568 | self."elasticsearch1" |
|
552 | 569 | ]; |
|
553 | 570 | src = fetchurl { |
|
554 | 571 | url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz"; |
|
555 | 572 | sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk"; |
|
556 | 573 | }; |
|
557 | 574 | meta = { |
|
558 | 575 | license = [ pkgs.lib.licenses.asl20 ]; |
|
559 | 576 | }; |
|
560 | 577 | }; |
|
561 | 578 | "elasticsearch2" = super.buildPythonPackage { |
|
562 | 579 | name = "elasticsearch2-2.5.1"; |
|
563 | 580 | doCheck = false; |
|
564 | 581 | propagatedBuildInputs = [ |
|
565 | 582 | self."urllib3" |
|
566 | 583 | ]; |
|
567 | 584 | src = fetchurl { |
|
568 | 585 | url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz"; |
|
569 | 586 | sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k"; |
|
570 | 587 | }; |
|
571 | 588 | meta = { |
|
572 | 589 | license = [ pkgs.lib.licenses.asl20 ]; |
|
573 | 590 | }; |
|
574 | 591 | }; |
|
575 | 592 | "entrypoints" = super.buildPythonPackage { |
|
576 | 593 | name = "entrypoints-0.2.2"; |
|
577 | 594 | doCheck = false; |
|
578 | 595 | propagatedBuildInputs = [ |
|
579 | 596 | self."configparser" |
|
580 | 597 | ]; |
|
581 | 598 | src = fetchurl { |
|
582 | 599 | url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d"; |
|
583 | 600 | sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5"; |
|
584 | 601 | }; |
|
585 | 602 | meta = { |
|
586 | 603 | license = [ pkgs.lib.licenses.mit ]; |
|
587 | 604 | }; |
|
588 | 605 | }; |
|
589 | 606 | "enum34" = super.buildPythonPackage { |
|
590 | 607 | name = "enum34-1.1.10"; |
|
591 | 608 | doCheck = false; |
|
592 | 609 | src = fetchurl { |
|
593 | 610 | url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz"; |
|
594 | 611 | sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc"; |
|
595 | 612 | }; |
|
596 | 613 | meta = { |
|
597 | 614 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
598 | 615 | }; |
|
599 | 616 | }; |
|
600 | 617 | "formencode" = super.buildPythonPackage { |
|
601 | 618 | name = "formencode-1.2.4"; |
|
602 | 619 | doCheck = false; |
|
603 | 620 | src = fetchurl { |
|
604 | 621 | url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz"; |
|
605 | 622 | sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42"; |
|
606 | 623 | }; |
|
607 | 624 | meta = { |
|
608 | 625 | license = [ pkgs.lib.licenses.psfl ]; |
|
609 | 626 | }; |
|
610 | 627 | }; |
|
611 | 628 | "funcsigs" = super.buildPythonPackage { |
|
612 | 629 | name = "funcsigs-1.0.2"; |
|
613 | 630 | doCheck = false; |
|
614 | 631 | src = fetchurl { |
|
615 | 632 | url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz"; |
|
616 | 633 | sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7"; |
|
617 | 634 | }; |
|
618 | 635 | meta = { |
|
619 | 636 | license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ]; |
|
620 | 637 | }; |
|
621 | 638 | }; |
|
622 | 639 | "functools32" = super.buildPythonPackage { |
|
623 | 640 | name = "functools32-3.2.3.post2"; |
|
624 | 641 | doCheck = false; |
|
625 | 642 | src = fetchurl { |
|
626 | 643 | url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz"; |
|
627 | 644 | sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn"; |
|
628 | 645 | }; |
|
629 | 646 | meta = { |
|
630 | 647 | license = [ pkgs.lib.licenses.psfl ]; |
|
631 | 648 | }; |
|
632 | 649 | }; |
|
633 | 650 | "future" = super.buildPythonPackage { |
|
634 | 651 | name = "future-0.14.3"; |
|
635 | 652 | doCheck = false; |
|
636 | 653 | src = fetchurl { |
|
637 | 654 | url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz"; |
|
638 | 655 | sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2"; |
|
639 | 656 | }; |
|
640 | 657 | meta = { |
|
641 | 658 | license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ]; |
|
642 | 659 | }; |
|
643 | 660 | }; |
|
644 | 661 | "futures" = super.buildPythonPackage { |
|
645 | 662 | name = "futures-3.0.2"; |
|
646 | 663 | doCheck = false; |
|
647 | 664 | src = fetchurl { |
|
648 | 665 | url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz"; |
|
649 | 666 | sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw"; |
|
650 | 667 | }; |
|
651 | 668 | meta = { |
|
652 | 669 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
653 | 670 | }; |
|
654 | 671 | }; |
|
655 | 672 | "gevent" = super.buildPythonPackage { |
|
656 | 673 | name = "gevent-1.5.0"; |
|
657 | 674 | doCheck = false; |
|
658 | 675 | propagatedBuildInputs = [ |
|
659 | 676 | self."greenlet" |
|
660 | 677 | ]; |
|
661 | 678 | src = fetchurl { |
|
662 | 679 | url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz"; |
|
663 | 680 | sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj"; |
|
664 | 681 | }; |
|
665 | 682 | meta = { |
|
666 | 683 | license = [ pkgs.lib.licenses.mit ]; |
|
667 | 684 | }; |
|
668 | 685 | }; |
|
669 | 686 | "gnureadline" = super.buildPythonPackage { |
|
670 | 687 | name = "gnureadline-6.3.8"; |
|
671 | 688 | doCheck = false; |
|
672 | 689 | src = fetchurl { |
|
673 | 690 | url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz"; |
|
674 | 691 | sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq"; |
|
675 | 692 | }; |
|
676 | 693 | meta = { |
|
677 | 694 | license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ]; |
|
678 | 695 | }; |
|
679 | 696 | }; |
|
680 | 697 | "gprof2dot" = super.buildPythonPackage { |
|
681 | 698 | name = "gprof2dot-2017.9.19"; |
|
682 | 699 | doCheck = false; |
|
683 | 700 | src = fetchurl { |
|
684 | 701 | url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz"; |
|
685 | 702 | sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f"; |
|
686 | 703 | }; |
|
687 | 704 | meta = { |
|
688 | 705 | license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ]; |
|
689 | 706 | }; |
|
690 | 707 | }; |
|
691 | 708 | "greenlet" = super.buildPythonPackage { |
|
692 | 709 | name = "greenlet-0.4.15"; |
|
693 | 710 | doCheck = false; |
|
694 | 711 | src = fetchurl { |
|
695 | 712 | url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz"; |
|
696 | 713 | sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll"; |
|
697 | 714 | }; |
|
698 | 715 | meta = { |
|
699 | 716 | license = [ pkgs.lib.licenses.mit ]; |
|
700 | 717 | }; |
|
701 | 718 | }; |
|
702 | 719 | "gunicorn" = super.buildPythonPackage { |
|
703 | 720 | name = "gunicorn-19.9.0"; |
|
704 | 721 | doCheck = false; |
|
705 | 722 | src = fetchurl { |
|
706 | 723 | url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz"; |
|
707 | 724 | sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps"; |
|
708 | 725 | }; |
|
709 | 726 | meta = { |
|
710 | 727 | license = [ pkgs.lib.licenses.mit ]; |
|
711 | 728 | }; |
|
712 | 729 | }; |
|
713 | 730 | "hupper" = super.buildPythonPackage { |
|
714 | 731 | name = "hupper-1.10.2"; |
|
715 | 732 | doCheck = false; |
|
716 | 733 | src = fetchurl { |
|
717 | 734 | url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz"; |
|
718 | 735 | sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q"; |
|
719 | 736 | }; |
|
720 | 737 | meta = { |
|
721 | 738 | license = [ pkgs.lib.licenses.mit ]; |
|
722 | 739 | }; |
|
723 | 740 | }; |
|
724 | 741 | "idna" = super.buildPythonPackage { |
|
725 | 742 | name = "idna-2.8"; |
|
726 | 743 | doCheck = false; |
|
727 | 744 | src = fetchurl { |
|
728 | 745 | url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"; |
|
729 | 746 | sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3"; |
|
730 | 747 | }; |
|
731 | 748 | meta = { |
|
732 | 749 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ]; |
|
733 | 750 | }; |
|
734 | 751 | }; |
|
735 | 752 | "importlib-metadata" = super.buildPythonPackage { |
|
736 | 753 | name = "importlib-metadata-1.6.0"; |
|
737 | 754 | doCheck = false; |
|
738 | 755 | propagatedBuildInputs = [ |
|
739 | 756 | self."zipp" |
|
740 | 757 | self."pathlib2" |
|
741 | 758 | self."contextlib2" |
|
742 | 759 | self."configparser" |
|
743 | 760 | ]; |
|
744 | 761 | src = fetchurl { |
|
745 | 762 | url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz"; |
|
746 | 763 | sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l"; |
|
747 | 764 | }; |
|
748 | 765 | meta = { |
|
749 | 766 | license = [ pkgs.lib.licenses.asl20 ]; |
|
750 | 767 | }; |
|
751 | 768 | }; |
|
752 | 769 | "infrae.cache" = super.buildPythonPackage { |
|
753 | 770 | name = "infrae.cache-1.0.1"; |
|
754 | 771 | doCheck = false; |
|
755 | 772 | propagatedBuildInputs = [ |
|
756 | 773 | self."beaker" |
|
757 | 774 | self."repoze.lru" |
|
758 | 775 | ]; |
|
759 | 776 | src = fetchurl { |
|
760 | 777 | url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz"; |
|
761 | 778 | sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4"; |
|
762 | 779 | }; |
|
763 | 780 | meta = { |
|
764 | 781 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
765 | 782 | }; |
|
766 | 783 | }; |
|
767 | 784 | "invoke" = super.buildPythonPackage { |
|
768 | 785 | name = "invoke-0.13.0"; |
|
769 | 786 | doCheck = false; |
|
770 | 787 | src = fetchurl { |
|
771 | 788 | url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz"; |
|
772 | 789 | sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s"; |
|
773 | 790 | }; |
|
774 | 791 | meta = { |
|
775 | 792 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
776 | 793 | }; |
|
777 | 794 | }; |
|
778 | 795 | "ipaddress" = super.buildPythonPackage { |
|
779 | 796 | name = "ipaddress-1.0.23"; |
|
780 | 797 | doCheck = false; |
|
781 | 798 | src = fetchurl { |
|
782 | 799 | url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz"; |
|
783 | 800 | sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p"; |
|
784 | 801 | }; |
|
785 | 802 | meta = { |
|
786 | 803 | license = [ pkgs.lib.licenses.psfl ]; |
|
787 | 804 | }; |
|
788 | 805 | }; |
|
789 | 806 | "ipdb" = super.buildPythonPackage { |
|
790 | 807 | name = "ipdb-0.13.2"; |
|
791 | 808 | doCheck = false; |
|
792 | 809 | propagatedBuildInputs = [ |
|
793 | 810 | self."setuptools" |
|
794 | 811 | self."ipython" |
|
795 | 812 | ]; |
|
796 | 813 | src = fetchurl { |
|
797 | 814 | url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz"; |
|
798 | 815 | sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp"; |
|
799 | 816 | }; |
|
800 | 817 | meta = { |
|
801 | 818 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
802 | 819 | }; |
|
803 | 820 | }; |
|
804 | 821 | "ipython" = super.buildPythonPackage { |
|
805 | 822 | name = "ipython-5.1.0"; |
|
806 | 823 | doCheck = false; |
|
807 | 824 | propagatedBuildInputs = [ |
|
808 | 825 | self."setuptools" |
|
809 | 826 | self."decorator" |
|
810 | 827 | self."pickleshare" |
|
811 | 828 | self."simplegeneric" |
|
812 | 829 | self."traitlets" |
|
813 | 830 | self."prompt-toolkit" |
|
814 | 831 | self."pygments" |
|
815 | 832 | self."pexpect" |
|
816 | 833 | self."backports.shutil-get-terminal-size" |
|
817 | 834 | self."pathlib2" |
|
818 | 835 | self."pexpect" |
|
819 | 836 | ]; |
|
820 | 837 | src = fetchurl { |
|
821 | 838 | url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz"; |
|
822 | 839 | sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y"; |
|
823 | 840 | }; |
|
824 | 841 | meta = { |
|
825 | 842 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
826 | 843 | }; |
|
827 | 844 | }; |
|
828 | 845 | "ipython-genutils" = super.buildPythonPackage { |
|
829 | 846 | name = "ipython-genutils-0.2.0"; |
|
830 | 847 | doCheck = false; |
|
831 | 848 | src = fetchurl { |
|
832 | 849 | url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz"; |
|
833 | 850 | sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb"; |
|
834 | 851 | }; |
|
835 | 852 | meta = { |
|
836 | 853 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
837 | 854 | }; |
|
838 | 855 | }; |
|
839 | 856 | "iso8601" = super.buildPythonPackage { |
|
840 | 857 | name = "iso8601-0.1.12"; |
|
841 | 858 | doCheck = false; |
|
842 | 859 | src = fetchurl { |
|
843 | 860 | url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz"; |
|
844 | 861 | sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29"; |
|
845 | 862 | }; |
|
846 | 863 | meta = { |
|
847 | 864 | license = [ pkgs.lib.licenses.mit ]; |
|
848 | 865 | }; |
|
849 | 866 | }; |
|
850 | 867 | "isodate" = super.buildPythonPackage { |
|
851 | 868 | name = "isodate-0.6.0"; |
|
852 | 869 | doCheck = false; |
|
853 | 870 | propagatedBuildInputs = [ |
|
854 | 871 | self."six" |
|
855 | 872 | ]; |
|
856 | 873 | src = fetchurl { |
|
857 | 874 | url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz"; |
|
858 | 875 | sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif"; |
|
859 | 876 | }; |
|
860 | 877 | meta = { |
|
861 | 878 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
862 | 879 | }; |
|
863 | 880 | }; |
|
864 | 881 | "itsdangerous" = super.buildPythonPackage { |
|
865 | name = "itsdangerous-0

882 | name = "itsdangerous-1.1.0";
|
866 | 883 | doCheck = false; |
|
867 | 884 | src = fetchurl { |
|
868 | url = "https://files.pythonhosted.org/packages/dc

869 | sha256 = "06856q6x675ly542ig0plbqcyab6ksfzijlyf1hzhgg3sgwgrcyb";

885 | url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";

886 | sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
|
870 | 887 | }; |
|
871 | 888 | meta = { |
|
872 | 889 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
873 | 890 | }; |
|
874 | 891 | }; |
|
875 | 892 | "jinja2" = super.buildPythonPackage { |
|
876 | 893 | name = "jinja2-2.9.6"; |
|
877 | 894 | doCheck = false; |
|
878 | 895 | propagatedBuildInputs = [ |
|
879 | 896 | self."markupsafe" |
|
880 | 897 | ]; |
|
881 | 898 | src = fetchurl { |
|
882 | 899 | url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz"; |
|
883 | 900 | sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx"; |
|
884 | 901 | }; |
|
885 | 902 | meta = { |
|
886 | 903 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
887 | 904 | }; |
|
888 | 905 | }; |
|
889 | 906 | "jsonschema" = super.buildPythonPackage { |
|
890 | 907 | name = "jsonschema-2.6.0"; |
|
891 | 908 | doCheck = false; |
|
892 | 909 | propagatedBuildInputs = [ |
|
893 | 910 | self."functools32" |
|
894 | 911 | ]; |
|
895 | 912 | src = fetchurl { |
|
896 | 913 | url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz"; |
|
897 | 914 | sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg"; |
|
898 | 915 | }; |
|
899 | 916 | meta = { |
|
900 | 917 | license = [ pkgs.lib.licenses.mit ]; |
|
901 | 918 | }; |
|
902 | 919 | }; |
|
903 | 920 | "jupyter-client" = super.buildPythonPackage { |
|
904 | 921 | name = "jupyter-client-5.0.0"; |
|
905 | 922 | doCheck = false; |
|
906 | 923 | propagatedBuildInputs = [ |
|
907 | 924 | self."traitlets" |
|
908 | 925 | self."jupyter-core" |
|
909 | 926 | self."pyzmq" |
|
910 | 927 | self."python-dateutil" |
|
911 | 928 | ]; |
|
912 | 929 | src = fetchurl { |
|
913 | 930 | url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz"; |
|
914 | 931 | sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7"; |
|
915 | 932 | }; |
|
916 | 933 | meta = { |
|
917 | 934 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
918 | 935 | }; |
|
919 | 936 | }; |
|
920 | 937 | "jupyter-core" = super.buildPythonPackage { |
|
921 | 938 | name = "jupyter-core-4.5.0"; |
|
922 | 939 | doCheck = false; |
|
923 | 940 | propagatedBuildInputs = [ |
|
924 | 941 | self."traitlets" |
|
925 | 942 | ]; |
|
926 | 943 | src = fetchurl { |
|
927 | 944 | url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz"; |
|
928 | 945 | sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic"; |
|
929 | 946 | }; |
|
930 | 947 | meta = { |
|
931 | 948 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
932 | 949 | }; |
|
933 | 950 | }; |
|
934 | 951 | "kombu" = super.buildPythonPackage { |
|
935 | 952 | name = "kombu-4.6.6"; |
|
936 | 953 | doCheck = false; |
|
937 | 954 | propagatedBuildInputs = [ |
|
938 | 955 | self."amqp" |
|
939 | 956 | self."importlib-metadata" |
|
940 | 957 | ]; |
|
941 | 958 | src = fetchurl { |
|
942 | 959 | url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz"; |
|
943 | 960 | sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p"; |
|
944 | 961 | }; |
|
945 | 962 | meta = { |
|
946 | 963 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
947 | 964 | }; |
|
948 | 965 | }; |
|
949 | 966 | "lxml" = super.buildPythonPackage { |
|
950 | 967 | name = "lxml-4.2.5"; |
|
951 | 968 | doCheck = false; |
|
952 | 969 | src = fetchurl { |
|
953 | 970 | url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz"; |
|
954 | 971 | sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin"; |
|
955 | 972 | }; |
|
956 | 973 | meta = { |
|
957 | 974 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
958 | 975 | }; |
|
959 | 976 | }; |
|
960 | 977 | "mako" = super.buildPythonPackage { |
|
961 | 978 | name = "mako-1.1.0"; |
|
962 | 979 | doCheck = false; |
|
963 | 980 | propagatedBuildInputs = [ |
|
964 | 981 | self."markupsafe" |
|
965 | 982 | ]; |
|
966 | 983 | src = fetchurl { |
|
967 | 984 | url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz"; |
|
968 | 985 | sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3"; |
|
969 | 986 | }; |
|
970 | 987 | meta = { |
|
971 | 988 | license = [ pkgs.lib.licenses.mit ]; |
|
972 | 989 | }; |
|
973 | 990 | }; |
|
974 | 991 | "markdown" = super.buildPythonPackage { |
|
975 | 992 | name = "markdown-2.6.11"; |
|
976 | 993 | doCheck = false; |
|
977 | 994 | src = fetchurl { |
|
978 | 995 | url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz"; |
|
979 | 996 | sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8"; |
|
980 | 997 | }; |
|
981 | 998 | meta = { |
|
982 | 999 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
983 | 1000 | }; |
|
984 | 1001 | }; |
|
985 | 1002 | "markupsafe" = super.buildPythonPackage { |
|
986 | 1003 | name = "markupsafe-1.1.1"; |
|
987 | 1004 | doCheck = false; |
|
988 | 1005 | src = fetchurl { |
|
989 | 1006 | url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"; |
|
990 | 1007 | sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9"; |
|
991 | 1008 | }; |
|
992 | 1009 | meta = { |
|
993 | 1010 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ]; |
|
994 | 1011 | }; |
|
995 | 1012 | }; |
|
1013 | "marshmallow" = super.buildPythonPackage { | |
|
1014 | name = "marshmallow-2.18.0"; | |
|
1015 | doCheck = false; | |
|
1016 | src = fetchurl { | |
|
1017 | url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz"; | |
|
1018 | sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm"; | |
|
1019 | }; | |
|
1020 | meta = { | |
|
1021 | license = [ pkgs.lib.licenses.mit ]; | |
|
1022 | }; | |
|
1023 | }; | |
|
996 | 1024 | "mistune" = super.buildPythonPackage { |
|
997 | 1025 | name = "mistune-0.8.4"; |
|
998 | 1026 | doCheck = false; |
|
999 | 1027 | src = fetchurl { |
|
1000 | 1028 | url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz"; |
|
1001 | 1029 | sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr"; |
|
1002 | 1030 | }; |
|
1003 | 1031 | meta = { |
|
1004 | 1032 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1005 | 1033 | }; |
|
1006 | 1034 | }; |
|
1007 | 1035 | "mock" = super.buildPythonPackage { |
|
1008 | 1036 | name = "mock-3.0.5"; |
|
1009 | 1037 | doCheck = false; |
|
1010 | 1038 | propagatedBuildInputs = [ |
|
1011 | 1039 | self."six" |
|
1012 | 1040 | self."funcsigs" |
|
1013 | 1041 | ]; |
|
1014 | 1042 | src = fetchurl { |
|
1015 | 1043 | url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz"; |
|
1016 | 1044 | sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3"; |
|
1017 | 1045 | }; |
|
1018 | 1046 | meta = { |
|
1019 | 1047 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ]; |
|
1020 | 1048 | }; |
|
1021 | 1049 | }; |
|
1022 | 1050 | "more-itertools" = super.buildPythonPackage { |
|
1023 | 1051 | name = "more-itertools-5.0.0"; |
|
1024 | 1052 | doCheck = false; |
|
1025 | 1053 | propagatedBuildInputs = [ |
|
1026 | 1054 | self."six" |
|
1027 | 1055 | ]; |
|
1028 | 1056 | src = fetchurl { |
|
1029 | 1057 | url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz"; |
|
1030 | 1058 | sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q"; |
|
1031 | 1059 | }; |
|
1032 | 1060 | meta = { |
|
1033 | 1061 | license = [ pkgs.lib.licenses.mit ]; |
|
1034 | 1062 | }; |
|
1035 | 1063 | }; |
|
1036 | 1064 | "msgpack-python" = super.buildPythonPackage { |
|
1037 | 1065 | name = "msgpack-python-0.5.6"; |
|
1038 | 1066 | doCheck = false; |
|
1039 | 1067 | src = fetchurl { |
|
1040 | 1068 | url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz"; |
|
1041 | 1069 | sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p"; |
|
1042 | 1070 | }; |
|
1043 | 1071 | meta = { |
|
1044 | 1072 | license = [ pkgs.lib.licenses.asl20 ]; |
|
1045 | 1073 | }; |
|
1046 | 1074 | }; |
|
1047 | 1075 | "mysql-python" = super.buildPythonPackage { |
|
1048 | 1076 | name = "mysql-python-1.2.5"; |
|
1049 | 1077 | doCheck = false; |
|
1050 | 1078 | src = fetchurl { |
|
1051 | 1079 | url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip"; |
|
1052 | 1080 | sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441"; |
|
1053 | 1081 | }; |
|
1054 | 1082 | meta = { |
|
1055 | 1083 | license = [ pkgs.lib.licenses.gpl1 ]; |
|
1056 | 1084 | }; |
|
1057 | 1085 | }; |
|
1058 | 1086 | "nbconvert" = super.buildPythonPackage { |
|
1059 | 1087 | name = "nbconvert-5.3.1"; |
|
1060 | 1088 | doCheck = false; |
|
1061 | 1089 | propagatedBuildInputs = [ |
|
1062 | 1090 | self."mistune" |
|
1063 | 1091 | self."jinja2" |
|
1064 | 1092 | self."pygments" |
|
1065 | 1093 | self."traitlets" |
|
1066 | 1094 | self."jupyter-core" |
|
1067 | 1095 | self."nbformat" |
|
1068 | 1096 | self."entrypoints" |
|
1069 | 1097 | self."bleach" |
|
1070 | 1098 | self."pandocfilters" |
|
1071 | 1099 | self."testpath" |
|
1072 | 1100 | ]; |
|
1073 | 1101 | src = fetchurl { |
|
1074 | 1102 | url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz"; |
|
1075 | 1103 | sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j"; |
|
1076 | 1104 | }; |
|
1077 | 1105 | meta = { |
|
1078 | 1106 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1079 | 1107 | }; |
|
1080 | 1108 | }; |
|
1081 | 1109 | "nbformat" = super.buildPythonPackage { |
|
1082 | 1110 | name = "nbformat-4.4.0"; |
|
1083 | 1111 | doCheck = false; |
|
1084 | 1112 | propagatedBuildInputs = [ |
|
1085 | 1113 | self."ipython-genutils" |
|
1086 | 1114 | self."traitlets" |
|
1087 | 1115 | self."jsonschema" |
|
1088 | 1116 | self."jupyter-core" |
|
1089 | 1117 | ]; |
|
1090 | 1118 | src = fetchurl { |
|
1091 | 1119 | url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz"; |
|
1092 | 1120 | sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp"; |
|
1093 | 1121 | }; |
|
1094 | 1122 | meta = { |
|
1095 | 1123 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1096 | 1124 | }; |
|
1097 | 1125 | }; |
|
1098 | 1126 | "packaging" = super.buildPythonPackage { |
|
1099 | 1127 | name = "packaging-20.3"; |
|
1100 | 1128 | doCheck = false; |
|
1101 | 1129 | propagatedBuildInputs = [ |
|
1102 | 1130 | self."pyparsing" |
|
1103 | 1131 | self."six" |
|
1104 | 1132 | ]; |
|
1105 | 1133 | src = fetchurl { |
|
1106 | 1134 | url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz"; |
|
1107 | 1135 | sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w"; |
|
1108 | 1136 | }; |
|
1109 | 1137 | meta = { |
|
1110 | 1138 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ]; |
|
1111 | 1139 | }; |
|
1112 | 1140 | }; |
|
1113 | 1141 | "pandocfilters" = super.buildPythonPackage { |
|
1114 | 1142 | name = "pandocfilters-1.4.2"; |
|
1115 | 1143 | doCheck = false; |
|
1116 | 1144 | src = fetchurl { |
|
1117 | 1145 | url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz"; |
|
1118 | 1146 | sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk"; |
|
1119 | 1147 | }; |
|
1120 | 1148 | meta = { |
|
1121 | 1149 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1122 | 1150 | }; |
|
1123 | 1151 | }; |
|
1124 | 1152 | "paste" = super.buildPythonPackage { |
|
1125 | 1153 | name = "paste-3.4.0"; |
|
1126 | 1154 | doCheck = false; |
|
1127 | 1155 | propagatedBuildInputs = [ |
|
1128 | 1156 | self."six" |
|
1129 | 1157 | ]; |
|
1130 | 1158 | src = fetchurl { |
|
1131 | 1159 | url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz"; |
|
1132 | 1160 | sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03"; |
|
1133 | 1161 | }; |
|
1134 | 1162 | meta = { |
|
1135 | 1163 | license = [ pkgs.lib.licenses.mit ]; |
|
1136 | 1164 | }; |
|
1137 | 1165 | }; |
|
1138 | 1166 | "pastedeploy" = super.buildPythonPackage { |
|
1139 | 1167 | name = "pastedeploy-2.1.0"; |
|
1140 | 1168 | doCheck = false; |
|
1141 | 1169 | src = fetchurl { |
|
1142 | 1170 | url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz"; |
|
1143 | 1171 | sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7"; |
|
1144 | 1172 | }; |
|
1145 | 1173 | meta = { |
|
1146 | 1174 | license = [ pkgs.lib.licenses.mit ]; |
|
1147 | 1175 | }; |
|
1148 | 1176 | }; |
|
1149 | 1177 | "pastescript" = super.buildPythonPackage { |
|
1150 | 1178 | name = "pastescript-3.2.0"; |
|
1151 | 1179 | doCheck = false; |
|
1152 | 1180 | propagatedBuildInputs = [ |
|
1153 | 1181 | self."paste" |
|
1154 | 1182 | self."pastedeploy" |
|
1155 | 1183 | self."six" |
|
1156 | 1184 | ]; |
|
1157 | 1185 | src = fetchurl { |
|
1158 | 1186 | url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz"; |
|
1159 | 1187 | sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv"; |
|
1160 | 1188 | }; |
|
1161 | 1189 | meta = { |
|
1162 | 1190 | license = [ pkgs.lib.licenses.mit ]; |
|
1163 | 1191 | }; |
|
1164 | 1192 | }; |
|
1165 | 1193 | "pathlib2" = super.buildPythonPackage { |
|
1166 | 1194 | name = "pathlib2-2.3.5"; |
|
1167 | 1195 | doCheck = false; |
|
1168 | 1196 | propagatedBuildInputs = [ |
|
1169 | 1197 | self."six" |
|
1170 | 1198 | self."scandir" |
|
1171 | 1199 | ]; |
|
1172 | 1200 | src = fetchurl { |
|
1173 | 1201 | url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz"; |
|
1174 | 1202 | sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc"; |
|
1175 | 1203 | }; |
|
1176 | 1204 | meta = { |
|
1177 | 1205 | license = [ pkgs.lib.licenses.mit ]; |
|
1178 | 1206 | }; |
|
1179 | 1207 | }; |
|
1180 | 1208 | "peppercorn" = super.buildPythonPackage { |
|
1181 | 1209 | name = "peppercorn-0.6"; |
|
1182 | 1210 | doCheck = false; |
|
1183 | 1211 | src = fetchurl { |
|
1184 | 1212 | url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz"; |
|
1185 | 1213 | sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn"; |
|
1186 | 1214 | }; |
|
1187 | 1215 | meta = { |
|
1188 | 1216 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1189 | 1217 | }; |
|
1190 | 1218 | }; |
|
1191 | 1219 | "pexpect" = super.buildPythonPackage { |
|
1192 | 1220 | name = "pexpect-4.8.0"; |
|
1193 | 1221 | doCheck = false; |
|
1194 | 1222 | propagatedBuildInputs = [ |
|
1195 | 1223 | self."ptyprocess" |
|
1196 | 1224 | ]; |
|
1197 | 1225 | src = fetchurl { |
|
1198 | 1226 | url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz"; |
|
1199 | 1227 | sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw"; |
|
1200 | 1228 | }; |
|
1201 | 1229 | meta = { |
|
1202 | 1230 | license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ]; |
|
1203 | 1231 | }; |
|
1204 | 1232 | }; |
|
1205 | 1233 | "pickleshare" = super.buildPythonPackage { |
|
1206 | 1234 | name = "pickleshare-0.7.5"; |
|
1207 | 1235 | doCheck = false; |
|
1208 | 1236 | propagatedBuildInputs = [ |
|
1209 | 1237 | self."pathlib2" |
|
1210 | 1238 | ]; |
|
1211 | 1239 | src = fetchurl { |
|
1212 | 1240 | url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz"; |
|
1213 | 1241 | sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47"; |
|
1214 | 1242 | }; |
|
1215 | 1243 | meta = { |
|
1216 | 1244 | license = [ pkgs.lib.licenses.mit ]; |
|
1217 | 1245 | }; |
|
1218 | 1246 | }; |
|
1219 | 1247 | "plaster" = super.buildPythonPackage { |
|
1220 | 1248 | name = "plaster-1.0"; |
|
1221 | 1249 | doCheck = false; |
|
1222 | 1250 | propagatedBuildInputs = [ |
|
1223 | 1251 | self."setuptools" |
|
1224 | 1252 | ]; |
|
1225 | 1253 | src = fetchurl { |
|
1226 | 1254 | url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz"; |
|
1227 | 1255 | sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3"; |
|
1228 | 1256 | }; |
|
1229 | 1257 | meta = { |
|
1230 | 1258 | license = [ pkgs.lib.licenses.mit ]; |
|
1231 | 1259 | }; |
|
1232 | 1260 | }; |
|
1233 | 1261 | "plaster-pastedeploy" = super.buildPythonPackage { |
|
1234 | 1262 | name = "plaster-pastedeploy-0.7"; |
|
1235 | 1263 | doCheck = false; |
|
1236 | 1264 | propagatedBuildInputs = [ |
|
1237 | 1265 | self."pastedeploy" |
|
1238 | 1266 | self."plaster" |
|
1239 | 1267 | ]; |
|
1240 | 1268 | src = fetchurl { |
|
1241 | 1269 | url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz"; |
|
1242 | 1270 | sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r"; |
|
1243 | 1271 | }; |
|
1244 | 1272 | meta = { |
|
1245 | 1273 | license = [ pkgs.lib.licenses.mit ]; |
|
1246 | 1274 | }; |
|
1247 | 1275 | }; |
|
1248 | 1276 | "pluggy" = super.buildPythonPackage { |
|
1249 | 1277 | name = "pluggy-0.13.1"; |
|
1250 | 1278 | doCheck = false; |
|
1251 | 1279 | propagatedBuildInputs = [ |
|
1252 | 1280 | self."importlib-metadata" |
|
1253 | 1281 | ]; |
|
1254 | 1282 | src = fetchurl { |
|
1255 | 1283 | url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz"; |
|
1256 | 1284 | sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm"; |
|
1257 | 1285 | }; |
|
1258 | 1286 | meta = { |
|
1259 | 1287 | license = [ pkgs.lib.licenses.mit ]; |
|
1260 | 1288 | }; |
|
1261 | 1289 | }; |
|
1262 | 1290 | "premailer" = super.buildPythonPackage { |
|
1263 | 1291 | name = "premailer-3.6.1"; |
|
1264 | 1292 | doCheck = false; |
|
1265 | 1293 | propagatedBuildInputs = [ |
|
1266 | 1294 | self."lxml" |
|
1267 | 1295 | self."cssselect" |
|
1268 | 1296 | self."cssutils" |
|
1269 | 1297 | self."requests" |
|
1270 | 1298 | self."cachetools" |
|
1271 | 1299 | ]; |
|
1272 | 1300 | src = fetchurl { |
|
1273 | 1301 | url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz"; |
|
1274 | 1302 | sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw"; |
|
1275 | 1303 | }; |
|
1276 | 1304 | meta = { |
|
1277 | 1305 | license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ]; |
|
1278 | 1306 | }; |
|
1279 | 1307 | }; |
|
1280 | 1308 | "prompt-toolkit" = super.buildPythonPackage { |
|
1281 | 1309 | name = "prompt-toolkit-1.0.18"; |
|
1282 | 1310 | doCheck = false; |
|
1283 | 1311 | propagatedBuildInputs = [ |
|
1284 | 1312 | self."six" |
|
1285 | 1313 | self."wcwidth" |
|
1286 | 1314 | ]; |
|
1287 | 1315 | src = fetchurl { |
|
1288 | 1316 | url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz"; |
|
1289 | 1317 | sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx"; |
|
1290 | 1318 | }; |
|
1291 | 1319 | meta = { |
|
1292 | 1320 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1293 | 1321 | }; |
|
1294 | 1322 | }; |
|
1295 | 1323 | "psutil" = super.buildPythonPackage { |
|
1296 | 1324 | name = "psutil-5.7.0"; |
|
1297 | 1325 | doCheck = false; |
|
1298 | 1326 | src = fetchurl { |
|
1299 | 1327 | url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz"; |
|
1300 | 1328 | sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8"; |
|
1301 | 1329 | }; |
|
1302 | 1330 | meta = { |
|
1303 | 1331 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1304 | 1332 | }; |
|
1305 | 1333 | }; |
|
1306 | 1334 | "psycopg2" = super.buildPythonPackage { |
|
1307 | 1335 | name = "psycopg2-2.8.4"; |
|
1308 | 1336 | doCheck = false; |
|
1309 | 1337 | src = fetchurl { |
|
1310 | 1338 | url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz"; |
|
1311 | 1339 | sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q"; |
|
1312 | 1340 | }; |
|
1313 | 1341 | meta = { |
|
1314 | 1342 | license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ]; |
|
1315 | 1343 | }; |
|
1316 | 1344 | }; |
|
1317 | 1345 | "ptyprocess" = super.buildPythonPackage { |
|
1318 | 1346 | name = "ptyprocess-0.6.0"; |
|
1319 | 1347 | doCheck = false; |
|
1320 | 1348 | src = fetchurl { |
|
1321 | 1349 | url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz"; |
|
1322 | 1350 | sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj"; |
|
1323 | 1351 | }; |
|
1324 | 1352 | meta = { |
|
1325 | 1353 | license = [ ]; |
|
1326 | 1354 | }; |
|
1327 | 1355 | }; |
|
1328 | 1356 | "py" = super.buildPythonPackage { |
|
1329 | 1357 | name = "py-1.8.0"; |
|
1330 | 1358 | doCheck = false; |
|
1331 | 1359 | src = fetchurl { |
|
1332 | 1360 | url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz"; |
|
1333 | 1361 | sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw"; |
|
1334 | 1362 | }; |
|
1335 | 1363 | meta = { |
|
1336 | 1364 | license = [ pkgs.lib.licenses.mit ]; |
|
1337 | 1365 | }; |
|
1338 | 1366 | }; |
|
1339 | 1367 | "py-bcrypt" = super.buildPythonPackage { |
|
1340 | 1368 | name = "py-bcrypt-0.4"; |
|
1341 | 1369 | doCheck = false; |
|
1342 | 1370 | src = fetchurl { |
|
1343 | 1371 | url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz"; |
|
1344 | 1372 | sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az"; |
|
1345 | 1373 | }; |
|
1346 | 1374 | meta = { |
|
1347 | 1375 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1348 | 1376 | }; |
|
1349 | 1377 | }; |
|
1350 | 1378 | "py-gfm" = super.buildPythonPackage { |
|
1351 | 1379 | name = "py-gfm-0.1.4"; |
|
1352 | 1380 | doCheck = false; |
|
1353 | 1381 | propagatedBuildInputs = [ |
|
1354 | 1382 | self."setuptools" |
|
1355 | 1383 | self."markdown" |
|
1356 | 1384 | ]; |
|
1357 | 1385 | src = fetchurl { |
|
1358 | 1386 | url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz"; |
|
1359 | 1387 | sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg"; |
|
1360 | 1388 | }; |
|
1361 | 1389 | meta = { |
|
1362 | 1390 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1363 | 1391 | }; |
|
1364 | 1392 | }; |
|
1365 | 1393 | "pyasn1" = super.buildPythonPackage { |
|
1366 | 1394 | name = "pyasn1-0.4.8"; |
|
1367 | 1395 | doCheck = false; |
|
1368 | 1396 | src = fetchurl { |
|
1369 | 1397 | url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz"; |
|
1370 | 1398 | sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf"; |
|
1371 | 1399 | }; |
|
1372 | 1400 | meta = { |
|
1373 | 1401 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1374 | 1402 | }; |
|
1375 | 1403 | }; |
|
1376 | 1404 | "pyasn1-modules" = super.buildPythonPackage { |
|
1377 | 1405 | name = "pyasn1-modules-0.2.6"; |
|
1378 | 1406 | doCheck = false; |
|
1379 | 1407 | propagatedBuildInputs = [ |
|
1380 | 1408 | self."pyasn1" |
|
1381 | 1409 | ]; |
|
1382 | 1410 | src = fetchurl { |
|
1383 | 1411 | url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz"; |
|
1384 | 1412 | sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3"; |
|
1385 | 1413 | }; |
|
1386 | 1414 | meta = { |
|
1387 | 1415 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ]; |
|
1388 | 1416 | }; |
|
1389 | 1417 | }; |
|
1390 | 1418 | "pycparser" = super.buildPythonPackage { |
|
1391 | 1419 | name = "pycparser-2.20"; |
|
1392 | 1420 | doCheck = false; |
|
1393 | 1421 | src = fetchurl { |
|
1394 | 1422 | url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz"; |
|
1395 | 1423 | sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird"; |
|
1396 | 1424 | }; |
|
1397 | 1425 | meta = { |
|
1398 | 1426 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1399 | 1427 | }; |
|
1400 | 1428 | }; |
|
1401 | 1429 | "pycrypto" = super.buildPythonPackage { |
|
1402 | 1430 | name = "pycrypto-2.6.1"; |
|
1403 | 1431 | doCheck = false; |
|
1404 | 1432 | src = fetchurl { |
|
1405 | 1433 | url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz"; |
|
1406 | 1434 | sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj"; |
|
1407 | 1435 | }; |
|
1408 | 1436 | meta = { |
|
1409 | 1437 | license = [ pkgs.lib.licenses.publicDomain ]; |
|
1410 | 1438 | }; |
|
1411 | 1439 | }; |
|
1412 | 1440 | "pycurl" = super.buildPythonPackage { |
|
1413 | 1441 | name = "pycurl-7.43.0.3"; |
|
1414 | 1442 | doCheck = false; |
|
1415 | 1443 | src = fetchurl { |
|
1416 | 1444 | url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz"; |
|
1417 | 1445 | sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g"; |
|
1418 | 1446 | }; |
|
1419 | 1447 | meta = { |
|
1420 | 1448 | license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
1421 | 1449 | }; |
|
1422 | 1450 | }; |
|
1423 | 1451 | "pygments" = super.buildPythonPackage { |
|
1424 | 1452 | name = "pygments-2.4.2"; |
|
1425 | 1453 | doCheck = false; |
|
1426 | 1454 | src = fetchurl { |
|
1427 | 1455 | url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"; |
|
1428 | 1456 | sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748"; |
|
1429 | 1457 | }; |
|
1430 | 1458 | meta = { |
|
1431 | 1459 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1432 | 1460 | }; |
|
1433 | 1461 | }; |
|
1434 | 1462 | "pymysql" = super.buildPythonPackage { |
|
1435 | 1463 | name = "pymysql-0.8.1"; |
|
1436 | 1464 | doCheck = false; |
|
1437 | 1465 | src = fetchurl { |
|
1438 | 1466 | url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz"; |
|
1439 | 1467 | sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l"; |
|
1440 | 1468 | }; |
|
1441 | 1469 | meta = { |
|
1442 | 1470 | license = [ pkgs.lib.licenses.mit ]; |
|
1443 | 1471 | }; |
|
1444 | 1472 | }; |
|
1445 | 1473 | "pyotp" = super.buildPythonPackage { |
|
1446 | 1474 | name = "pyotp-2.3.0"; |
|
1447 | 1475 | doCheck = false; |
|
1448 | 1476 | src = fetchurl { |
|
1449 | 1477 | url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz"; |
|
1450 | 1478 | sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw"; |
|
1451 | 1479 | }; |
|
1452 | 1480 | meta = { |
|
1453 | 1481 | license = [ pkgs.lib.licenses.mit ]; |
|
1454 | 1482 | }; |
|
1455 | 1483 | }; |
|
1456 | 1484 | "pyparsing" = super.buildPythonPackage { |
|
1457 | 1485 | name = "pyparsing-2.4.7"; |
|
1458 | 1486 | doCheck = false; |
|
1459 | 1487 | src = fetchurl { |
|
1460 | 1488 | url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz"; |
|
1461 | 1489 | sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2"; |
|
1462 | 1490 | }; |
|
1463 | 1491 | meta = { |
|
1464 | 1492 | license = [ pkgs.lib.licenses.mit ]; |
|
1465 | 1493 | }; |
|
1466 | 1494 | }; |
|
1467 | 1495 | "pyramid" = super.buildPythonPackage { |
|
1468 | 1496 | name = "pyramid-1.10.4"; |
|
1469 | 1497 | doCheck = false; |
|
1470 | 1498 | propagatedBuildInputs = [ |
|
1471 | 1499 | self."hupper" |
|
1472 | 1500 | self."plaster" |
|
1473 | 1501 | self."plaster-pastedeploy" |
|
1474 | 1502 | self."setuptools" |
|
1475 | 1503 | self."translationstring" |
|
1476 | 1504 | self."venusian" |
|
1477 | 1505 | self."webob" |
|
1478 | 1506 | self."zope.deprecation" |
|
1479 | 1507 | self."zope.interface" |
|
1480 | 1508 | self."repoze.lru" |
|
1481 | 1509 | ]; |
|
1482 | 1510 | src = fetchurl { |
|
1483 | 1511 | url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz"; |
|
1484 | 1512 | sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q"; |
|
1485 | 1513 | }; |
|
1486 | 1514 | meta = { |
|
1487 | 1515 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1488 | 1516 | }; |
|
1489 | 1517 | }; |
|
1490 | 1518 | "pyramid-debugtoolbar" = super.buildPythonPackage { |
|
1491 | 1519 | name = "pyramid-debugtoolbar-4.6.1"; |
|
1492 | 1520 | doCheck = false; |
|
1493 | 1521 | propagatedBuildInputs = [ |
|
1494 | 1522 | self."pyramid" |
|
1495 | 1523 | self."pyramid-mako" |
|
1496 | 1524 | self."repoze.lru" |
|
1497 | 1525 | self."pygments" |
|
1498 | 1526 | self."ipaddress" |
|
1499 | 1527 | ]; |
|
1500 | 1528 | src = fetchurl { |
|
1501 | 1529 | url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz"; |
|
1502 | 1530 | sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv"; |
|
1503 | 1531 | }; |
|
1504 | 1532 | meta = { |
|
1505 | 1533 | license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ]; |
|
1506 | 1534 | }; |
|
1507 | 1535 | }; |
|
1508 | 1536 | "pyramid-jinja2" = super.buildPythonPackage { |
|
1509 | 1537 | name = "pyramid-jinja2-2.7"; |
|
1510 | 1538 | doCheck = false; |
|
1511 | 1539 | propagatedBuildInputs = [ |
|
1512 | 1540 | self."pyramid" |
|
1513 | 1541 | self."zope.deprecation" |
|
1514 | 1542 | self."jinja2" |
|
1515 | 1543 | self."markupsafe" |
|
1516 | 1544 | ]; |
|
1517 | 1545 | src = fetchurl { |
|
1518 | 1546 | url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz"; |
|
1519 | 1547 | sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw"; |
|
1520 | 1548 | }; |
|
1521 | 1549 | meta = { |
|
1522 | 1550 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1523 | 1551 | }; |
|
1524 | 1552 | }; |
|
1553 | "pyramid-apispec" = super.buildPythonPackage { | |
|
1554 | name = "pyramid-apispec-0.3.2"; | |
|
1555 | doCheck = false; | |
|
1556 | propagatedBuildInputs = [ | |
|
1557 | self."apispec" | |
|
1558 | ]; | |
|
1559 | src = fetchurl { | |
|
1560 | url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz"; | |
|
1561 | sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0"; | |
|
1562 | }; | |
|
1563 | meta = { | |
|
1564 | license = [ pkgs.lib.licenses.bsdOriginal ]; | |
|
1565 | }; | |
|
1566 | }; | |
|
1525 | 1567 | "pyramid-mailer" = super.buildPythonPackage { |
|
1526 | 1568 | name = "pyramid-mailer-0.15.1"; |
|
1527 | 1569 | doCheck = false; |
|
1528 | 1570 | propagatedBuildInputs = [ |
|
1529 | 1571 | self."pyramid" |
|
1530 | 1572 | self."repoze.sendmail" |
|
1531 | 1573 | self."transaction" |
|
1532 | 1574 | ]; |
|
1533 | 1575 | src = fetchurl { |
|
1534 | 1576 | url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz"; |
|
1535 | 1577 | sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc"; |
|
1536 | 1578 | }; |
|
1537 | 1579 | meta = { |
|
1538 | 1580 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1539 | 1581 | }; |
|
1540 | 1582 | }; |
|
1541 | 1583 | "pyramid-mako" = super.buildPythonPackage { |
|
1542 | 1584 | name = "pyramid-mako-1.1.0"; |
|
1543 | 1585 | doCheck = false; |
|
1544 | 1586 | propagatedBuildInputs = [ |
|
1545 | 1587 | self."pyramid" |
|
1546 | 1588 | self."mako" |
|
1547 | 1589 | ]; |
|
1548 | 1590 | src = fetchurl { |
|
1549 | 1591 | url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz"; |
|
1550 | 1592 | sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0"; |
|
1551 | 1593 | }; |
|
1552 | 1594 | meta = { |
|
1553 | 1595 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1554 | 1596 | }; |
|
1555 | 1597 | }; |
|
1556 | 1598 | "pysqlite" = super.buildPythonPackage { |
|
1557 | 1599 | name = "pysqlite-2.8.3"; |
|
1558 | 1600 | doCheck = false; |
|
1559 | 1601 | src = fetchurl { |
|
1560 | 1602 | url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz"; |
|
1561 | 1603 | sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp"; |
|
1562 | 1604 | }; |
|
1563 | 1605 | meta = { |
|
1564 | 1606 | license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ]; |
|
1565 | 1607 | }; |
|
1566 | 1608 | }; |
|
1567 | 1609 | "pytest" = super.buildPythonPackage { |
|
1568 | 1610 | name = "pytest-4.6.5"; |
|
1569 | 1611 | doCheck = false; |
|
1570 | 1612 | propagatedBuildInputs = [ |
|
1571 | 1613 | self."py" |
|
1572 | 1614 | self."six" |
|
1573 | 1615 | self."packaging" |
|
1574 | 1616 | self."attrs" |
|
1575 | 1617 | self."atomicwrites" |
|
1576 | 1618 | self."pluggy" |
|
1577 | 1619 | self."importlib-metadata" |
|
1578 | 1620 | self."wcwidth" |
|
1579 | 1621 | self."funcsigs" |
|
1580 | 1622 | self."pathlib2" |
|
1581 | 1623 | self."more-itertools" |
|
1582 | 1624 | ]; |
|
1583 | 1625 | src = fetchurl { |
|
1584 | 1626 | url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz"; |
|
1585 | 1627 | sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg"; |
|
1586 | 1628 | }; |
|
1587 | 1629 | meta = { |
|
1588 | 1630 | license = [ pkgs.lib.licenses.mit ]; |
|
1589 | 1631 | }; |
|
1590 | 1632 | }; |
|
1591 | 1633 | "pytest-cov" = super.buildPythonPackage { |
|
1592 | 1634 | name = "pytest-cov-2.7.1"; |
|
1593 | 1635 | doCheck = false; |
|
1594 | 1636 | propagatedBuildInputs = [ |
|
1595 | 1637 | self."pytest" |
|
1596 | 1638 | self."coverage" |
|
1597 | 1639 | ]; |
|
1598 | 1640 | src = fetchurl { |
|
1599 | 1641 | url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz"; |
|
1600 | 1642 | sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0"; |
|
1601 | 1643 | }; |
|
1602 | 1644 | meta = { |
|
1603 | 1645 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ]; |
|
1604 | 1646 | }; |
|
1605 | 1647 | }; |
|
1606 | 1648 | "pytest-profiling" = super.buildPythonPackage { |
|
1607 | 1649 | name = "pytest-profiling-1.7.0"; |
|
1608 | 1650 | doCheck = false; |
|
1609 | 1651 | propagatedBuildInputs = [ |
|
1610 | 1652 | self."six" |
|
1611 | 1653 | self."pytest" |
|
1612 | 1654 | self."gprof2dot" |
|
1613 | 1655 | ]; |
|
1614 | 1656 | src = fetchurl { |
|
1615 | 1657 | url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz"; |
|
1616 | 1658 | sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk"; |
|
1617 | 1659 | }; |
|
1618 | 1660 | meta = { |
|
1619 | 1661 | license = [ pkgs.lib.licenses.mit ]; |
|
1620 | 1662 | }; |
|
1621 | 1663 | }; |
|
1622 | 1664 | "pytest-runner" = super.buildPythonPackage { |
|
1623 | 1665 | name = "pytest-runner-5.1"; |
|
1624 | 1666 | doCheck = false; |
|
1625 | 1667 | src = fetchurl { |
|
1626 | 1668 | url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz"; |
|
1627 | 1669 | sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815"; |
|
1628 | 1670 | }; |
|
1629 | 1671 | meta = { |
|
1630 | 1672 | license = [ pkgs.lib.licenses.mit ]; |
|
1631 | 1673 | }; |
|
1632 | 1674 | }; |
|
1633 | 1675 | "pytest-sugar" = super.buildPythonPackage { |
|
1634 | 1676 | name = "pytest-sugar-0.9.2"; |
|
1635 | 1677 | doCheck = false; |
|
1636 | 1678 | propagatedBuildInputs = [ |
|
1637 | 1679 | self."pytest" |
|
1638 | 1680 | self."termcolor" |
|
1639 | 1681 | self."packaging" |
|
1640 | 1682 | ]; |
|
1641 | 1683 | src = fetchurl { |
|
1642 | 1684 | url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz"; |
|
1643 | 1685 | sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w"; |
|
1644 | 1686 | }; |
|
1645 | 1687 | meta = { |
|
1646 | 1688 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
1647 | 1689 | }; |
|
1648 | 1690 | }; |
|
1649 | 1691 | "pytest-timeout" = super.buildPythonPackage { |
|
1650 | 1692 | name = "pytest-timeout-1.3.3"; |
|
1651 | 1693 | doCheck = false; |
|
1652 | 1694 | propagatedBuildInputs = [ |
|
1653 | 1695 | self."pytest" |
|
1654 | 1696 | ]; |
|
1655 | 1697 | src = fetchurl { |
|
1656 | 1698 | url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz"; |
|
1657 | 1699 | sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a"; |
|
1658 | 1700 | }; |
|
1659 | 1701 | meta = { |
|
1660 | 1702 | license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ]; |
|
1661 | 1703 | }; |
|
1662 | 1704 | }; |
|
1663 | 1705 | "python-dateutil" = super.buildPythonPackage { |
|
1664 | 1706 | name = "python-dateutil-2.8.1"; |
|
1665 | 1707 | doCheck = false; |
|
1666 | 1708 | propagatedBuildInputs = [ |
|
1667 | 1709 | self."six" |
|
1668 | 1710 | ]; |
|
1669 | 1711 | src = fetchurl { |
|
1670 | 1712 | url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"; |
|
1671 | 1713 | sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk"; |
|
1672 | 1714 | }; |
|
1673 | 1715 | meta = { |
|
1674 | 1716 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ]; |
|
1675 | 1717 | }; |
|
1676 | 1718 | }; |
|
1677 | 1719 | "python-editor" = super.buildPythonPackage { |
|
1678 | 1720 | name = "python-editor-1.0.4"; |
|
1679 | 1721 | doCheck = false; |
|
1680 | 1722 | src = fetchurl { |
|
1681 | 1723 | url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz"; |
|
1682 | 1724 | sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai"; |
|
1683 | 1725 | }; |
|
1684 | 1726 | meta = { |
|
1685 | 1727 | license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ]; |
|
1686 | 1728 | }; |
|
1687 | 1729 | }; |
|
1688 | 1730 | "python-ldap" = super.buildPythonPackage { |
|
1689 | 1731 | name = "python-ldap-3.2.0"; |
|
1690 | 1732 | doCheck = false; |
|
1691 | 1733 | propagatedBuildInputs = [ |
|
1692 | 1734 | self."pyasn1" |
|
1693 | 1735 | self."pyasn1-modules" |
|
1694 | 1736 | ]; |
|
1695 | 1737 | src = fetchurl { |
|
1696 | 1738 | url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz"; |
|
1697 | 1739 | sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x"; |
|
1698 | 1740 | }; |
|
1699 | 1741 | meta = { |
|
1700 | 1742 | license = [ pkgs.lib.licenses.psfl ]; |
|
1701 | 1743 | }; |
|
1702 | 1744 | }; |
|
1703 | 1745 | "python-memcached" = super.buildPythonPackage { |
|
1704 | 1746 | name = "python-memcached-1.59"; |
|
1705 | 1747 | doCheck = false; |
|
1706 | 1748 | propagatedBuildInputs = [ |
|
1707 | 1749 | self."six" |
|
1708 | 1750 | ]; |
|
1709 | 1751 | src = fetchurl { |
|
1710 | 1752 | url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz"; |
|
1711 | 1753 | sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2"; |
|
1712 | 1754 | }; |
|
1713 | 1755 | meta = { |
|
1714 | 1756 | license = [ pkgs.lib.licenses.psfl ]; |
|
1715 | 1757 | }; |
|
1716 | 1758 | }; |
|
1717 | 1759 | "python-pam" = super.buildPythonPackage { |
|
1718 | 1760 | name = "python-pam-1.8.4"; |
|
1719 | 1761 | doCheck = false; |
|
1720 | 1762 | src = fetchurl { |
|
1721 | 1763 | url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz"; |
|
1722 | 1764 | sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8"; |
|
1723 | 1765 | }; |
|
1724 | 1766 | meta = { |
|
1725 | 1767 | license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ]; |
|
1726 | 1768 | }; |
|
1727 | 1769 | }; |
|
1728 | 1770 | "python-saml" = super.buildPythonPackage { |
|
1729 | 1771 | name = "python-saml-2.4.2"; |
|
1730 | 1772 | doCheck = false; |
|
1731 | 1773 | propagatedBuildInputs = [ |
|
1732 | 1774 | self."dm.xmlsec.binding" |
|
1733 | 1775 | self."isodate" |
|
1734 | 1776 | self."defusedxml" |
|
1735 | 1777 | ]; |
|
1736 | 1778 | src = fetchurl { |
|
1737 | 1779 | url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz"; |
|
1738 | 1780 | sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43"; |
|
1739 | 1781 | }; |
|
1740 | 1782 | meta = { |
|
1741 | 1783 | license = [ pkgs.lib.licenses.mit ]; |
|
1742 | 1784 | }; |
|
1743 | 1785 | }; |
|
1744 | 1786 | "pytz" = super.buildPythonPackage { |
|
1745 | 1787 | name = "pytz-2019.3"; |
|
1746 | 1788 | doCheck = false; |
|
1747 | 1789 | src = fetchurl { |
|
1748 | 1790 | url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz"; |
|
1749 | 1791 | sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h"; |
|
1750 | 1792 | }; |
|
1751 | 1793 | meta = { |
|
1752 | 1794 | license = [ pkgs.lib.licenses.mit ]; |
|
1753 | 1795 | }; |
|
1754 | 1796 | }; |
|
1755 | 1797 | "pyzmq" = super.buildPythonPackage { |
|
1756 | 1798 | name = "pyzmq-14.6.0"; |
|
1757 | 1799 | doCheck = false; |
|
1758 | 1800 | src = fetchurl { |
|
1759 | 1801 | url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz"; |
|
1760 | 1802 | sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp"; |
|
1761 | 1803 | }; |
|
1762 | 1804 | meta = { |
|
1763 | 1805 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; |
|
1764 | 1806 | }; |
|
1765 | 1807 | }; |
|
1808 | "PyYAML" = super.buildPythonPackage { | |
|
1809 | name = "PyYAML-5.3.1"; | |
|
1810 | doCheck = false; | |
|
1811 | src = fetchurl { | |
|
1812 | url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"; | |
|
1813 | sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq"; | |
|
1814 | }; | |
|
1815 | meta = { | |
|
1816 | license = [ pkgs.lib.licenses.mit ]; | |
|
1817 | }; | |
|
1818 | }; | |
|
1766 | 1819 | "redis" = super.buildPythonPackage { |
|
1767 | 1820 | name = "redis-3.4.1"; |
|
1768 | 1821 | doCheck = false; |
|
1769 | 1822 | src = fetchurl { |
|
1770 | 1823 | url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz"; |
|
1771 | 1824 | sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd"; |
|
1772 | 1825 | }; |
|
1773 | 1826 | meta = { |
|
1774 | 1827 | license = [ pkgs.lib.licenses.mit ]; |
|
1775 | 1828 | }; |
|
1776 | 1829 | }; |
|
1777 | 1830 | "repoze.lru" = super.buildPythonPackage { |
|
1778 | 1831 | name = "repoze.lru-0.7"; |
|
1779 | 1832 | doCheck = false; |
|
1780 | 1833 | src = fetchurl { |
|
1781 | 1834 | url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz"; |
|
1782 | 1835 | sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84"; |
|
1783 | 1836 | }; |
|
1784 | 1837 | meta = { |
|
1785 | 1838 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
1786 | 1839 | }; |
|
1787 | 1840 | }; |
|
1788 | 1841 | "repoze.sendmail" = super.buildPythonPackage { |
|
1789 | 1842 | name = "repoze.sendmail-4.4.1"; |
|
1790 | 1843 | doCheck = false; |
|
1791 | 1844 | propagatedBuildInputs = [ |
|
1792 | 1845 | self."setuptools" |
|
1793 | 1846 | self."zope.interface" |
|
1794 | 1847 | self."transaction" |
|
1795 | 1848 | ]; |
|
1796 | 1849 | src = fetchurl { |
|
1797 | 1850 | url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz"; |
|
1798 | 1851 | sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks"; |
|
1799 | 1852 | }; |
|
1800 | 1853 | meta = { |
|
1801 | 1854 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
1802 | 1855 | }; |
|
1803 | 1856 | }; |
|
1804 | 1857 | "requests" = super.buildPythonPackage { |
|
1805 | 1858 | name = "requests-2.22.0"; |
|
1806 | 1859 | doCheck = false; |
|
1807 | 1860 | propagatedBuildInputs = [ |
|
1808 | 1861 | self."chardet" |
|
1809 | 1862 | self."idna" |
|
1810 | 1863 | self."urllib3" |
|
1811 | 1864 | self."certifi" |
|
1812 | 1865 | ]; |
|
1813 | 1866 | src = fetchurl { |
|
1814 | 1867 | url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"; |
|
1815 | 1868 | sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i"; |
|
1816 | 1869 | }; |
|
1817 | 1870 | meta = { |
|
1818 | 1871 | license = [ pkgs.lib.licenses.asl20 ]; |
|
1819 | 1872 | }; |
|
1820 | 1873 | }; |
|
1821 | 1874 | "rhodecode-enterprise-ce" = super.buildPythonPackage { |
|
1822 | name = "rhodecode-enterprise-ce-4.20. | |
|
1875 | name = "rhodecode-enterprise-ce-4.20.0"; | |
|
1823 | 1876 | buildInputs = [ |
|
1824 | 1877 | self."pytest" |
|
1825 | 1878 | self."py" |
|
1826 | 1879 | self."pytest-cov" |
|
1827 | 1880 | self."pytest-sugar" |
|
1828 | 1881 | self."pytest-runner" |
|
1829 | 1882 | self."pytest-profiling" |
|
1830 | 1883 | self."pytest-timeout" |
|
1831 | 1884 | self."gprof2dot" |
|
1832 | 1885 | self."mock" |
|
1833 | 1886 | self."cov-core" |
|
1834 | 1887 | self."coverage" |
|
1835 | 1888 | self."webtest" |
|
1836 | 1889 | self."beautifulsoup4" |
|
1837 | 1890 | self."configobj" |
|
1838 | 1891 | ]; |
|
1839 | 1892 | doCheck = true; |
|
1840 | 1893 | propagatedBuildInputs = [ |
|
1841 | 1894 | self."amqp" |
|
1842 | 1895 | self."babel" |
|
1843 | 1896 | self."beaker" |
|
1844 | 1897 | self."bleach" |
|
1845 | 1898 | self."celery" |
|
1846 | 1899 | self."channelstream" |
|
1847 | 1900 | self."click" |
|
1848 | 1901 | self."colander" |
|
1849 | 1902 | self."configobj" |
|
1850 | 1903 | self."cssselect" |
|
1851 | 1904 | self."cryptography" |
|
1852 | 1905 | self."decorator" |
|
1853 | 1906 | self."deform" |
|
1854 | 1907 | self."docutils" |
|
1855 | 1908 | self."dogpile.cache" |
|
1856 | 1909 | self."dogpile.core" |
|
1857 | 1910 | self."formencode" |
|
1858 | 1911 | self."future" |
|
1859 | 1912 | self."futures" |
|
1860 | 1913 | self."infrae.cache" |
|
1861 | 1914 | self."iso8601" |
|
1862 | 1915 | self."itsdangerous" |
|
1863 | 1916 | self."kombu" |
|
1864 | 1917 | self."lxml" |
|
1865 | 1918 | self."mako" |
|
1866 | 1919 | self."markdown" |
|
1867 | 1920 | self."markupsafe" |
|
1868 | 1921 | self."msgpack-python" |
|
1869 | 1922 | self."pyotp" |
|
1870 | 1923 | self."packaging" |
|
1871 | 1924 | self."pathlib2" |
|
1872 | 1925 | self."paste" |
|
1873 | 1926 | self."pastedeploy" |
|
1874 | 1927 | self."pastescript" |
|
1875 | 1928 | self."peppercorn" |
|
1876 | 1929 | self."premailer" |
|
1877 | 1930 | self."psutil" |
|
1878 | 1931 | self."py-bcrypt" |
|
1879 | 1932 | self."pycurl" |
|
1880 | 1933 | self."pycrypto" |
|
1881 | 1934 | self."pygments" |
|
1882 | 1935 | self."pyparsing" |
|
1883 | 1936 | self."pyramid-debugtoolbar" |
|
1884 | 1937 | self."pyramid-mako" |
|
1885 | 1938 | self."pyramid" |
|
1886 | 1939 | self."pyramid-mailer" |
|
1887 | 1940 | self."python-dateutil" |
|
1888 | 1941 | self."python-ldap" |
|
1889 | 1942 | self."python-memcached" |
|
1890 | 1943 | self."python-pam" |
|
1891 | 1944 | self."python-saml" |
|
1892 | 1945 | self."pytz" |
|
1893 | 1946 | self."tzlocal" |
|
1894 | 1947 | self."pyzmq" |
|
1895 | 1948 | self."py-gfm" |
|
1896 | 1949 | self."redis" |
|
1897 | 1950 | self."repoze.lru" |
|
1898 | 1951 | self."requests" |
|
1899 | 1952 | self."routes" |
|
1900 | 1953 | self."simplejson" |
|
1901 | 1954 | self."six" |
|
1902 | 1955 | self."sqlalchemy" |
|
1903 | 1956 | self."sshpubkeys" |
|
1904 | 1957 | self."subprocess32" |
|
1905 | 1958 | self."supervisor" |
|
1906 | 1959 | self."translationstring" |
|
1907 | 1960 | self."urllib3" |
|
1908 | 1961 | self."urlobject" |
|
1909 | 1962 | self."venusian" |
|
1910 | 1963 | self."weberror" |
|
1911 | 1964 | self."webhelpers2" |
|
1912 | 1965 | self."webob" |
|
1913 | 1966 | self."whoosh" |
|
1914 | 1967 | self."wsgiref" |
|
1915 | 1968 | self."zope.cachedescriptors" |
|
1916 | 1969 | self."zope.deprecation" |
|
1917 | 1970 | self."zope.event" |
|
1918 | 1971 | self."zope.interface" |
|
1919 | 1972 | self."mysql-python" |
|
1920 | 1973 | self."pymysql" |
|
1921 | 1974 | self."pysqlite" |
|
1922 | 1975 | self."psycopg2" |
|
1923 | 1976 | self."nbconvert" |
|
1924 | 1977 | self."nbformat" |
|
1925 | 1978 | self."jupyter-client" |
|
1926 | 1979 | self."jupyter-core" |
|
1927 | 1980 | self."alembic" |
|
1928 | 1981 | self."invoke" |
|
1929 | 1982 | self."bumpversion" |
|
1930 | 1983 | self."gevent" |
|
1931 | 1984 | self."greenlet" |
|
1932 | 1985 | self."gunicorn" |
|
1933 | 1986 | self."waitress" |
|
1934 | 1987 | self."ipdb" |
|
1935 | 1988 | self."ipython" |
|
1936 | 1989 | self."rhodecode-tools" |
|
1937 | 1990 | self."appenlight-client" |
|
1938 | 1991 | self."pytest" |
|
1939 | 1992 | self."py" |
|
1940 | 1993 | self."pytest-cov" |
|
1941 | 1994 | self."pytest-sugar" |
|
1942 | 1995 | self."pytest-runner" |
|
1943 | 1996 | self."pytest-profiling" |
|
1944 | 1997 | self."pytest-timeout" |
|
1945 | 1998 | self."gprof2dot" |
|
1946 | 1999 | self."mock" |
|
1947 | 2000 | self."cov-core" |
|
1948 | 2001 | self."coverage" |
|
1949 | 2002 | self."webtest" |
|
1950 | 2003 | self."beautifulsoup4" |
|
1951 | 2004 | ]; |
|
1952 | 2005 | src = ./.; |
|
1953 | 2006 | meta = { |
|
1954 | 2007 | license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ]; |
|
1955 | 2008 | }; |
|
1956 | 2009 | }; |
|
1957 | 2010 | "rhodecode-tools" = super.buildPythonPackage { |
|
1958 | 2011 | name = "rhodecode-tools-1.4.0"; |
|
1959 | 2012 | doCheck = false; |
|
1960 | 2013 | propagatedBuildInputs = [ |
|
1961 | 2014 | self."click" |
|
1962 | 2015 | self."future" |
|
1963 | 2016 | self."six" |
|
1964 | 2017 | self."mako" |
|
1965 | 2018 | self."markupsafe" |
|
1966 | 2019 | self."requests" |
|
1967 | 2020 | self."urllib3" |
|
1968 | 2021 | self."whoosh" |
|
1969 | 2022 | self."elasticsearch" |
|
1970 | 2023 | self."elasticsearch-dsl" |
|
1971 | 2024 | self."elasticsearch2" |
|
1972 | 2025 | self."elasticsearch1-dsl" |
|
1973 | 2026 | ]; |
|
1974 | 2027 | src = fetchurl { |
|
1975 | 2028 | url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a"; |
|
1976 | 2029 | sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n"; |
|
1977 | 2030 | }; |
|
1978 | 2031 | meta = { |
|
1979 | 2032 | license = [ { fullName = "Apache 2.0 and Proprietary"; } ]; |
|
1980 | 2033 | }; |
|
1981 | 2034 | }; |
|
1982 | 2035 | "routes" = super.buildPythonPackage { |
|
1983 | 2036 | name = "routes-2.4.1"; |
|
1984 | 2037 | doCheck = false; |
|
1985 | 2038 | propagatedBuildInputs = [ |
|
1986 | 2039 | self."six" |
|
1987 | 2040 | self."repoze.lru" |
|
1988 | 2041 | ]; |
|
1989 | 2042 | src = fetchurl { |
|
1990 | 2043 | url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz"; |
|
1991 | 2044 | sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6"; |
|
1992 | 2045 | }; |
|
1993 | 2046 | meta = { |
|
1994 | 2047 | license = [ pkgs.lib.licenses.mit ]; |
|
1995 | 2048 | }; |
|
1996 | 2049 | }; |
|
1997 | 2050 | "scandir" = super.buildPythonPackage { |
|
1998 | 2051 | name = "scandir-1.10.0"; |
|
1999 | 2052 | doCheck = false; |
|
2000 | 2053 | src = fetchurl { |
|
2001 | 2054 | url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz"; |
|
2002 | 2055 | sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd"; |
|
2003 | 2056 | }; |
|
2004 | 2057 | meta = { |
|
2005 | 2058 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ]; |
|
2006 | 2059 | }; |
|
2007 | 2060 | }; |
|
2008 | 2061 | "setproctitle" = super.buildPythonPackage { |
|
2009 | 2062 | name = "setproctitle-1.1.10"; |
|
2010 | 2063 | doCheck = false; |
|
2011 | 2064 | src = fetchurl { |
|
2012 | 2065 | url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz"; |
|
2013 | 2066 | sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2"; |
|
2014 | 2067 | }; |
|
2015 | 2068 | meta = { |
|
2016 | 2069 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
2017 | 2070 | }; |
|
2018 | 2071 | }; |
|
2019 | 2072 | "setuptools" = super.buildPythonPackage { |
|
2020 | 2073 | name = "setuptools-44.1.0"; |
|
2021 | 2074 | doCheck = false; |
|
2022 | 2075 | src = fetchurl { |
|
2023 | 2076 | url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip"; |
|
2024 | 2077 | sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr"; |
|
2025 | 2078 | }; |
|
2026 | 2079 | meta = { |
|
2027 | 2080 | license = [ pkgs.lib.licenses.mit ]; |
|
2028 | 2081 | }; |
|
2029 | 2082 | }; |
|
2030 | 2083 | "simplegeneric" = super.buildPythonPackage { |
|
2031 | 2084 | name = "simplegeneric-0.8.1"; |
|
2032 | 2085 | doCheck = false; |
|
2033 | 2086 | src = fetchurl { |
|
2034 | 2087 | url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip"; |
|
2035 | 2088 | sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw"; |
|
2036 | 2089 | }; |
|
2037 | 2090 | meta = { |
|
2038 | 2091 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2039 | 2092 | }; |
|
2040 | 2093 | }; |
|
2041 | 2094 | "simplejson" = super.buildPythonPackage { |
|
2042 | 2095 | name = "simplejson-3.16.0"; |
|
2043 | 2096 | doCheck = false; |
|
2044 | 2097 | src = fetchurl { |
|
2045 | 2098 | url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz"; |
|
2046 | 2099 | sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi"; |
|
2047 | 2100 | }; |
|
2048 | 2101 | meta = { |
|
2049 | 2102 | license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ]; |
|
2050 | 2103 | }; |
|
2051 | 2104 | }; |
|
2052 | 2105 | "six" = super.buildPythonPackage { |
|
2053 | 2106 | name = "six-1.11.0"; |
|
2054 | 2107 | doCheck = false; |
|
2055 | 2108 | src = fetchurl { |
|
2056 | 2109 | url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"; |
|
2057 | 2110 | sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h"; |
|
2058 | 2111 | }; |
|
2059 | 2112 | meta = { |
|
2060 | 2113 | license = [ pkgs.lib.licenses.mit ]; |
|
2061 | 2114 | }; |
|
2062 | 2115 | }; |
|
2063 | 2116 | "sqlalchemy" = super.buildPythonPackage { |
|
2064 | 2117 | name = "sqlalchemy-1.3.15"; |
|
2065 | 2118 | doCheck = false; |
|
2066 | 2119 | src = fetchurl { |
|
2067 | 2120 | url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz"; |
|
2068 | 2121 | sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64"; |
|
2069 | 2122 | }; |
|
2070 | 2123 | meta = { |
|
2071 | 2124 | license = [ pkgs.lib.licenses.mit ]; |
|
2072 | 2125 | }; |
|
2073 | 2126 | }; |
|
2074 | 2127 | "sshpubkeys" = super.buildPythonPackage { |
|
2075 | 2128 | name = "sshpubkeys-3.1.0"; |
|
2076 | 2129 | doCheck = false; |
|
2077 | 2130 | propagatedBuildInputs = [ |
|
2078 | 2131 | self."cryptography" |
|
2079 | 2132 | self."ecdsa" |
|
2080 | 2133 | ]; |
|
2081 | 2134 | src = fetchurl { |
|
2082 | 2135 | url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz"; |
|
2083 | 2136 | sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k"; |
|
2084 | 2137 | }; |
|
2085 | 2138 | meta = { |
|
2086 | 2139 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
2087 | 2140 | }; |
|
2088 | 2141 | }; |
|
2089 | 2142 | "subprocess32" = super.buildPythonPackage { |
|
2090 | 2143 | name = "subprocess32-3.5.4"; |
|
2091 | 2144 | doCheck = false; |
|
2092 | 2145 | src = fetchurl { |
|
2093 | 2146 | url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz"; |
|
2094 | 2147 | sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb"; |
|
2095 | 2148 | }; |
|
2096 | 2149 | meta = { |
|
2097 | 2150 | license = [ pkgs.lib.licenses.psfl ]; |
|
2098 | 2151 | }; |
|
2099 | 2152 | }; |
|
2100 | 2153 | "supervisor" = super.buildPythonPackage { |
|
2101 | 2154 | name = "supervisor-4.1.0"; |
|
2102 | 2155 | doCheck = false; |
|
2103 | 2156 | src = fetchurl { |
|
2104 | 2157 | url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz"; |
|
2105 | 2158 | sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d"; |
|
2106 | 2159 | }; |
|
2107 | 2160 | meta = { |
|
2108 | 2161 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
2109 | 2162 | }; |
|
2110 | 2163 | }; |
|
2111 | 2164 | "tempita" = super.buildPythonPackage { |
|
2112 | 2165 | name = "tempita-0.5.2"; |
|
2113 | 2166 | doCheck = false; |
|
2114 | 2167 | src = fetchurl { |
|
2115 | 2168 | url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz"; |
|
2116 | 2169 | sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna"; |
|
2117 | 2170 | }; |
|
2118 | 2171 | meta = { |
|
2119 | 2172 | license = [ pkgs.lib.licenses.mit ]; |
|
2120 | 2173 | }; |
|
2121 | 2174 | }; |
|
2122 | 2175 | "termcolor" = super.buildPythonPackage { |
|
2123 | 2176 | name = "termcolor-1.1.0"; |
|
2124 | 2177 | doCheck = false; |
|
2125 | 2178 | src = fetchurl { |
|
2126 | 2179 | url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz"; |
|
2127 | 2180 | sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x"; |
|
2128 | 2181 | }; |
|
2129 | 2182 | meta = { |
|
2130 | 2183 | license = [ pkgs.lib.licenses.mit ]; |
|
2131 | 2184 | }; |
|
2132 | 2185 | }; |
|
2133 | 2186 | "testpath" = super.buildPythonPackage { |
|
2134 | 2187 | name = "testpath-0.4.4"; |
|
2135 | 2188 | doCheck = false; |
|
2136 | 2189 | src = fetchurl { |
|
2137 | 2190 | url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz"; |
|
2138 | 2191 | sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30"; |
|
2139 | 2192 | }; |
|
2140 | 2193 | meta = { |
|
2141 | 2194 | license = [ ]; |
|
2142 | 2195 | }; |
|
2143 | 2196 | }; |
|
2144 | 2197 | "traitlets" = super.buildPythonPackage { |
|
2145 | 2198 | name = "traitlets-4.3.3"; |
|
2146 | 2199 | doCheck = false; |
|
2147 | 2200 | propagatedBuildInputs = [ |
|
2148 | 2201 | self."ipython-genutils" |
|
2149 | 2202 | self."six" |
|
2150 | 2203 | self."decorator" |
|
2151 | 2204 | self."enum34" |
|
2152 | 2205 | ]; |
|
2153 | 2206 | src = fetchurl { |
|
2154 | 2207 | url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz"; |
|
2155 | 2208 | sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh"; |
|
2156 | 2209 | }; |
|
2157 | 2210 | meta = { |
|
2158 | 2211 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
2159 | 2212 | }; |
|
2160 | 2213 | }; |
|
2161 | 2214 | "transaction" = super.buildPythonPackage { |
|
2162 | 2215 | name = "transaction-2.4.0"; |
|
2163 | 2216 | doCheck = false; |
|
2164 | 2217 | propagatedBuildInputs = [ |
|
2165 | 2218 | self."zope.interface" |
|
2166 | 2219 | ]; |
|
2167 | 2220 | src = fetchurl { |
|
2168 | 2221 | url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz"; |
|
2169 | 2222 | sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j"; |
|
2170 | 2223 | }; |
|
2171 | 2224 | meta = { |
|
2172 | 2225 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2173 | 2226 | }; |
|
2174 | 2227 | }; |
|
2175 | 2228 | "translationstring" = super.buildPythonPackage { |
|
2176 | 2229 | name = "translationstring-1.3"; |
|
2177 | 2230 | doCheck = false; |
|
2178 | 2231 | src = fetchurl { |
|
2179 | 2232 | url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz"; |
|
2180 | 2233 | sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f"; |
|
2181 | 2234 | }; |
|
2182 | 2235 | meta = { |
|
2183 | 2236 | license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ]; |
|
2184 | 2237 | }; |
|
2185 | 2238 | }; |
|
2186 | 2239 | "tzlocal" = super.buildPythonPackage { |
|
2187 | 2240 | name = "tzlocal-1.5.1"; |
|
2188 | 2241 | doCheck = false; |
|
2189 | 2242 | propagatedBuildInputs = [ |
|
2190 | 2243 | self."pytz" |
|
2191 | 2244 | ]; |
|
2192 | 2245 | src = fetchurl { |
|
2193 | 2246 | url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz"; |
|
2194 | 2247 | sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf"; |
|
2195 | 2248 | }; |
|
2196 | 2249 | meta = { |
|
2197 | 2250 | license = [ pkgs.lib.licenses.mit ]; |
|
2198 | 2251 | }; |
|
2199 | 2252 | }; |
|
2200 | 2253 | "urllib3" = super.buildPythonPackage { |
|
2201 | 2254 | name = "urllib3-1.25.2"; |
|
2202 | 2255 | doCheck = false; |
|
2203 | 2256 | src = fetchurl { |
|
2204 | 2257 | url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz"; |
|
2205 | 2258 | sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55"; |
|
2206 | 2259 | }; |
|
2207 | 2260 | meta = { |
|
2208 | 2261 | license = [ pkgs.lib.licenses.mit ]; |
|
2209 | 2262 | }; |
|
2210 | 2263 | }; |
|
2211 | 2264 | "urlobject" = super.buildPythonPackage { |
|
2212 | 2265 | name = "urlobject-2.4.3"; |
|
2213 | 2266 | doCheck = false; |
|
2214 | 2267 | src = fetchurl { |
|
2215 | 2268 | url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz"; |
|
2216 | 2269 | sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7"; |
|
2217 | 2270 | }; |
|
2218 | 2271 | meta = { |
|
2219 | 2272 | license = [ pkgs.lib.licenses.publicDomain ]; |
|
2220 | 2273 | }; |
|
2221 | 2274 | }; |
|
2222 | 2275 | "venusian" = super.buildPythonPackage { |
|
2223 | 2276 | name = "venusian-1.2.0"; |
|
2224 | 2277 | doCheck = false; |
|
2225 | 2278 | src = fetchurl { |
|
2226 | 2279 | url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz"; |
|
2227 | 2280 | sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34"; |
|
2228 | 2281 | }; |
|
2229 | 2282 | meta = { |
|
2230 | 2283 | license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
2231 | 2284 | }; |
|
2232 | 2285 | }; |
|
2233 | 2286 | "vine" = super.buildPythonPackage { |
|
2234 | 2287 | name = "vine-1.3.0"; |
|
2235 | 2288 | doCheck = false; |
|
2236 | 2289 | src = fetchurl { |
|
2237 | 2290 | url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz"; |
|
2238 | 2291 | sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk"; |
|
2239 | 2292 | }; |
|
2240 | 2293 | meta = { |
|
2241 | 2294 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
2242 | 2295 | }; |
|
2243 | 2296 | }; |
|
2244 | 2297 | "waitress" = super.buildPythonPackage { |
|
2245 | 2298 | name = "waitress-1.3.1"; |
|
2246 | 2299 | doCheck = false; |
|
2247 | 2300 | src = fetchurl { |
|
2248 | 2301 | url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz"; |
|
2249 | 2302 | sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7"; |
|
2250 | 2303 | }; |
|
2251 | 2304 | meta = { |
|
2252 | 2305 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2253 | 2306 | }; |
|
2254 | 2307 | }; |
|
2255 | 2308 | "wcwidth" = super.buildPythonPackage { |
|
2256 | 2309 | name = "wcwidth-0.1.9"; |
|
2257 | 2310 | doCheck = false; |
|
2258 | 2311 | src = fetchurl { |
|
2259 | 2312 | url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz"; |
|
2260 | 2313 | sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf"; |
|
2261 | 2314 | }; |
|
2262 | 2315 | meta = { |
|
2263 | 2316 | license = [ pkgs.lib.licenses.mit ]; |
|
2264 | 2317 | }; |
|
2265 | 2318 | }; |
|
2266 | 2319 | "webencodings" = super.buildPythonPackage { |
|
2267 | 2320 | name = "webencodings-0.5.1"; |
|
2268 | 2321 | doCheck = false; |
|
2269 | 2322 | src = fetchurl { |
|
2270 | 2323 | url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz"; |
|
2271 | 2324 | sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk"; |
|
2272 | 2325 | }; |
|
2273 | 2326 | meta = { |
|
2274 | 2327 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
2275 | 2328 | }; |
|
2276 | 2329 | }; |
|
2277 | 2330 | "weberror" = super.buildPythonPackage { |
|
2278 | 2331 | name = "weberror-0.13.1"; |
|
2279 | 2332 | doCheck = false; |
|
2280 | 2333 | propagatedBuildInputs = [ |
|
2281 | 2334 | self."webob" |
|
2282 | 2335 | self."tempita" |
|
2283 | 2336 | self."pygments" |
|
2284 | 2337 | self."paste" |
|
2285 | 2338 | ]; |
|
2286 | 2339 | src = fetchurl { |
|
2287 | 2340 | url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz"; |
|
2288 | 2341 | sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1"; |
|
2289 | 2342 | }; |
|
2290 | 2343 | meta = { |
|
2291 | 2344 | license = [ pkgs.lib.licenses.mit ]; |
|
2292 | 2345 | }; |
|
2293 | 2346 | }; |
|
2294 | 2347 | "webhelpers2" = super.buildPythonPackage { |
|
2295 | 2348 | name = "webhelpers2-2.0"; |
|
2296 | 2349 | doCheck = false; |
|
2297 | 2350 | propagatedBuildInputs = [ |
|
2298 | 2351 | self."markupsafe" |
|
2299 | 2352 | self."six" |
|
2300 | 2353 | ]; |
|
2301 | 2354 | src = fetchurl { |
|
2302 | 2355 | url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz"; |
|
2303 | 2356 | sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs"; |
|
2304 | 2357 | }; |
|
2305 | 2358 | meta = { |
|
2306 | 2359 | license = [ pkgs.lib.licenses.mit ]; |
|
2307 | 2360 | }; |
|
2308 | 2361 | }; |
|
2309 | 2362 | "webob" = super.buildPythonPackage { |
|
2310 | 2363 | name = "webob-1.8.5"; |
|
2311 | 2364 | doCheck = false; |
|
2312 | 2365 | src = fetchurl { |
|
2313 | 2366 | url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz"; |
|
2314 | 2367 | sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5"; |
|
2315 | 2368 | }; |
|
2316 | 2369 | meta = { |
|
2317 | 2370 | license = [ pkgs.lib.licenses.mit ]; |
|
2318 | 2371 | }; |
|
2319 | 2372 | }; |
|
2320 | 2373 | "webtest" = super.buildPythonPackage { |
|
2321 | 2374 | name = "webtest-2.0.34"; |
|
2322 | 2375 | doCheck = false; |
|
2323 | 2376 | propagatedBuildInputs = [ |
|
2324 | 2377 | self."six" |
|
2325 | 2378 | self."webob" |
|
2326 | 2379 | self."waitress" |
|
2327 | 2380 | self."beautifulsoup4" |
|
2328 | 2381 | ]; |
|
2329 | 2382 | src = fetchurl { |
|
2330 | 2383 | url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz"; |
|
2331 | 2384 | sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi"; |
|
2332 | 2385 | }; |
|
2333 | 2386 | meta = { |
|
2334 | 2387 | license = [ pkgs.lib.licenses.mit ]; |
|
2335 | 2388 | }; |
|
2336 | 2389 | }; |
|
2337 | 2390 | "whoosh" = super.buildPythonPackage { |
|
2338 | 2391 | name = "whoosh-2.7.4"; |
|
2339 | 2392 | doCheck = false; |
|
2340 | 2393 | src = fetchurl { |
|
2341 | 2394 | url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz"; |
|
2342 | 2395 | sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw"; |
|
2343 | 2396 | }; |
|
2344 | 2397 | meta = { |
|
2345 | 2398 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ]; |
|
2346 | 2399 | }; |
|
2347 | 2400 | }; |
|
2348 | 2401 | "ws4py" = super.buildPythonPackage { |
|
2349 | 2402 | name = "ws4py-0.5.1"; |
|
2350 | 2403 | doCheck = false; |
|
2351 | 2404 | src = fetchurl { |
|
2352 | 2405 | url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz"; |
|
2353 | 2406 | sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19"; |
|
2354 | 2407 | }; |
|
2355 | 2408 | meta = { |
|
2356 | 2409 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
2357 | 2410 | }; |
|
2358 | 2411 | }; |
|
2359 | 2412 | "wsgiref" = super.buildPythonPackage { |
|
2360 | 2413 | name = "wsgiref-0.1.2"; |
|
2361 | 2414 | doCheck = false; |
|
2362 | 2415 | src = fetchurl { |
|
2363 | 2416 | url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip"; |
|
2364 | 2417 | sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7"; |
|
2365 | 2418 | }; |
|
2366 | 2419 | meta = { |
|
2367 | 2420 | license = [ { fullName = "PSF or ZPL"; } ]; |
|
2368 | 2421 | }; |
|
2369 | 2422 | }; |
|
2370 | 2423 | "zipp" = super.buildPythonPackage { |
|
2371 | 2424 | name = "zipp-1.2.0"; |
|
2372 | 2425 | doCheck = false; |
|
2373 | 2426 | propagatedBuildInputs = [ |
|
2374 | 2427 | self."contextlib2" |
|
2375 | 2428 | ]; |
|
2376 | 2429 | src = fetchurl { |
|
2377 | 2430 | url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz"; |
|
2378 | 2431 | sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167"; |
|
2379 | 2432 | }; |
|
2380 | 2433 | meta = { |
|
2381 | 2434 | license = [ pkgs.lib.licenses.mit ]; |
|
2382 | 2435 | }; |
|
2383 | 2436 | }; |
|
2384 | 2437 | "zope.cachedescriptors" = super.buildPythonPackage { |
|
2385 | 2438 | name = "zope.cachedescriptors-4.3.1"; |
|
2386 | 2439 | doCheck = false; |
|
2387 | 2440 | propagatedBuildInputs = [ |
|
2388 | 2441 | self."setuptools" |
|
2389 | 2442 | ]; |
|
2390 | 2443 | src = fetchurl { |
|
2391 | 2444 | url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz"; |
|
2392 | 2445 | sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z"; |
|
2393 | 2446 | }; |
|
2394 | 2447 | meta = { |
|
2395 | 2448 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2396 | 2449 | }; |
|
2397 | 2450 | }; |
|
2398 | 2451 | "zope.deprecation" = super.buildPythonPackage { |
|
2399 | 2452 | name = "zope.deprecation-4.4.0"; |
|
2400 | 2453 | doCheck = false; |
|
2401 | 2454 | propagatedBuildInputs = [ |
|
2402 | 2455 | self."setuptools" |
|
2403 | 2456 | ]; |
|
2404 | 2457 | src = fetchurl { |
|
2405 | 2458 | url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz"; |
|
2406 | 2459 | sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d"; |
|
2407 | 2460 | }; |
|
2408 | 2461 | meta = { |
|
2409 | 2462 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2410 | 2463 | }; |
|
2411 | 2464 | }; |
|
2412 | 2465 | "zope.event" = super.buildPythonPackage { |
|
2413 | 2466 | name = "zope.event-4.4"; |
|
2414 | 2467 | doCheck = false; |
|
2415 | 2468 | propagatedBuildInputs = [ |
|
2416 | 2469 | self."setuptools" |
|
2417 | 2470 | ]; |
|
2418 | 2471 | src = fetchurl { |
|
2419 | 2472 | url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz"; |
|
2420 | 2473 | sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9"; |
|
2421 | 2474 | }; |
|
2422 | 2475 | meta = { |
|
2423 | 2476 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2424 | 2477 | }; |
|
2425 | 2478 | }; |
|
2426 | 2479 | "zope.interface" = super.buildPythonPackage { |
|
2427 | 2480 | name = "zope.interface-4.6.0"; |
|
2428 | 2481 | doCheck = false; |
|
2429 | 2482 | propagatedBuildInputs = [ |
|
2430 | 2483 | self."setuptools" |
|
2431 | 2484 | ]; |
|
2432 | 2485 | src = fetchurl { |
|
2433 | 2486 | url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz"; |
|
2434 | 2487 | sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v"; |
|
2435 | 2488 | }; |
|
2436 | 2489 | meta = { |
|
2437 | 2490 | license = [ pkgs.lib.licenses.zpl21 ]; |
|
2438 | 2491 | }; |
|
2439 | 2492 | }; |
|
2440 | 2493 | |
|
2441 | 2494 | ### Test requirements |
|
2442 | 2495 | |
|
2443 | 2496 | |
|
2444 | 2497 | } |
@@ -1,123 +1,123 b'' | |||
|
1 | 1 | ## dependencies |
|
2 | 2 | |
|
3 | 3 | amqp==2.5.2 |
|
4 | 4 | babel==1.3 |
|
5 | 5 | beaker==1.9.1 |
|
6 | 6 | bleach==3.1.3 |
|
7 | 7 | celery==4.3.0 |
|
8 | channelstream==0. | |
|
8 | channelstream==0.6.14 | |
|
9 | 9 | click==7.0 |
|
10 | 10 | colander==1.7.0 |
|
11 | 11 | # our custom configobj |
|
12 | 12 | https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6 |
|
13 | 13 | cssselect==1.0.3 |
|
14 | 14 | cryptography==2.6.1 |
|
15 | 15 | decorator==4.1.2 |
|
16 | 16 | deform==2.0.8 |
|
17 | 17 | docutils==0.16.0 |
|
18 | 18 | dogpile.cache==0.9.0 |
|
19 | 19 | dogpile.core==0.4.1 |
|
20 | 20 | formencode==1.2.4 |
|
21 | 21 | future==0.14.3 |
|
22 | 22 | futures==3.0.2 |
|
23 | 23 | infrae.cache==1.0.1 |
|
24 | 24 | iso8601==0.1.12 |
|
25 | itsdangerous==0 | |
|
25 | itsdangerous==1.1.0 | |
|
26 | 26 | kombu==4.6.6 |
|
27 | 27 | lxml==4.2.5 |
|
28 | 28 | mako==1.1.0 |
|
29 | 29 | markdown==2.6.11 |
|
30 | 30 | markupsafe==1.1.1 |
|
31 | 31 | msgpack-python==0.5.6 |
|
32 | 32 | pyotp==2.3.0 |
|
33 | 33 | packaging==20.3 |
|
34 | 34 | pathlib2==2.3.5 |
|
35 | 35 | paste==3.4.0 |
|
36 | 36 | pastedeploy==2.1.0 |
|
37 | 37 | pastescript==3.2.0 |
|
38 | 38 | peppercorn==0.6 |
|
39 | 39 | premailer==3.6.1 |
|
40 | 40 | psutil==5.7.0 |
|
41 | 41 | py-bcrypt==0.4 |
|
42 | 42 | pycurl==7.43.0.3 |
|
43 | 43 | pycrypto==2.6.1 |
|
44 | 44 | pygments==2.4.2 |
|
45 | 45 | pyparsing==2.4.7 |
|
46 | 46 | pyramid-debugtoolbar==4.6.1 |
|
47 | 47 | pyramid-mako==1.1.0 |
|
48 | 48 | pyramid==1.10.4 |
|
49 | 49 | pyramid_mailer==0.15.1 |
|
50 | 50 | python-dateutil==2.8.1 |
|
51 | 51 | python-ldap==3.2.0 |
|
52 | 52 | python-memcached==1.59 |
|
53 | 53 | python-pam==1.8.4 |
|
54 | 54 | python-saml==2.4.2 |
|
55 | 55 | pytz==2019.3 |
|
56 | 56 | tzlocal==1.5.1 |
|
57 | 57 | pyzmq==14.6.0 |
|
58 | 58 | py-gfm==0.1.4 |
|
59 | 59 | redis==3.4.1 |
|
60 | 60 | repoze.lru==0.7 |
|
61 | 61 | requests==2.22.0 |
|
62 | 62 | routes==2.4.1 |
|
63 | 63 | simplejson==3.16.0 |
|
64 | 64 | six==1.11.0 |
|
65 | 65 | sqlalchemy==1.3.15 |
|
66 | 66 | sshpubkeys==3.1.0 |
|
67 | 67 | subprocess32==3.5.4 |
|
68 | 68 | supervisor==4.1.0 |
|
69 | 69 | translationstring==1.3 |
|
70 | 70 | urllib3==1.25.2 |
|
71 | 71 | urlobject==2.4.3 |
|
72 | 72 | venusian==1.2.0 |
|
73 | 73 | weberror==0.13.1 |
|
74 | 74 | webhelpers2==2.0 |
|
75 | 75 | webob==1.8.5 |
|
76 | 76 | whoosh==2.7.4 |
|
77 | 77 | wsgiref==0.1.2 |
|
78 | 78 | zope.cachedescriptors==4.3.1 |
|
79 | 79 | zope.deprecation==4.4.0 |
|
80 | 80 | zope.event==4.4.0 |
|
81 | 81 | zope.interface==4.6.0 |
|
82 | 82 | |
|
83 | 83 | # DB drivers |
|
84 | 84 | mysql-python==1.2.5 |
|
85 | 85 | pymysql==0.8.1 |
|
86 | 86 | pysqlite==2.8.3 |
|
87 | 87 | psycopg2==2.8.4 |
|
88 | 88 | |
|
89 | 89 | # IPYTHON RENDERING |
|
90 | 90 | # entrypoints backport, pypi version doesn't support egg installs |
|
91 | 91 | https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d#egg=entrypoints==0.2.2.rhodecode-upstream1 |
|
92 | 92 | nbconvert==5.3.1 |
|
93 | 93 | nbformat==4.4.0 |
|
94 | 94 | jupyter-client==5.0.0 |
|
95 | 95 | jupyter-core==4.5.0 |
|
96 | 96 | |
|
97 | 97 | ## cli tools |
|
98 | 98 | alembic==1.4.2 |
|
99 | 99 | invoke==0.13.0 |
|
100 | 100 | bumpversion==0.5.3 |
|
101 | 101 | |
|
102 | 102 | ## http servers |
|
103 | 103 | gevent==1.5.0 |
|
104 | 104 | greenlet==0.4.15 |
|
105 | 105 | gunicorn==19.9.0 |
|
106 | 106 | waitress==1.3.1 |
|
107 | 107 | |
|
108 | 108 | ## debug |
|
109 | 109 | ipdb==0.13.2 |
|
110 | 110 | ipython==5.1.0 |
|
111 | 111 | |
|
112 | 112 | ## rhodecode-tools, special case, use file://PATH.tar.gz#egg=rhodecode-tools==X.Y.Z, to test local version |
|
113 | 113 | https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a#egg=rhodecode-tools==1.4.0 |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | ## appenlight |
|
117 | 117 | appenlight-client==0.6.26 |
|
118 | 118 | |
|
119 | 119 | ## test related requirements |
|
120 | 120 | -r requirements_test.txt |
|
121 | 121 | |
|
122 | 122 | ## uncomment to add the debug libraries |
|
123 | 123 | #-r requirements_debug.txt |
@@ -1,27 +1,28 b'' | |||
|
1 | 1 | # contains not directly required libraries we want to pin the version. |
|
2 | 2 | |
|
3 | 3 | atomicwrites==1.3.0 |
|
4 | 4 | attrs==19.3.0 |
|
5 | 5 | asn1crypto==0.24.0 |
|
6 | 6 | billiard==3.6.1.0 |
|
7 | 7 | cffi==1.12.3 |
|
8 | 8 | chameleon==2.24 |
|
9 | 9 | configparser==4.0.2 |
|
10 | 10 | contextlib2==0.6.0.post1 |
|
11 | 11 | ecdsa==0.13.2 |
|
12 | 12 | gnureadline==6.3.8 |
|
13 | 13 | hupper==1.10.2 |
|
14 | 14 | ipaddress==1.0.23 |
|
15 | 15 | importlib-metadata==1.6.0 |
|
16 | 16 | jinja2==2.9.6 |
|
17 | 17 | jsonschema==2.6.0 |
|
18 | 18 | pluggy==0.13.1 |
|
19 | 19 | pyasn1-modules==0.2.6 |
|
20 | 20 | pyramid-jinja2==2.7 |
|
21 | pyramid-apispec==0.3.2 | |
|
21 | 22 | scandir==1.10.0 |
|
22 | 23 | setproctitle==1.1.10 |
|
23 | 24 | tempita==0.5.2 |
|
24 | 25 | testpath==0.4.4 |
|
25 | 26 | transaction==2.4.0 |
|
26 | 27 | vine==1.3.0 |
|
27 | wcwidth==0.1.9 | |
|
28 | wcwidth==0.1.9 No newline at end of file |
@@ -1,60 +1,60 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | from collections import OrderedDict |
|
23 | 23 | |
|
24 | 24 | import sys |
|
25 | 25 | import platform |
|
26 | 26 | |
|
27 | 27 | VERSION = tuple(open(os.path.join( |
|
28 | 28 | os.path.dirname(__file__), 'VERSION')).read().split('.')) |
|
29 | 29 | |
|
30 | 30 | BACKENDS = OrderedDict() |
|
31 | 31 | |
|
32 | 32 | BACKENDS['hg'] = 'Mercurial repository' |
|
33 | 33 | BACKENDS['git'] = 'Git repository' |
|
34 | 34 | BACKENDS['svn'] = 'Subversion repository' |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | CELERY_ENABLED = False |
|
38 | 38 | CELERY_EAGER = False |
|
39 | 39 | |
|
40 | 40 | # link to config for pyramid |
|
41 | 41 | CONFIG = {} |
|
42 | 42 | |
|
43 | 43 | # Populated with the settings dictionary from application init in |
|
44 | 44 | # rhodecode.conf.environment.load_pyramid_environment |
|
45 | 45 | PYRAMID_SETTINGS = {} |
|
46 | 46 | |
|
47 | 47 | # Linked module for extensions |
|
48 | 48 | EXTENSIONS = {} |
|
49 | 49 | |
|
50 | 50 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) |
|
51 | __dbversion__ = 10 | |
|
51 | __dbversion__ = 109 # defines current db version for migrations | |
|
52 | 52 | __platform__ = platform.system() |
|
53 | 53 | __license__ = 'AGPLv3, and Commercial License' |
|
54 | 54 | __author__ = 'RhodeCode GmbH' |
|
55 | 55 | __url__ = 'https://code.rhodecode.com' |
|
56 | 56 | |
|
57 | 57 | is_windows = __platform__ in ['Windows'] |
|
58 | 58 | is_unix = not is_windows |
|
59 | 59 | is_test = False |
|
60 | 60 | disable_error_handler = False |
@@ -1,453 +1,452 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | JSON RPC utils |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import collections |
|
26 | 26 | import logging |
|
27 | 27 | |
|
28 | 28 | from rhodecode.api.exc import JSONRPCError |
|
29 | 29 | from rhodecode.lib.auth import ( |
|
30 | 30 | HasPermissionAnyApi, HasRepoPermissionAnyApi, HasRepoGroupPermissionAnyApi) |
|
31 | 31 | from rhodecode.lib.utils import safe_unicode |
|
32 | 32 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
33 | 33 | from rhodecode.lib.view_utils import get_commit_from_ref_name |
|
34 | 34 | from rhodecode.lib.utils2 import str2bool |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class OAttr(object): |
|
40 | 40 | """ |
|
41 | 41 | Special Option that defines other attribute, and can default to them |
|
42 | 42 | |
|
43 | 43 | Example:: |
|
44 | 44 | |
|
45 | 45 | def test(apiuser, userid=Optional(OAttr('apiuser')): |
|
46 | 46 | user = Optional.extract(userid, evaluate_locals=local()) |
|
47 | 47 | #if we pass in userid, we get it, else it will default to apiuser |
|
48 | 48 | #attribute |
|
49 | 49 | """ |
|
50 | 50 | |
|
51 | 51 | def __init__(self, attr_name): |
|
52 | 52 | self.attr_name = attr_name |
|
53 | 53 | |
|
54 | 54 | def __repr__(self): |
|
55 | 55 | return '<OptionalAttr:%s>' % self.attr_name |
|
56 | 56 | |
|
57 | 57 | def __call__(self): |
|
58 | 58 | return self |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | class Optional(object): |
|
62 | 62 | """ |
|
63 | 63 | Defines an optional parameter:: |
|
64 | 64 | |
|
65 | 65 | param = param.getval() if isinstance(param, Optional) else param |
|
66 | 66 | param = param() if isinstance(param, Optional) else param |
|
67 | 67 | |
|
68 | 68 | is equivalent of:: |
|
69 | 69 | |
|
70 | 70 | param = Optional.extract(param) |
|
71 | 71 | |
|
72 | 72 | """ |
|
73 | 73 | |
|
74 | 74 | def __init__(self, type_): |
|
75 | 75 | self.type_ = type_ |
|
76 | 76 | |
|
77 | 77 | def __repr__(self): |
|
78 | 78 | return '<Optional:%s>' % self.type_.__repr__() |
|
79 | 79 | |
|
80 | 80 | def __call__(self): |
|
81 | 81 | return self.getval() |
|
82 | 82 | |
|
83 | 83 | def getval(self, evaluate_locals=None): |
|
84 | 84 | """ |
|
85 | 85 | returns value from this Optional instance |
|
86 | 86 | """ |
|
87 | 87 | if isinstance(self.type_, OAttr): |
|
88 | 88 | param_name = self.type_.attr_name |
|
89 | 89 | if evaluate_locals: |
|
90 | 90 | return evaluate_locals[param_name] |
|
91 | 91 | # use params name |
|
92 | 92 | return param_name |
|
93 | 93 | return self.type_ |
|
94 | 94 | |
|
95 | 95 | @classmethod |
|
96 | 96 | def extract(cls, val, evaluate_locals=None, binary=None): |
|
97 | 97 | """ |
|
98 | 98 | Extracts value from Optional() instance |
|
99 | 99 | |
|
100 | 100 | :param val: |
|
101 | 101 | :return: original value if it's not Optional instance else |
|
102 | 102 | value of instance |
|
103 | 103 | """ |
|
104 | 104 | if isinstance(val, cls): |
|
105 | 105 | val = val.getval(evaluate_locals) |
|
106 | 106 | |
|
107 | 107 | if binary: |
|
108 | 108 | val = str2bool(val) |
|
109 | 109 | |
|
110 | 110 | return val |
|
111 | 111 | |
|
112 | 112 | |
|
113 | 113 | def parse_args(cli_args, key_prefix=''): |
|
114 | 114 | from rhodecode.lib.utils2 import (escape_split) |
|
115 | 115 | kwargs = collections.defaultdict(dict) |
|
116 | 116 | for el in escape_split(cli_args, ','): |
|
117 | 117 | kv = escape_split(el, '=', 1) |
|
118 | 118 | if len(kv) == 2: |
|
119 | 119 | k, v = kv |
|
120 | 120 | kwargs[key_prefix + k] = v |
|
121 | 121 | return kwargs |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def get_origin(obj): |
|
125 | 125 | """ |
|
126 | 126 | Get origin of permission from object. |
|
127 | 127 | |
|
128 | 128 | :param obj: |
|
129 | 129 | """ |
|
130 | 130 | origin = 'permission' |
|
131 | 131 | |
|
132 | 132 | if getattr(obj, 'owner_row', '') and getattr(obj, 'admin_row', ''): |
|
133 | 133 | # admin and owner case, maybe we should use dual string ? |
|
134 | 134 | origin = 'owner' |
|
135 | 135 | elif getattr(obj, 'owner_row', ''): |
|
136 | 136 | origin = 'owner' |
|
137 | 137 | elif getattr(obj, 'admin_row', ''): |
|
138 | 138 | origin = 'super-admin' |
|
139 | 139 | return origin |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | def store_update(updates, attr, name): |
|
143 | 143 | """ |
|
144 | 144 | Stores param in updates dict if it's not instance of Optional |
|
145 | 145 | allows easy updates of passed in params |
|
146 | 146 | """ |
|
147 | 147 | if not isinstance(attr, Optional): |
|
148 | 148 | updates[name] = attr |
|
149 | 149 | |
|
150 | 150 | |
|
151 | 151 | def has_superadmin_permission(apiuser): |
|
152 | 152 | """ |
|
153 | 153 | Return True if apiuser is admin or return False |
|
154 | 154 | |
|
155 | 155 | :param apiuser: |
|
156 | 156 | """ |
|
157 | 157 | if HasPermissionAnyApi('hg.admin')(user=apiuser): |
|
158 | 158 | return True |
|
159 | 159 | return False |
|
160 | 160 | |
|
161 | 161 | |
|
162 | 162 | def validate_repo_permissions(apiuser, repoid, repo, perms): |
|
163 | 163 | """ |
|
164 | 164 | Raise JsonRPCError if apiuser is not authorized or return True |
|
165 | 165 | |
|
166 | 166 | :param apiuser: |
|
167 | 167 | :param repoid: |
|
168 | 168 | :param repo: |
|
169 | 169 | :param perms: |
|
170 | 170 | """ |
|
171 | 171 | if not HasRepoPermissionAnyApi(*perms)( |
|
172 | 172 | user=apiuser, repo_name=repo.repo_name): |
|
173 | raise JSONRPCError( | |
|
174 | 'repository `%s` does not exist' % repoid) | |
|
173 | raise JSONRPCError('repository `%s` does not exist' % repoid) | |
|
175 | 174 | |
|
176 | 175 | return True |
|
177 | 176 | |
|
178 | 177 | |
|
179 | 178 | def validate_repo_group_permissions(apiuser, repogroupid, repo_group, perms): |
|
180 | 179 | """ |
|
181 | 180 | Raise JsonRPCError if apiuser is not authorized or return True |
|
182 | 181 | |
|
183 | 182 | :param apiuser: |
|
184 | 183 | :param repogroupid: just the id of repository group |
|
185 | 184 | :param repo_group: instance of repo_group |
|
186 | 185 | :param perms: |
|
187 | 186 | """ |
|
188 | 187 | if not HasRepoGroupPermissionAnyApi(*perms)( |
|
189 | 188 | user=apiuser, group_name=repo_group.group_name): |
|
190 | 189 | raise JSONRPCError( |
|
191 | 190 | 'repository group `%s` does not exist' % repogroupid) |
|
192 | 191 | |
|
193 | 192 | return True |
|
194 | 193 | |
|
195 | 194 | |
|
196 | 195 | def validate_set_owner_permissions(apiuser, owner): |
|
197 | 196 | if isinstance(owner, Optional): |
|
198 | 197 | owner = get_user_or_error(apiuser.user_id) |
|
199 | 198 | else: |
|
200 | 199 | if has_superadmin_permission(apiuser): |
|
201 | 200 | owner = get_user_or_error(owner) |
|
202 | 201 | else: |
|
203 | 202 | # forbid setting owner for non-admins |
|
204 | 203 | raise JSONRPCError( |
|
205 | 204 | 'Only RhodeCode super-admin can specify `owner` param') |
|
206 | 205 | return owner |
|
207 | 206 | |
|
208 | 207 | |
|
209 | 208 | def get_user_or_error(userid): |
|
210 | 209 | """ |
|
211 | 210 | Get user by id or name or return JsonRPCError if not found |
|
212 | 211 | |
|
213 | 212 | :param userid: |
|
214 | 213 | """ |
|
215 | 214 | from rhodecode.model.user import UserModel |
|
216 | 215 | user_model = UserModel() |
|
217 | 216 | |
|
218 | 217 | if isinstance(userid, (int, long)): |
|
219 | 218 | try: |
|
220 | 219 | user = user_model.get_user(userid) |
|
221 | 220 | except ValueError: |
|
222 | 221 | user = None |
|
223 | 222 | else: |
|
224 | 223 | user = user_model.get_by_username(userid) |
|
225 | 224 | |
|
226 | 225 | if user is None: |
|
227 | 226 | raise JSONRPCError( |
|
228 | 227 | 'user `%s` does not exist' % (userid,)) |
|
229 | 228 | return user |
|
230 | 229 | |
|
231 | 230 | |
|
232 | 231 | def get_repo_or_error(repoid): |
|
233 | 232 | """ |
|
234 | 233 | Get repo by id or name or return JsonRPCError if not found |
|
235 | 234 | |
|
236 | 235 | :param repoid: |
|
237 | 236 | """ |
|
238 | 237 | from rhodecode.model.repo import RepoModel |
|
239 | 238 | repo_model = RepoModel() |
|
240 | 239 | |
|
241 | 240 | if isinstance(repoid, (int, long)): |
|
242 | 241 | try: |
|
243 | 242 | repo = repo_model.get_repo(repoid) |
|
244 | 243 | except ValueError: |
|
245 | 244 | repo = None |
|
246 | 245 | else: |
|
247 | 246 | repo = repo_model.get_by_repo_name(repoid) |
|
248 | 247 | |
|
249 | 248 | if repo is None: |
|
250 | 249 | raise JSONRPCError( |
|
251 | 250 | 'repository `%s` does not exist' % (repoid,)) |
|
252 | 251 | return repo |
|
253 | 252 | |
|
254 | 253 | |
|
255 | 254 | def get_repo_group_or_error(repogroupid): |
|
256 | 255 | """ |
|
257 | 256 | Get repo group by id or name or return JsonRPCError if not found |
|
258 | 257 | |
|
259 | 258 | :param repogroupid: |
|
260 | 259 | """ |
|
261 | 260 | from rhodecode.model.repo_group import RepoGroupModel |
|
262 | 261 | repo_group_model = RepoGroupModel() |
|
263 | 262 | |
|
264 | 263 | if isinstance(repogroupid, (int, long)): |
|
265 | 264 | try: |
|
266 | 265 | repo_group = repo_group_model._get_repo_group(repogroupid) |
|
267 | 266 | except ValueError: |
|
268 | 267 | repo_group = None |
|
269 | 268 | else: |
|
270 | 269 | repo_group = repo_group_model.get_by_group_name(repogroupid) |
|
271 | 270 | |
|
272 | 271 | if repo_group is None: |
|
273 | 272 | raise JSONRPCError( |
|
274 | 273 | 'repository group `%s` does not exist' % (repogroupid,)) |
|
275 | 274 | return repo_group |
|
276 | 275 | |
|
277 | 276 | |
|
278 | 277 | def get_user_group_or_error(usergroupid): |
|
279 | 278 | """ |
|
280 | 279 | Get user group by id or name or return JsonRPCError if not found |
|
281 | 280 | |
|
282 | 281 | :param usergroupid: |
|
283 | 282 | """ |
|
284 | 283 | from rhodecode.model.user_group import UserGroupModel |
|
285 | 284 | user_group_model = UserGroupModel() |
|
286 | 285 | |
|
287 | 286 | if isinstance(usergroupid, (int, long)): |
|
288 | 287 | try: |
|
289 | 288 | user_group = user_group_model.get_group(usergroupid) |
|
290 | 289 | except ValueError: |
|
291 | 290 | user_group = None |
|
292 | 291 | else: |
|
293 | 292 | user_group = user_group_model.get_by_name(usergroupid) |
|
294 | 293 | |
|
295 | 294 | if user_group is None: |
|
296 | 295 | raise JSONRPCError( |
|
297 | 296 | 'user group `%s` does not exist' % (usergroupid,)) |
|
298 | 297 | return user_group |
|
299 | 298 | |
|
300 | 299 | |
|
301 | 300 | def get_perm_or_error(permid, prefix=None): |
|
302 | 301 | """ |
|
303 | 302 | Get permission by id or name or return JsonRPCError if not found |
|
304 | 303 | |
|
305 | 304 | :param permid: |
|
306 | 305 | """ |
|
307 | 306 | from rhodecode.model.permission import PermissionModel |
|
308 | 307 | |
|
309 | 308 | perm = PermissionModel.cls.get_by_key(permid) |
|
310 | 309 | if perm is None: |
|
311 | 310 | msg = 'permission `{}` does not exist.'.format(permid) |
|
312 | 311 | if prefix: |
|
313 | 312 | msg += ' Permission should start with prefix: `{}`'.format(prefix) |
|
314 | 313 | raise JSONRPCError(msg) |
|
315 | 314 | |
|
316 | 315 | if prefix: |
|
317 | 316 | if not perm.permission_name.startswith(prefix): |
|
318 | 317 | raise JSONRPCError('permission `%s` is invalid, ' |
|
319 | 318 | 'should start with %s' % (permid, prefix)) |
|
320 | 319 | return perm |
|
321 | 320 | |
|
322 | 321 | |
|
323 | 322 | def get_gist_or_error(gistid): |
|
324 | 323 | """ |
|
325 | 324 | Get gist by id or gist_access_id or return JsonRPCError if not found |
|
326 | 325 | |
|
327 | 326 | :param gistid: |
|
328 | 327 | """ |
|
329 | 328 | from rhodecode.model.gist import GistModel |
|
330 | 329 | |
|
331 | 330 | gist = GistModel.cls.get_by_access_id(gistid) |
|
332 | 331 | if gist is None: |
|
333 | 332 | raise JSONRPCError('gist `%s` does not exist' % (gistid,)) |
|
334 | 333 | return gist |
|
335 | 334 | |
|
336 | 335 | |
|
337 | 336 | def get_pull_request_or_error(pullrequestid): |
|
338 | 337 | """ |
|
339 | 338 | Get pull request by id or return JsonRPCError if not found |
|
340 | 339 | |
|
341 | 340 | :param pullrequestid: |
|
342 | 341 | """ |
|
343 | 342 | from rhodecode.model.pull_request import PullRequestModel |
|
344 | 343 | |
|
345 | 344 | try: |
|
346 | 345 | pull_request = PullRequestModel().get(int(pullrequestid)) |
|
347 | 346 | except ValueError: |
|
348 | 347 | raise JSONRPCError('pullrequestid must be an integer') |
|
349 | 348 | if not pull_request: |
|
350 | 349 | raise JSONRPCError('pull request `%s` does not exist' % ( |
|
351 | 350 | pullrequestid,)) |
|
352 | 351 | return pull_request |
|
353 | 352 | |
|
354 | 353 | |
|
355 | 354 | def build_commit_data(commit, detail_level): |
|
356 | 355 | parsed_diff = [] |
|
357 | 356 | if detail_level == 'extended': |
|
358 | 357 | for f_path in commit.added_paths: |
|
359 | 358 | parsed_diff.append(_get_commit_dict(filename=f_path, op='A')) |
|
360 | 359 | for f_path in commit.changed_paths: |
|
361 | 360 | parsed_diff.append(_get_commit_dict(filename=f_path, op='M')) |
|
362 | 361 | for f_path in commit.removed_paths: |
|
363 | 362 | parsed_diff.append(_get_commit_dict(filename=f_path, op='D')) |
|
364 | 363 | |
|
365 | 364 | elif detail_level == 'full': |
|
366 | 365 | from rhodecode.lib.diffs import DiffProcessor |
|
367 | 366 | diff_processor = DiffProcessor(commit.diff()) |
|
368 | 367 | for dp in diff_processor.prepare(): |
|
369 | 368 | del dp['stats']['ops'] |
|
370 | 369 | _stats = dp['stats'] |
|
371 | 370 | parsed_diff.append(_get_commit_dict( |
|
372 | 371 | filename=dp['filename'], op=dp['operation'], |
|
373 | 372 | new_revision=dp['new_revision'], |
|
374 | 373 | old_revision=dp['old_revision'], |
|
375 | 374 | raw_diff=dp['raw_diff'], stats=_stats)) |
|
376 | 375 | |
|
377 | 376 | return parsed_diff |
|
378 | 377 | |
|
379 | 378 | |
|
380 | 379 | def get_commit_or_error(ref, repo): |
|
381 | 380 | try: |
|
382 | 381 | ref_type, _, ref_hash = ref.split(':') |
|
383 | 382 | except ValueError: |
|
384 | 383 | raise JSONRPCError( |
|
385 | 384 | 'Ref `{ref}` given in a wrong format. Please check the API' |
|
386 | 385 | ' documentation for more details'.format(ref=ref)) |
|
387 | 386 | try: |
|
388 | 387 | # TODO: dan: refactor this to use repo.scm_instance().get_commit() |
|
389 | 388 | # once get_commit supports ref_types |
|
390 | 389 | return get_commit_from_ref_name(repo, ref_hash) |
|
391 | 390 | except RepositoryError: |
|
392 | 391 | raise JSONRPCError('Ref `{ref}` does not exist'.format(ref=ref)) |
|
393 | 392 | |
|
394 | 393 | |
|
395 | 394 | def _get_ref_hash(repo, type_, name): |
|
396 | 395 | vcs_repo = repo.scm_instance() |
|
397 | 396 | if type_ in ['branch'] and vcs_repo.alias in ('hg', 'git'): |
|
398 | 397 | return vcs_repo.branches[name] |
|
399 | 398 | elif type_ in ['bookmark', 'book'] and vcs_repo.alias == 'hg': |
|
400 | 399 | return vcs_repo.bookmarks[name] |
|
401 | 400 | else: |
|
402 | 401 | raise ValueError() |
|
403 | 402 | |
|
404 | 403 | |
|
405 | 404 | def resolve_ref_or_error(ref, repo, allowed_ref_types=None): |
|
406 | 405 | allowed_ref_types = allowed_ref_types or ['bookmark', 'book', 'tag', 'branch'] |
|
407 | 406 | |
|
408 | 407 | def _parse_ref(type_, name, hash_=None): |
|
409 | 408 | return type_, name, hash_ |
|
410 | 409 | |
|
411 | 410 | try: |
|
412 | 411 | ref_type, ref_name, ref_hash = _parse_ref(*ref.split(':')) |
|
413 | 412 | except TypeError: |
|
414 | 413 | raise JSONRPCError( |
|
415 | 414 | 'Ref `{ref}` given in a wrong format. Please check the API' |
|
416 | 415 | ' documentation for more details'.format(ref=ref)) |
|
417 | 416 | |
|
418 | 417 | if ref_type not in allowed_ref_types: |
|
419 | 418 | raise JSONRPCError( |
|
420 | 419 | 'Ref `{ref}` type is not allowed. ' |
|
421 | 420 | 'Only:{allowed_refs} are possible.'.format( |
|
422 | 421 | ref=ref, allowed_refs=allowed_ref_types)) |
|
423 | 422 | |
|
424 | 423 | try: |
|
425 | 424 | ref_hash = ref_hash or _get_ref_hash(repo, ref_type, ref_name) |
|
426 | 425 | except (KeyError, ValueError): |
|
427 | 426 | raise JSONRPCError( |
|
428 | 427 | 'The specified value:{type}:`{name}` does not exist, or is not allowed.'.format( |
|
429 | 428 | type=ref_type, name=ref_name)) |
|
430 | 429 | |
|
431 | 430 | return ':'.join([ref_type, ref_name, ref_hash]) |
|
432 | 431 | |
|
433 | 432 | |
|
434 | 433 | def _get_commit_dict( |
|
435 | 434 | filename, op, new_revision=None, old_revision=None, |
|
436 | 435 | raw_diff=None, stats=None): |
|
437 | 436 | if stats is None: |
|
438 | 437 | stats = { |
|
439 | 438 | "added": None, |
|
440 | 439 | "binary": None, |
|
441 | 440 | "deleted": None |
|
442 | 441 | } |
|
443 | 442 | return { |
|
444 | 443 | "filename": safe_unicode(filename), |
|
445 | 444 | "op": op, |
|
446 | 445 | |
|
447 | 446 | # extra details |
|
448 | 447 | "new_revision": new_revision, |
|
449 | 448 | "old_revision": old_revision, |
|
450 | 449 | |
|
451 | 450 | "raw_diff": raw_diff, |
|
452 | 451 | "stats": stats |
|
453 | 452 | } |
@@ -1,2506 +1,2507 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import time |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.api import ( |
|
26 | 26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
27 | 27 | from rhodecode.api.utils import ( |
|
28 | 28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
29 | 29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, |
|
30 | 30 | get_perm_or_error, parse_args, get_origin, build_commit_data, |
|
31 | 31 | validate_set_owner_permissions) |
|
32 | 32 | from rhodecode.lib import audit_logger, rc_cache |
|
33 | 33 | from rhodecode.lib import repo_maintenance |
|
34 | 34 | from rhodecode.lib.auth import ( |
|
35 | 35 | HasPermissionAnyApi, HasUserGroupPermissionAnyApi, |
|
36 | 36 | HasRepoPermissionAnyApi) |
|
37 | 37 | from rhodecode.lib.celerylib.utils import get_task_id |
|
38 | 38 | from rhodecode.lib.utils2 import ( |
|
39 | 39 | str2bool, time_to_datetime, safe_str, safe_int, safe_unicode) |
|
40 | 40 | from rhodecode.lib.ext_json import json |
|
41 | 41 | from rhodecode.lib.exceptions import ( |
|
42 | 42 | StatusChangeOnClosedPullRequestError, CommentVersionMismatch) |
|
43 | 43 | from rhodecode.lib.vcs import RepositoryError |
|
44 | 44 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
45 | 45 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
46 | 46 | from rhodecode.model.comment import CommentsModel |
|
47 | 47 | from rhodecode.model.db import ( |
|
48 | 48 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, |
|
49 | 49 | ChangesetComment) |
|
50 | 50 | from rhodecode.model.permission import PermissionModel |
|
51 | 51 | from rhodecode.model.pull_request import PullRequestModel |
|
52 | 52 | from rhodecode.model.repo import RepoModel |
|
53 | 53 | from rhodecode.model.scm import ScmModel, RepoList |
|
54 | 54 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
55 | 55 | from rhodecode.model import validation_schema |
|
56 | 56 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
57 | 57 | |
|
58 | 58 | log = logging.getLogger(__name__) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | @jsonrpc_method() |
|
62 | 62 | def get_repo(request, apiuser, repoid, cache=Optional(True)): |
|
63 | 63 | """ |
|
64 | 64 | Gets an existing repository by its name or repository_id. |
|
65 | 65 | |
|
66 | 66 |     The members section of the output returns the user groups and users

67 | 67 |     associated with that repository.
|
68 | 68 | |
|
69 | 69 | This command can only be run using an |authtoken| with admin rights, |
|
70 | 70 | or users with at least read rights to the |repo|. |
|
71 | 71 | |
|
72 | 72 | :param apiuser: This is filled automatically from the |authtoken|. |
|
73 | 73 | :type apiuser: AuthUser |
|
74 | 74 | :param repoid: The repository name or repository id. |
|
75 | 75 | :type repoid: str or int |
|
76 | 76 | :param cache: use the cached value for last changeset |
|
77 | 77 |     :type cache: Optional(bool)
|
78 | 78 | |
|
79 | 79 | Example output: |
|
80 | 80 | |
|
81 | 81 | .. code-block:: bash |
|
82 | 82 | |
|
83 | 83 | { |
|
84 | 84 | "error": null, |
|
85 | 85 | "id": <repo_id>, |
|
86 | 86 | "result": { |
|
87 | 87 | "clone_uri": null, |
|
88 | 88 | "created_on": "timestamp", |
|
89 | 89 | "description": "repo description", |
|
90 | 90 | "enable_downloads": false, |
|
91 | 91 | "enable_locking": false, |
|
92 | 92 | "enable_statistics": false, |
|
93 | 93 | "followers": [ |
|
94 | 94 | { |
|
95 | 95 | "active": true, |
|
96 | 96 | "admin": false, |
|
97 | 97 | "api_key": "****************************************", |
|
98 | 98 | "api_keys": [ |
|
99 | 99 | "****************************************" |
|
100 | 100 | ], |
|
101 | 101 | "email": "user@example.com", |
|
102 | 102 | "emails": [ |
|
103 | 103 | "user@example.com" |
|
104 | 104 | ], |
|
105 | 105 | "extern_name": "rhodecode", |
|
106 | 106 | "extern_type": "rhodecode", |
|
107 | 107 | "firstname": "username", |
|
108 | 108 | "ip_addresses": [], |
|
109 | 109 | "language": null, |
|
110 | 110 | "last_login": "2015-09-16T17:16:35.854", |
|
111 | 111 | "lastname": "surname", |
|
112 | 112 | "user_id": <user_id>, |
|
113 | 113 | "username": "name" |
|
114 | 114 | } |
|
115 | 115 | ], |
|
116 | 116 | "fork_of": "parent-repo", |
|
117 | 117 | "landing_rev": [ |
|
118 | 118 | "rev", |
|
119 | 119 | "tip" |
|
120 | 120 | ], |
|
121 | 121 | "last_changeset": { |
|
122 | 122 | "author": "User <user@example.com>", |
|
123 | 123 | "branch": "default", |
|
124 | 124 | "date": "timestamp", |
|
125 | 125 | "message": "last commit message", |
|
126 | 126 | "parents": [ |
|
127 | 127 | { |
|
128 | 128 | "raw_id": "commit-id" |
|
129 | 129 | } |
|
130 | 130 | ], |
|
131 | 131 | "raw_id": "commit-id", |
|
132 | 132 | "revision": <revision number>, |
|
133 | 133 | "short_id": "short id" |
|
134 | 134 | }, |
|
135 | 135 | "lock_reason": null, |
|
136 | 136 | "locked_by": null, |
|
137 | 137 | "locked_date": null, |
|
138 | 138 | "owner": "owner-name", |
|
139 | 139 | "permissions": [ |
|
140 | 140 | { |
|
141 | 141 | "name": "super-admin-name", |
|
142 | 142 | "origin": "super-admin", |
|
143 | 143 | "permission": "repository.admin", |
|
144 | 144 | "type": "user" |
|
145 | 145 | }, |
|
146 | 146 | { |
|
147 | 147 | "name": "owner-name", |
|
148 | 148 | "origin": "owner", |
|
149 | 149 | "permission": "repository.admin", |
|
150 | 150 | "type": "user" |
|
151 | 151 | }, |
|
152 | 152 | { |
|
153 | 153 | "name": "user-group-name", |
|
154 | 154 | "origin": "permission", |
|
155 | 155 | "permission": "repository.write", |
|
156 | 156 | "type": "user_group" |
|
157 | 157 | } |
|
158 | 158 | ], |
|
159 | 159 | "private": true, |
|
160 | 160 | "repo_id": 676, |
|
161 | 161 | "repo_name": "user-group/repo-name", |
|
162 | 162 | "repo_type": "hg" |
|
163 | 163 | } |
|
164 | 164 | } |
|
165 | 165 | """ |
|
166 | 166 | |
|
167 | 167 | repo = get_repo_or_error(repoid) |
|
168 | 168 | cache = Optional.extract(cache) |
|
169 | 169 | |
|
170 | 170 | include_secrets = False |
|
171 | 171 | if has_superadmin_permission(apiuser): |
|
172 | 172 | include_secrets = True |
|
173 | 173 | else: |
|
174 | 174 | # check if we have at least read permission for this repo ! |
|
175 | 175 | _perms = ( |
|
176 | 176 | 'repository.admin', 'repository.write', 'repository.read',) |
|
177 | 177 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
178 | 178 | |
|
179 | 179 | permissions = [] |
|
180 | 180 | for _user in repo.permissions(): |
|
181 | 181 | user_data = { |
|
182 | 182 | 'name': _user.username, |
|
183 | 183 | 'permission': _user.permission, |
|
184 | 184 | 'origin': get_origin(_user), |
|
185 | 185 | 'type': "user", |
|
186 | 186 | } |
|
187 | 187 | permissions.append(user_data) |
|
188 | 188 | |
|
189 | 189 | for _user_group in repo.permission_user_groups(): |
|
190 | 190 | user_group_data = { |
|
191 | 191 | 'name': _user_group.users_group_name, |
|
192 | 192 | 'permission': _user_group.permission, |
|
193 | 193 | 'origin': get_origin(_user_group), |
|
194 | 194 | 'type': "user_group", |
|
195 | 195 | } |
|
196 | 196 | permissions.append(user_group_data) |
|
197 | 197 | |
|
198 | 198 | following_users = [ |
|
199 | 199 | user.user.get_api_data(include_secrets=include_secrets) |
|
200 | 200 | for user in repo.followers] |
|
201 | 201 | |
|
202 | 202 | if not cache: |
|
203 | 203 | repo.update_commit_cache() |
|
204 | 204 | data = repo.get_api_data(include_secrets=include_secrets) |
|
205 | 205 | data['permissions'] = permissions |
|
206 | 206 | data['followers'] = following_users |
|
207 | 207 | return data |
|
208 | 208 | |
|
209 | 209 | |
|
210 | 210 | @jsonrpc_method() |
|
211 | 211 | def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)): |
|
212 | 212 | """ |
|
213 | 213 | Lists all existing repositories. |
|
214 | 214 | |
|
215 | 215 | This command can only be run using an |authtoken| with admin rights, |
|
216 | 216 | or users with at least read rights to |repos|. |
|
217 | 217 | |
|
218 | 218 | :param apiuser: This is filled automatically from the |authtoken|. |
|
219 | 219 | :type apiuser: AuthUser |
|
220 | 220 | :param root: specify root repository group to fetch repositories. |
|
221 | 221 |     Filters the returned repositories to members of the given root group.
|
222 | 222 | :type root: Optional(None) |
|
223 | 223 | :param traverse: traverse given root into subrepositories. With this flag |
|
224 | 224 | set to False, it will only return top-level repositories from `root`. |
|
225 | 225 |     If root is empty, it will return just top-level repositories.
|
226 | 226 | :type traverse: Optional(True) |
|
227 | 227 | |
|
228 | 228 | |
|
229 | 229 | Example output: |
|
230 | 230 | |
|
231 | 231 | .. code-block:: bash |
|
232 | 232 | |
|
233 | 233 | id : <id_given_in_input> |
|
234 | 234 | result: [ |
|
235 | 235 | { |
|
236 | 236 | "repo_id" : "<repo_id>", |
|
237 | 237 | "repo_name" : "<reponame>" |
|
238 | 238 | "repo_type" : "<repo_type>", |
|
239 | 239 | "clone_uri" : "<clone_uri>", |
|
240 | 240 | "private": : "<bool>", |
|
241 | 241 | "created_on" : "<datetimecreated>", |
|
242 | 242 | "description" : "<description>", |
|
243 | 243 | "landing_rev": "<landing_rev>", |
|
244 | 244 | "owner": "<repo_owner>", |
|
245 | 245 | "fork_of": "<name_of_fork_parent>", |
|
246 | 246 | "enable_downloads": "<bool>", |
|
247 | 247 | "enable_locking": "<bool>", |
|
248 | 248 | "enable_statistics": "<bool>", |
|
249 | 249 | }, |
|
250 | 250 | ... |
|
251 | 251 | ] |
|
252 | 252 | error: null |
|
253 | 253 | """ |
|
254 | 254 | |
|
255 | 255 | include_secrets = has_superadmin_permission(apiuser) |
|
256 | 256 | _perms = ('repository.read', 'repository.write', 'repository.admin',) |
|
257 | 257 | extras = {'user': apiuser} |
|
258 | 258 | |
|
259 | 259 | root = Optional.extract(root) |
|
260 | 260 | traverse = Optional.extract(traverse, binary=True) |
|
261 | 261 | |
|
262 | 262 | if root: |
|
263 | 263 |         # verify parent existence, if it's missing return an error
|
264 | 264 | parent = RepoGroup.get_by_group_name(root) |
|
265 | 265 | if not parent: |
|
266 | 266 | raise JSONRPCError( |
|
267 | 267 | 'Root repository group `{}` does not exist'.format(root)) |
|
268 | 268 | |
|
269 | 269 | if traverse: |
|
270 | 270 | repos = RepoModel().get_repos_for_root(root=root, traverse=traverse) |
|
271 | 271 | else: |
|
272 | 272 | repos = RepoModel().get_repos_for_root(root=parent) |
|
273 | 273 | else: |
|
274 | 274 | if traverse: |
|
275 | 275 | repos = RepoModel().get_all() |
|
276 | 276 | else: |
|
277 | 277 | # return just top-level |
|
278 | 278 | repos = RepoModel().get_repos_for_root(root=None) |
|
279 | 279 | |
|
280 | 280 | repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras) |
|
281 | 281 | return [repo.get_api_data(include_secrets=include_secrets) |
|
282 | 282 | for repo in repo_list] |
|
283 | 283 | |
|
284 | 284 | |
|
285 | 285 | @jsonrpc_method() |
|
286 | 286 | def get_repo_changeset(request, apiuser, repoid, revision, |
|
287 | 287 | details=Optional('basic')): |
|
288 | 288 | """ |
|
289 | 289 | Returns information about a changeset. |
|
290 | 290 | |
|
291 | 291 | Additionally parameters define the amount of details returned by |
|
292 | 292 | this function. |
|
293 | 293 | |
|
294 | 294 | This command can only be run using an |authtoken| with admin rights, |
|
295 | 295 | or users with at least read rights to the |repo|. |
|
296 | 296 | |
|
297 | 297 | :param apiuser: This is filled automatically from the |authtoken|. |
|
298 | 298 | :type apiuser: AuthUser |
|
299 | 299 | :param repoid: The repository name or repository id |
|
300 | 300 | :type repoid: str or int |
|
301 | 301 | :param revision: revision for which listing should be done |
|
302 | 302 | :type revision: str |
|
303 | 303 |     :param details: details can be 'basic|extended|full'; 'full' gives diff

304 | 304 |         details such as the diff itself and the number of changed files.
|
305 | 305 | :type details: Optional(str) |
|
306 | 306 | |
|
307 | 307 | """ |
|
308 | 308 | repo = get_repo_or_error(repoid) |
|
309 | 309 | if not has_superadmin_permission(apiuser): |
|
310 | _perms = ( | |
|
311 | 'repository.admin', 'repository.write', 'repository.read',) | |
|
310 | _perms = ('repository.admin', 'repository.write', 'repository.read',) | |
|
312 | 311 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
313 | 312 | |
|
314 | 313 | changes_details = Optional.extract(details) |
|
315 | 314 | _changes_details_types = ['basic', 'extended', 'full'] |
|
316 | 315 | if changes_details not in _changes_details_types: |
|
317 | 316 | raise JSONRPCError( |
|
318 | 317 | 'ret_type must be one of %s' % ( |
|
319 | 318 | ','.join(_changes_details_types))) |
|
320 | 319 | |
|
321 | 320 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
322 | 321 | 'status', '_commit', '_file_paths'] |
|
323 | 322 | |
|
324 | 323 | try: |
|
325 | 324 | cs = repo.get_commit(commit_id=revision, pre_load=pre_load) |
|
326 | 325 | except TypeError as e: |
|
327 | 326 | raise JSONRPCError(safe_str(e)) |
|
328 | 327 | _cs_json = cs.__json__() |
|
329 | 328 | _cs_json['diff'] = build_commit_data(cs, changes_details) |
|
330 | 329 | if changes_details == 'full': |
|
331 | 330 | _cs_json['refs'] = cs._get_refs() |
|
332 | 331 | return _cs_json |
|
333 | 332 | |
|
334 | 333 | |
|
335 | 334 | @jsonrpc_method() |
|
336 | 335 | def get_repo_changesets(request, apiuser, repoid, start_rev, limit, |
|
337 | 336 | details=Optional('basic')): |
|
338 | 337 | """ |
|
339 | 338 | Returns a set of commits limited by the number starting |
|
340 | 339 | from the `start_rev` option. |
|
341 | 340 | |
|
342 | 341 | Additional parameters define the amount of details returned by this |
|
343 | 342 | function. |
|
344 | 343 | |
|
345 | 344 | This command can only be run using an |authtoken| with admin rights, |
|
346 | 345 | or users with at least read rights to |repos|. |
|
347 | 346 | |
|
348 | 347 | :param apiuser: This is filled automatically from the |authtoken|. |
|
349 | 348 | :type apiuser: AuthUser |
|
350 | 349 | :param repoid: The repository name or repository ID. |
|
351 | 350 | :type repoid: str or int |
|
352 | 351 | :param start_rev: The starting revision from where to get changesets. |
|
353 | 352 | :type start_rev: str |
|
354 | 353 | :param limit: Limit the number of commits to this amount |
|
355 | 354 | :type limit: str or int |
|
356 | 355 | :param details: Set the level of detail returned. Valid option are: |
|
357 | 356 | ``basic``, ``extended`` and ``full``. |
|
358 | 357 | :type details: Optional(str) |
|
359 | 358 | |
|
360 | 359 | .. note:: |
|
361 | 360 | |
|
362 | 361 | Setting the parameter `details` to the value ``full`` is extensive |
|
363 | 362 | and returns details like the diff itself, and the number |
|
364 | 363 | of changed files. |
|
365 | 364 | |
|
366 | 365 | """ |
|
367 | 366 | repo = get_repo_or_error(repoid) |
|
368 | 367 | if not has_superadmin_permission(apiuser): |
|
369 | _perms = ( | |
|
370 | 'repository.admin', 'repository.write', 'repository.read',) | |
|
368 | _perms = ('repository.admin', 'repository.write', 'repository.read',) | |
|
371 | 369 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
372 | 370 | |
|
373 | 371 | changes_details = Optional.extract(details) |
|
374 | 372 | _changes_details_types = ['basic', 'extended', 'full'] |
|
375 | 373 | if changes_details not in _changes_details_types: |
|
376 | 374 | raise JSONRPCError( |
|
377 | 375 | 'ret_type must be one of %s' % ( |
|
378 | 376 | ','.join(_changes_details_types))) |
|
379 | 377 | |
|
380 | 378 | limit = int(limit) |
|
381 | 379 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
382 | 380 | 'status', '_commit', '_file_paths'] |
|
383 | 381 | |
|
384 | 382 | vcs_repo = repo.scm_instance() |
|
385 | 383 | # SVN needs a special case to distinguish its index and commit id |
|
386 | 384 | if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): |
|
387 | 385 | start_rev = vcs_repo.commit_ids[0] |
|
388 | 386 | |
|
389 | 387 | try: |
|
390 | 388 | commits = vcs_repo.get_commits( |
|
391 | 389 | start_id=start_rev, pre_load=pre_load, translate_tags=False) |
|
392 | 390 | except TypeError as e: |
|
393 | 391 | raise JSONRPCError(safe_str(e)) |
|
394 | 392 | except Exception: |
|
395 | 393 | log.exception('Fetching of commits failed') |
|
396 | 394 | raise JSONRPCError('Error occurred during commit fetching') |
|
397 | 395 | |
|
398 | 396 | ret = [] |
|
399 | 397 | for cnt, commit in enumerate(commits): |
|
400 | 398 | if cnt >= limit != -1: |
|
401 | 399 | break |
|
402 | 400 | _cs_json = commit.__json__() |
|
403 | 401 | _cs_json['diff'] = build_commit_data(commit, changes_details) |
|
404 | 402 | if changes_details == 'full': |
|
405 | 403 | _cs_json['refs'] = { |
|
406 | 404 | 'branches': [commit.branch], |
|
407 | 405 | 'bookmarks': getattr(commit, 'bookmarks', []), |
|
408 | 406 | 'tags': commit.tags |
|
409 | 407 | } |
|
410 | 408 | ret.append(_cs_json) |
|
411 | 409 | return ret |
|
412 | 410 | |
|
413 | 411 | |
|
414 | 412 | @jsonrpc_method() |
|
415 | 413 | def get_repo_nodes(request, apiuser, repoid, revision, root_path, |
|
416 | 414 | ret_type=Optional('all'), details=Optional('basic'), |
|
417 | 415 | max_file_bytes=Optional(None)): |
|
418 | 416 | """ |
|
419 | 417 | Returns a list of nodes and children in a flat list for a given |
|
420 | 418 | path at given revision. |
|
421 | 419 | |
|
422 | 420 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
423 | 421 | |
|
424 | 422 | This command can only be run using an |authtoken| with admin rights, |
|
425 | 423 | or users with at least read rights to |repos|. |
|
426 | 424 | |
|
427 | 425 | :param apiuser: This is filled automatically from the |authtoken|. |
|
428 | 426 | :type apiuser: AuthUser |
|
429 | 427 | :param repoid: The repository name or repository ID. |
|
430 | 428 | :type repoid: str or int |
|
431 | 429 | :param revision: The revision for which listing should be done. |
|
432 | 430 | :type revision: str |
|
433 | 431 | :param root_path: The path from which to start displaying. |
|
434 | 432 | :type root_path: str |
|
435 | 433 | :param ret_type: Set the return type. Valid options are |
|
436 | 434 | ``all`` (default), ``files`` and ``dirs``. |
|
437 | 435 | :type ret_type: Optional(str) |
|
438 | 436 | :param details: Returns extended information about nodes, such as |
|
439 | 437 | md5, binary, and or content. |
|
440 | 438 |         md5, binary, and/or content.
|
441 | 439 | :type details: Optional(str) |
|
442 | 440 |     :param max_file_bytes: Only return file content under this file size (in bytes)

443 | 441 |     :type max_file_bytes: Optional(int)
|
444 | 442 | |
|
445 | 443 | Example output: |
|
446 | 444 | |
|
447 | 445 | .. code-block:: bash |
|
448 | 446 | |
|
449 | 447 | id : <id_given_in_input> |
|
450 | 448 | result: [ |
|
451 | 449 | { |
|
452 | 450 | "binary": false, |
|
453 | 451 | "content": "File line", |
|
454 | 452 | "extension": "md", |
|
455 | 453 | "lines": 2, |
|
456 | 454 | "md5": "059fa5d29b19c0657e384749480f6422", |
|
457 | 455 | "mimetype": "text/x-minidsrc", |
|
458 | 456 | "name": "file.md", |
|
459 | 457 | "size": 580, |
|
460 | 458 | "type": "file" |
|
461 | 459 | }, |
|
462 | 460 | ... |
|
463 | 461 | ] |
|
464 | 462 | error: null |
|
465 | 463 | """ |
|
466 | 464 | |
|
467 | 465 | repo = get_repo_or_error(repoid) |
|
468 | 466 | if not has_superadmin_permission(apiuser): |
|
469 | 467 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
470 | 468 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
471 | 469 | |
|
472 | 470 | ret_type = Optional.extract(ret_type) |
|
473 | 471 | details = Optional.extract(details) |
|
474 | 472 | _extended_types = ['basic', 'full'] |
|
475 | 473 | if details not in _extended_types: |
|
476 | 474 | raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types))) |
|
477 | 475 | extended_info = False |
|
478 | 476 | content = False |
|
479 | 477 | if details == 'basic': |
|
480 | 478 | extended_info = True |
|
481 | 479 | |
|
482 | 480 | if details == 'full': |
|
483 | 481 | extended_info = content = True |
|
484 | 482 | |
|
485 | 483 | _map = {} |
|
486 | 484 | try: |
|
487 | 485 | # check if repo is not empty by any chance, skip quicker if it is. |
|
488 | 486 | _scm = repo.scm_instance() |
|
489 | 487 | if _scm.is_empty(): |
|
490 | 488 | return [] |
|
491 | 489 | |
|
492 | 490 | _d, _f = ScmModel().get_nodes( |
|
493 | 491 | repo, revision, root_path, flat=False, |
|
494 | 492 | extended_info=extended_info, content=content, |
|
495 | 493 | max_file_bytes=max_file_bytes) |
|
496 | 494 | _map = { |
|
497 | 495 | 'all': _d + _f, |
|
498 | 496 | 'files': _f, |
|
499 | 497 | 'dirs': _d, |
|
500 | 498 | } |
|
501 | 499 | return _map[ret_type] |
|
502 | 500 | except KeyError: |
|
503 | 501 | raise JSONRPCError( |
|
504 | 502 | 'ret_type must be one of %s' % (','.join(sorted(_map.keys())))) |
|
505 | 503 | except Exception: |
|
506 | 504 | log.exception("Exception occurred while trying to get repo nodes") |
|
507 | 505 | raise JSONRPCError( |
|
508 | 506 | 'failed to get repo: `%s` nodes' % repo.repo_name |
|
509 | 507 | ) |
|
510 | 508 | |
|
511 | 509 | |
|
512 | 510 | @jsonrpc_method() |
|
513 | 511 | def get_repo_file(request, apiuser, repoid, commit_id, file_path, |
|
514 | 512 | max_file_bytes=Optional(None), details=Optional('basic'), |
|
515 | 513 | cache=Optional(True)): |
|
516 | 514 | """ |
|
517 | 515 | Returns a single file from repository at given revision. |
|
518 | 516 | |
|
519 | 517 | This command can only be run using an |authtoken| with admin rights, |
|
520 | 518 | or users with at least read rights to |repos|. |
|
521 | 519 | |
|
522 | 520 | :param apiuser: This is filled automatically from the |authtoken|. |
|
523 | 521 | :type apiuser: AuthUser |
|
524 | 522 | :param repoid: The repository name or repository ID. |
|
525 | 523 | :type repoid: str or int |
|
526 | 524 | :param commit_id: The revision for which listing should be done. |
|
527 | 525 | :type commit_id: str |
|
528 | 526 | :param file_path: The path from which to start displaying. |
|
529 | 527 | :type file_path: str |
|
530 | 528 | :param details: Returns different set of information about nodes. |
|
531 | 529 | The valid options are ``minimal`` ``basic`` and ``full``. |
|
532 | 530 | :type details: Optional(str) |
|
533 | 531 |     :param max_file_bytes: Only return file content under this file size (in bytes)
|
534 | 532 | :type max_file_bytes: Optional(int) |
|
535 | 533 |     :param cache: Use internal caches for fetching files. If disabled, fetching

536 | 534 |         files is slower but more memory efficient.
|
537 | 535 | :type cache: Optional(bool) |
|
538 | 536 | |
|
539 | 537 | Example output: |
|
540 | 538 | |
|
541 | 539 | .. code-block:: bash |
|
542 | 540 | |
|
543 | 541 | id : <id_given_in_input> |
|
544 | 542 | result: { |
|
545 | 543 | "binary": false, |
|
546 | 544 | "extension": "py", |
|
547 | 545 | "lines": 35, |
|
548 | 546 | "content": "....", |
|
549 | 547 | "md5": "76318336366b0f17ee249e11b0c99c41", |
|
550 | 548 | "mimetype": "text/x-python", |
|
551 | 549 | "name": "python.py", |
|
552 | 550 | "size": 817, |
|
553 | 551 | "type": "file", |
|
554 | 552 | } |
|
555 | 553 | error: null |
|
556 | 554 | """ |
|
557 | 555 | |
|
558 | 556 | repo = get_repo_or_error(repoid) |
|
559 | 557 | if not has_superadmin_permission(apiuser): |
|
560 | 558 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
561 | 559 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
562 | 560 | |
|
563 | 561 | cache = Optional.extract(cache, binary=True) |
|
564 | 562 | details = Optional.extract(details) |
|
565 | 563 | _extended_types = ['minimal', 'minimal+search', 'basic', 'full'] |
|
566 | 564 | if details not in _extended_types: |
|
567 | 565 | raise JSONRPCError( |
|
568 | 566 |             'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))
|
569 | 567 | extended_info = False |
|
570 | 568 | content = False |
|
571 | 569 | |
|
572 | 570 | if details == 'minimal': |
|
573 | 571 | extended_info = False |
|
574 | 572 | |
|
575 | 573 | elif details == 'basic': |
|
576 | 574 | extended_info = True |
|
577 | 575 | |
|
578 | 576 | elif details == 'full': |
|
579 | 577 | extended_info = content = True |
|
580 | 578 | |
|
581 | 579 | file_path = safe_unicode(file_path) |
|
582 | 580 | try: |
|
583 | 581 | # check if repo is not empty by any chance, skip quicker if it is. |
|
584 | 582 | _scm = repo.scm_instance() |
|
585 | 583 | if _scm.is_empty(): |
|
586 | 584 | return None |
|
587 | 585 | |
|
588 | 586 | node = ScmModel().get_node( |
|
589 | 587 | repo, commit_id, file_path, extended_info=extended_info, |
|
590 | 588 | content=content, max_file_bytes=max_file_bytes, cache=cache) |
|
591 | 589 | except NodeDoesNotExistError: |
|
592 | 590 | raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format( |
|
593 | 591 | repo.repo_name, file_path, commit_id)) |
|
594 | 592 | except Exception: |
|
595 | 593 | log.exception(u"Exception occurred while trying to get repo %s file", |
|
596 | 594 | repo.repo_name) |
|
597 | 595 | raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format( |
|
598 | 596 | repo.repo_name, file_path)) |
|
599 | 597 | |
|
600 | 598 | return node |
|
601 | 599 | |
|
602 | 600 | |
|
603 | 601 | @jsonrpc_method() |
|
604 | 602 | def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path): |
|
605 | 603 | """ |
|
606 | 604 | Returns a list of tree nodes for path at given revision. This api is built |
|
607 | 605 |     strictly for usage in full text search building, and shouldn't be consumed for other purposes.
|
608 | 606 | |
|
609 | 607 | This command can only be run using an |authtoken| with admin rights, |
|
610 | 608 | or users with at least read rights to |repos|. |
|
611 | 609 | |
|
612 | 610 | """ |
|
613 | 611 | |
|
614 | 612 | repo = get_repo_or_error(repoid) |
|
615 | 613 | if not has_superadmin_permission(apiuser): |
|
616 | 614 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
617 | 615 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
618 | 616 | |
|
619 | 617 | repo_id = repo.repo_id |
|
620 | 618 | cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time')) |
|
621 | 619 | cache_on = cache_seconds > 0 |
|
622 | 620 | |
|
623 | 621 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
624 | 622 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
625 | 623 | |
|
626 | 624 | def compute_fts_tree(cache_ver, repo_id, commit_id, root_path): |
|
627 | 625 | return ScmModel().get_fts_data(repo_id, commit_id, root_path) |
|
628 | 626 | |
|
629 | 627 | try: |
|
630 | 628 | # check if repo is not empty by any chance, skip quicker if it is. |
|
631 | 629 | _scm = repo.scm_instance() |
|
632 | 630 | if _scm.is_empty(): |
|
633 | 631 | return [] |
|
634 | 632 | except RepositoryError: |
|
635 | 633 | log.exception("Exception occurred while trying to get repo nodes") |
|
636 | 634 | raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name) |
|
637 | 635 | |
|
638 | 636 | try: |
|
639 | 637 | # we need to resolve commit_id to a FULL sha for cache to work correctly. |
|
640 | 638 | # sending 'master' is a pointer that needs to be translated to current commit. |
|
641 | 639 | commit_id = _scm.get_commit(commit_id=commit_id).raw_id |
|
642 | 640 | log.debug( |
|
643 | 641 | 'Computing FTS REPO TREE for repo_id %s commit_id `%s` ' |
|
644 | 642 | 'with caching: %s[TTL: %ss]' % ( |
|
645 | 643 | repo_id, commit_id, cache_on, cache_seconds or 0)) |
|
646 | 644 | |
|
647 | 645 | tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path) |
|
648 | 646 | return tree_files |
|
649 | 647 | |
|
650 | 648 | except Exception: |
|
651 | 649 | log.exception("Exception occurred while trying to get repo nodes") |
|
652 | 650 | raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name) |
|
653 | 651 | |
|
654 | 652 | |
|
655 | 653 | @jsonrpc_method() |
|
656 | 654 | def get_repo_refs(request, apiuser, repoid): |
|
657 | 655 | """ |
|
658 | 656 | Returns a dictionary of current references. It returns |
|
659 | 657 | bookmarks, branches, closed_branches, and tags for given repository |
|
660 | 658 | |
|
661 | 659 |     Each section maps reference names to the commit ids they point to.
|
662 | 660 | |
|
663 | 661 | This command can only be run using an |authtoken| with admin rights, |
|
664 | 662 | or users with at least read rights to |repos|. |
|
665 | 663 | |
|
666 | 664 | :param apiuser: This is filled automatically from the |authtoken|. |
|
667 | 665 | :type apiuser: AuthUser |
|
668 | 666 | :param repoid: The repository name or repository ID. |
|
669 | 667 | :type repoid: str or int |
|
670 | 668 | |
|
671 | 669 | Example output: |
|
672 | 670 | |
|
673 | 671 | .. code-block:: bash |
|
674 | 672 | |
|
675 | 673 | id : <id_given_in_input> |
|
676 | 674 | "result": { |
|
677 | 675 | "bookmarks": { |
|
678 | 676 | "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
679 | 677 | "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf" |
|
680 | 678 | }, |
|
681 | 679 | "branches": { |
|
682 | 680 | "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
683 | 681 | "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf" |
|
684 | 682 | }, |
|
685 | 683 | "branches_closed": {}, |
|
686 | 684 | "tags": { |
|
687 | 685 | "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
688 | 686 | "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022", |
|
689 | 687 | "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27", |
|
690 | 688 | "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17", |
|
691 | 689 | } |
|
692 | 690 | } |
|
693 | 691 | error: null |
|
694 | 692 | """ |
|
695 | 693 | |
|
696 | 694 | repo = get_repo_or_error(repoid) |
|
697 | 695 | if not has_superadmin_permission(apiuser): |
|
698 | 696 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
699 | 697 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
700 | 698 | |
|
701 | 699 | try: |
|
702 | 700 | # check if repo is not empty by any chance, skip quicker if it is. |
|
703 | 701 | vcs_instance = repo.scm_instance() |
|
704 | 702 | refs = vcs_instance.refs() |
|
705 | 703 | return refs |
|
706 | 704 | except Exception: |
|
707 | 705 | log.exception("Exception occurred while trying to get repo refs") |
|
708 | 706 | raise JSONRPCError( |
|
709 | 707 | 'failed to get repo: `%s` references' % repo.repo_name |
|
710 | 708 | ) |
|
711 | 709 | |
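
The refs returned here pair naturally with the `landing_rev` format used by `create_repo` and `update_repo` below (`branch:default`, `book:dev`, `rev:<sha>`). A sketch using the hypothetical `api_call` helper and placeholder names:

.. code-block:: python

    refs = api_call("get_repo_refs", repoid="user-group/repo-name")  # placeholder
    branches = refs["branches"]

    # prefer a "stable" branch when present, otherwise fall back to "default",
    # and express the choice in the <rev_type>:<rev> form expected by landing_rev
    branch_name = "stable" if "stable" in branches else "default"
    api_call("update_repo", repoid="user-group/repo-name",
             landing_rev="branch:{}".format(branch_name))
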
|
712 | 710 | |
|
713 | 711 | @jsonrpc_method() |
|
714 | 712 | def create_repo( |
|
715 | 713 | request, apiuser, repo_name, repo_type, |
|
716 | 714 | owner=Optional(OAttr('apiuser')), |
|
717 | 715 | description=Optional(''), |
|
718 | 716 | private=Optional(False), |
|
719 | 717 | clone_uri=Optional(None), |
|
720 | 718 | push_uri=Optional(None), |
|
721 | 719 | landing_rev=Optional(None), |
|
722 | 720 | enable_statistics=Optional(False), |
|
723 | 721 | enable_locking=Optional(False), |
|
724 | 722 | enable_downloads=Optional(False), |
|
725 | 723 | copy_permissions=Optional(False)): |
|
726 | 724 | """ |
|
727 | 725 | Creates a repository. |
|
728 | 726 | |
|
729 | 727 | * If the repository name contains "/", repository will be created inside |
|
730 | 728 | a repository group or nested repository groups |
|
731 | 729 | |
|
732 | 730 | For example "foo/bar/repo1" will create |repo| called "repo1" inside |
|
733 | 731 | group "foo/bar". You have to have permissions to access and write to |
|
734 | 732 | the last repository group ("bar" in this example) |
|
735 | 733 | |
|
736 | 734 | This command can only be run using an |authtoken| with at least |
|
737 | 735 | permissions to create repositories, or write permissions to |
|
738 | 736 | parent repository groups. |
|
739 | 737 | |
|
740 | 738 | :param apiuser: This is filled automatically from the |authtoken|. |
|
741 | 739 | :type apiuser: AuthUser |
|
742 | 740 | :param repo_name: Set the repository name. |
|
743 | 741 | :type repo_name: str |
|
744 | 742 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. |
|
745 | 743 | :type repo_type: str |
|
746 | 744 | :param owner: user_id or username |
|
747 | 745 | :type owner: Optional(str) |
|
748 | 746 | :param description: Set the repository description. |
|
749 | 747 | :type description: Optional(str) |
|
750 | 748 | :param private: set repository as private |
|
751 | 749 | :type private: bool |
|
752 | 750 | :param clone_uri: set clone_uri |
|
753 | 751 | :type clone_uri: str |
|
754 | 752 | :param push_uri: set push_uri |
|
755 | 753 | :type push_uri: str |
|
756 | 754 | :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd |
|
757 | 755 | :type landing_rev: str |
|
758 | 756 | :param enable_locking: |
|
759 | 757 | :type enable_locking: bool |
|
760 | 758 | :param enable_downloads: |
|
761 | 759 | :type enable_downloads: bool |
|
762 | 760 | :param enable_statistics: |
|
763 | 761 | :type enable_statistics: bool |
|
764 | 762 | :param copy_permissions: Copy permission from group in which the |
|
765 | 763 | repository is being created. |
|
766 | 764 | :type copy_permissions: bool |
|
767 | 765 | |
|
768 | 766 | |
|
769 | 767 | Example output: |
|
770 | 768 | |
|
771 | 769 | .. code-block:: bash |
|
772 | 770 | |
|
773 | 771 | id : <id_given_in_input> |
|
774 | 772 | result: { |
|
775 | 773 | "msg": "Created new repository `<reponame>`", |
|
776 | 774 | "success": true, |
|
777 | 775 | "task": "<celery task id or None if done sync>" |
|
778 | 776 | } |
|
779 | 777 | error: null |
|
780 | 778 | |
|
781 | 779 | |
|
782 | 780 | Example error output: |
|
783 | 781 | |
|
784 | 782 | .. code-block:: bash |
|
785 | 783 | |
|
786 | 784 | id : <id_given_in_input> |
|
787 | 785 | result : null |
|
788 | 786 | error : { |
|
789 | 787 | 'failed to create repository `<repo_name>`' |
|
790 | 788 | } |
|
791 | 789 | |
|
792 | 790 | """ |
|
793 | 791 | |
|
794 | 792 | owner = validate_set_owner_permissions(apiuser, owner) |
|
795 | 793 | |
|
796 | 794 | description = Optional.extract(description) |
|
797 | 795 | copy_permissions = Optional.extract(copy_permissions) |
|
798 | 796 | clone_uri = Optional.extract(clone_uri) |
|
799 | 797 | push_uri = Optional.extract(push_uri) |
|
800 | 798 | |
|
801 | 799 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
802 | 800 | if isinstance(private, Optional): |
|
803 | 801 | private = defs.get('repo_private') or Optional.extract(private) |
|
804 | 802 | if isinstance(repo_type, Optional): |
|
805 | 803 | repo_type = defs.get('repo_type') |
|
806 | 804 | if isinstance(enable_statistics, Optional): |
|
807 | 805 | enable_statistics = defs.get('repo_enable_statistics') |
|
808 | 806 | if isinstance(enable_locking, Optional): |
|
809 | 807 | enable_locking = defs.get('repo_enable_locking') |
|
810 | 808 | if isinstance(enable_downloads, Optional): |
|
811 | 809 | enable_downloads = defs.get('repo_enable_downloads') |
|
812 | 810 | |
|
813 | 811 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) |
|
814 | 812 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) |
|
815 | 813 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
816 | 814 | |
|
817 | 815 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref |
|
818 | 816 | |
|
819 | 817 | schema = repo_schema.RepoSchema().bind( |
|
820 | 818 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
821 | 819 | repo_ref_options=ref_choices, |
|
822 | 820 | repo_type=repo_type, |
|
823 | 821 | # user caller |
|
824 | 822 | user=apiuser) |
|
825 | 823 | |
|
826 | 824 | try: |
|
827 | 825 | schema_data = schema.deserialize(dict( |
|
828 | 826 | repo_name=repo_name, |
|
829 | 827 | repo_type=repo_type, |
|
830 | 828 | repo_owner=owner.username, |
|
831 | 829 | repo_description=description, |
|
832 | 830 | repo_landing_commit_ref=landing_commit_ref, |
|
833 | 831 | repo_clone_uri=clone_uri, |
|
834 | 832 | repo_push_uri=push_uri, |
|
835 | 833 | repo_private=private, |
|
836 | 834 | repo_copy_permissions=copy_permissions, |
|
837 | 835 | repo_enable_statistics=enable_statistics, |
|
838 | 836 | repo_enable_downloads=enable_downloads, |
|
839 | 837 | repo_enable_locking=enable_locking)) |
|
840 | 838 | except validation_schema.Invalid as err: |
|
841 | 839 | raise JSONRPCValidationError(colander_exc=err) |
|
842 | 840 | |
|
843 | 841 | try: |
|
844 | 842 | data = { |
|
845 | 843 | 'owner': owner, |
|
846 | 844 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
847 | 845 | 'repo_name_full': schema_data['repo_name'], |
|
848 | 846 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
849 | 847 | 'repo_type': schema_data['repo_type'], |
|
850 | 848 | 'repo_description': schema_data['repo_description'], |
|
851 | 849 | 'repo_private': schema_data['repo_private'], |
|
852 | 850 | 'clone_uri': schema_data['repo_clone_uri'], |
|
853 | 851 | 'push_uri': schema_data['repo_push_uri'], |
|
854 | 852 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], |
|
855 | 853 | 'enable_statistics': schema_data['repo_enable_statistics'], |
|
856 | 854 | 'enable_locking': schema_data['repo_enable_locking'], |
|
857 | 855 | 'enable_downloads': schema_data['repo_enable_downloads'], |
|
858 | 856 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], |
|
859 | 857 | } |
|
860 | 858 | |
|
861 | 859 | task = RepoModel().create(form_data=data, cur_user=owner.user_id) |
|
862 | 860 | task_id = get_task_id(task) |
|
863 | 861 | # no commit, it's done in RepoModel, or async via celery |
|
864 | 862 | return { |
|
865 | 863 | 'msg': "Created new repository `%s`" % (schema_data['repo_name'],), |
|
866 | 864 |             'success': True,  # cannot return the repo data here since creation
|
867 | 865 | # can be done async |
|
868 | 866 | 'task': task_id |
|
869 | 867 | } |
|
870 | 868 | except Exception: |
|
871 | 869 | log.exception( |
|
872 | 870 | u"Exception while trying to create the repository %s", |
|
873 | 871 | schema_data['repo_name']) |
|
874 | 872 | raise JSONRPCError( |
|
875 | 873 | 'failed to create repository `%s`' % (schema_data['repo_name'],)) |
|
876 | 874 | |
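
A sketch of creating a repository inside nested groups, as the docstring above describes (hypothetical `api_call` helper, placeholder names). Because creation may be handed off to Celery, the response carries a task id rather than the repository data:

.. code-block:: python

    result = api_call(
        "create_repo",
        repo_name="foo/bar/repo1",         # created inside the existing group "foo/bar"
        repo_type="git",
        description="example repository",  # placeholder
        private=True,
        landing_rev="branch:master",       # <rev_type>:<rev> form
        copy_permissions=True,             # inherit permissions from "foo/bar"
    )
    print(result["msg"], result["task"])   # task is None when creation ran synchronously
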
|
877 | 875 | |
|
878 | 876 | @jsonrpc_method() |
|
879 | 877 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), |
|
880 | 878 | description=Optional('')): |
|
881 | 879 | """ |
|
882 | 880 | Adds an extra field to a repository. |
|
883 | 881 | |
|
884 | 882 | This command can only be run using an |authtoken| with at least |
|
885 | 883 | write permissions to the |repo|. |
|
886 | 884 | |
|
887 | 885 | :param apiuser: This is filled automatically from the |authtoken|. |
|
888 | 886 | :type apiuser: AuthUser |
|
889 | 887 | :param repoid: Set the repository name or repository id. |
|
890 | 888 | :type repoid: str or int |
|
891 | 889 | :param key: Create a unique field key for this repository. |
|
892 | 890 | :type key: str |
|
893 | 891 | :param label: |
|
894 | 892 | :type label: Optional(str) |
|
895 | 893 | :param description: |
|
896 | 894 | :type description: Optional(str) |
|
897 | 895 | """ |
|
898 | 896 | repo = get_repo_or_error(repoid) |
|
899 | 897 | if not has_superadmin_permission(apiuser): |
|
900 | 898 | _perms = ('repository.admin',) |
|
901 | 899 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
902 | 900 | |
|
903 | 901 | label = Optional.extract(label) or key |
|
904 | 902 | description = Optional.extract(description) |
|
905 | 903 | |
|
906 | 904 | field = RepositoryField.get_by_key_name(key, repo) |
|
907 | 905 | if field: |
|
908 | 906 | raise JSONRPCError('Field with key ' |
|
909 | 907 | '`%s` exists for repo `%s`' % (key, repoid)) |
|
910 | 908 | |
|
911 | 909 | try: |
|
912 | 910 | RepoModel().add_repo_field(repo, key, field_label=label, |
|
913 | 911 | field_desc=description) |
|
914 | 912 | Session().commit() |
|
915 | 913 | return { |
|
916 | 914 | 'msg': "Added new repository field `%s`" % (key,), |
|
917 | 915 | 'success': True, |
|
918 | 916 | } |
|
919 | 917 | except Exception: |
|
920 | 918 | log.exception("Exception occurred while trying to add field to repo") |
|
921 | 919 | raise JSONRPCError( |
|
922 | 920 | 'failed to create new field for repository `%s`' % (repoid,)) |
|
923 | 921 | |
|
924 | 922 | |
|
925 | 923 | @jsonrpc_method() |
|
926 | 924 | def remove_field_from_repo(request, apiuser, repoid, key): |
|
927 | 925 | """ |
|
928 | 926 | Removes an extra field from a repository. |
|
929 | 927 | |
|
930 | 928 | This command can only be run using an |authtoken| with at least |
|
931 | 929 | write permissions to the |repo|. |
|
932 | 930 | |
|
933 | 931 | :param apiuser: This is filled automatically from the |authtoken|. |
|
934 | 932 | :type apiuser: AuthUser |
|
935 | 933 | :param repoid: Set the repository name or repository ID. |
|
936 | 934 | :type repoid: str or int |
|
937 | 935 | :param key: Set the unique field key for this repository. |
|
938 | 936 | :type key: str |
|
939 | 937 | """ |
|
940 | 938 | |
|
941 | 939 | repo = get_repo_or_error(repoid) |
|
942 | 940 | if not has_superadmin_permission(apiuser): |
|
943 | 941 | _perms = ('repository.admin',) |
|
944 | 942 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
945 | 943 | |
|
946 | 944 | field = RepositoryField.get_by_key_name(key, repo) |
|
947 | 945 | if not field: |
|
948 | 946 | raise JSONRPCError('Field with key `%s` does not ' |
|
949 | 947 |                            'exist for repo `%s`' % (key, repoid))
|
950 | 948 | |
|
951 | 949 | try: |
|
952 | 950 | RepoModel().delete_repo_field(repo, field_key=key) |
|
953 | 951 | Session().commit() |
|
954 | 952 | return { |
|
955 | 953 | 'msg': "Deleted repository field `%s`" % (key,), |
|
956 | 954 | 'success': True, |
|
957 | 955 | } |
|
958 | 956 | except Exception: |
|
959 | 957 | log.exception( |
|
960 | 958 | "Exception occurred while trying to delete field from repo") |
|
961 | 959 | raise JSONRPCError( |
|
962 | 960 | 'failed to delete field for repository `%s`' % (repoid,)) |
|
963 | 961 | |
|
964 | 962 | |
|
965 | 963 | @jsonrpc_method() |
|
966 | 964 | def update_repo( |
|
967 | 965 | request, apiuser, repoid, repo_name=Optional(None), |
|
968 | 966 | owner=Optional(OAttr('apiuser')), description=Optional(''), |
|
969 | 967 | private=Optional(False), |
|
970 | 968 | clone_uri=Optional(None), push_uri=Optional(None), |
|
971 | 969 | landing_rev=Optional(None), fork_of=Optional(None), |
|
972 | 970 | enable_statistics=Optional(False), |
|
973 | 971 | enable_locking=Optional(False), |
|
974 | 972 | enable_downloads=Optional(False), fields=Optional('')): |
|
975 | 973 | """ |
|
976 | 974 | Updates a repository with the given information. |
|
977 | 975 | |
|
978 | 976 | This command can only be run using an |authtoken| with at least |
|
979 | 977 | admin permissions to the |repo|. |
|
980 | 978 | |
|
981 | 979 | * If the repository name contains "/", repository will be updated |
|
982 | 980 | accordingly with a repository group or nested repository groups |
|
983 | 981 | |
|
984 | 982 | For example repoid=repo-test name="foo/bar/repo-test" will update |repo| |
|
985 | 983 | called "repo-test" and place it inside group "foo/bar". |
|
986 | 984 | You have to have permissions to access and write to the last repository |
|
987 | 985 | group ("bar" in this example) |
|
988 | 986 | |
|
989 | 987 | :param apiuser: This is filled automatically from the |authtoken|. |
|
990 | 988 | :type apiuser: AuthUser |
|
991 | 989 | :param repoid: repository name or repository ID. |
|
992 | 990 | :type repoid: str or int |
|
993 | 991 | :param repo_name: Update the |repo| name, including the |
|
994 | 992 | repository group it's in. |
|
995 | 993 | :type repo_name: str |
|
996 | 994 | :param owner: Set the |repo| owner. |
|
997 | 995 | :type owner: str |
|
998 | 996 | :param fork_of: Set the |repo| as fork of another |repo|. |
|
999 | 997 | :type fork_of: str |
|
1000 | 998 | :param description: Update the |repo| description. |
|
1001 | 999 | :type description: str |
|
1002 | 1000 | :param private: Set the |repo| as private. (True | False) |
|
1003 | 1001 | :type private: bool |
|
1004 | 1002 | :param clone_uri: Update the |repo| clone URI. |
|
1005 | 1003 | :type clone_uri: str |
|
1006 | 1004 | :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd |
|
1007 | 1005 | :type landing_rev: str |
|
1008 | 1006 | :param enable_statistics: Enable statistics on the |repo|, (True | False). |
|
1009 | 1007 | :type enable_statistics: bool |
|
1010 | 1008 | :param enable_locking: Enable |repo| locking. |
|
1011 | 1009 | :type enable_locking: bool |
|
1012 | 1010 | :param enable_downloads: Enable downloads from the |repo|, (True | False). |
|
1013 | 1011 | :type enable_downloads: bool |
|
1014 | 1012 | :param fields: Add extra fields to the |repo|. Use the following |
|
1015 | 1013 | example format: ``field_key=field_val,field_key2=fieldval2``. |
|
1016 | 1014 | Escape ', ' with \, |
|
1017 | 1015 | :type fields: str |
|
1018 | 1016 | """ |
|
1019 | 1017 | |
|
1020 | 1018 | repo = get_repo_or_error(repoid) |
|
1021 | 1019 | |
|
1022 | 1020 | include_secrets = False |
|
1023 | 1021 | if not has_superadmin_permission(apiuser): |
|
1024 |
|
|
|
1022 | _perms = ('repository.admin',) | |
|
1023 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1025 | 1024 | else: |
|
1026 | 1025 | include_secrets = True |
|
1027 | 1026 | |
|
1028 | 1027 | updates = dict( |
|
1029 | 1028 | repo_name=repo_name |
|
1030 | 1029 | if not isinstance(repo_name, Optional) else repo.repo_name, |
|
1031 | 1030 | |
|
1032 | 1031 | fork_id=fork_of |
|
1033 | 1032 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, |
|
1034 | 1033 | |
|
1035 | 1034 | user=owner |
|
1036 | 1035 | if not isinstance(owner, Optional) else repo.user.username, |
|
1037 | 1036 | |
|
1038 | 1037 | repo_description=description |
|
1039 | 1038 | if not isinstance(description, Optional) else repo.description, |
|
1040 | 1039 | |
|
1041 | 1040 | repo_private=private |
|
1042 | 1041 | if not isinstance(private, Optional) else repo.private, |
|
1043 | 1042 | |
|
1044 | 1043 | clone_uri=clone_uri |
|
1045 | 1044 | if not isinstance(clone_uri, Optional) else repo.clone_uri, |
|
1046 | 1045 | |
|
1047 | 1046 | push_uri=push_uri |
|
1048 | 1047 | if not isinstance(push_uri, Optional) else repo.push_uri, |
|
1049 | 1048 | |
|
1050 | 1049 | repo_landing_rev=landing_rev |
|
1051 | 1050 | if not isinstance(landing_rev, Optional) else repo._landing_revision, |
|
1052 | 1051 | |
|
1053 | 1052 | repo_enable_statistics=enable_statistics |
|
1054 | 1053 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, |
|
1055 | 1054 | |
|
1056 | 1055 | repo_enable_locking=enable_locking |
|
1057 | 1056 | if not isinstance(enable_locking, Optional) else repo.enable_locking, |
|
1058 | 1057 | |
|
1059 | 1058 | repo_enable_downloads=enable_downloads |
|
1060 | 1059 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) |
|
1061 | 1060 | |
|
1062 | 1061 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) |
|
1063 | 1062 | ref_choices, _labels = ScmModel().get_repo_landing_revs( |
|
1064 | 1063 | request.translate, repo=repo) |
|
1065 | 1064 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
1066 | 1065 | |
|
1067 | 1066 | old_values = repo.get_api_data() |
|
1068 | 1067 | repo_type = repo.repo_type |
|
1069 | 1068 | schema = repo_schema.RepoSchema().bind( |
|
1070 | 1069 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1071 | 1070 | repo_ref_options=ref_choices, |
|
1072 | 1071 | repo_type=repo_type, |
|
1073 | 1072 | # user caller |
|
1074 | 1073 | user=apiuser, |
|
1075 | 1074 | old_values=old_values) |
|
1076 | 1075 | try: |
|
1077 | 1076 | schema_data = schema.deserialize(dict( |
|
1078 | 1077 | # we save old value, users cannot change type |
|
1079 | 1078 | repo_type=repo_type, |
|
1080 | 1079 | |
|
1081 | 1080 | repo_name=updates['repo_name'], |
|
1082 | 1081 | repo_owner=updates['user'], |
|
1083 | 1082 | repo_description=updates['repo_description'], |
|
1084 | 1083 | repo_clone_uri=updates['clone_uri'], |
|
1085 | 1084 | repo_push_uri=updates['push_uri'], |
|
1086 | 1085 | repo_fork_of=updates['fork_id'], |
|
1087 | 1086 | repo_private=updates['repo_private'], |
|
1088 | 1087 | repo_landing_commit_ref=updates['repo_landing_rev'], |
|
1089 | 1088 | repo_enable_statistics=updates['repo_enable_statistics'], |
|
1090 | 1089 | repo_enable_downloads=updates['repo_enable_downloads'], |
|
1091 | 1090 | repo_enable_locking=updates['repo_enable_locking'])) |
|
1092 | 1091 | except validation_schema.Invalid as err: |
|
1093 | 1092 | raise JSONRPCValidationError(colander_exc=err) |
|
1094 | 1093 | |
|
1095 | 1094 | # save validated data back into the updates dict |
|
1096 | 1095 | validated_updates = dict( |
|
1097 | 1096 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
1098 | 1097 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
1099 | 1098 | |
|
1100 | 1099 | user=schema_data['repo_owner'], |
|
1101 | 1100 | repo_description=schema_data['repo_description'], |
|
1102 | 1101 | repo_private=schema_data['repo_private'], |
|
1103 | 1102 | clone_uri=schema_data['repo_clone_uri'], |
|
1104 | 1103 | push_uri=schema_data['repo_push_uri'], |
|
1105 | 1104 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
1106 | 1105 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
1107 | 1106 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
1108 | 1107 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
1109 | 1108 | ) |
|
1110 | 1109 | |
|
1111 | 1110 | if schema_data['repo_fork_of']: |
|
1112 | 1111 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) |
|
1113 | 1112 | validated_updates['fork_id'] = fork_repo.repo_id |
|
1114 | 1113 | |
|
1115 | 1114 | # extra fields |
|
1116 | 1115 | fields = parse_args(Optional.extract(fields), key_prefix='ex_') |
|
1117 | 1116 | if fields: |
|
1118 | 1117 | validated_updates.update(fields) |
|
1119 | 1118 | |
|
1120 | 1119 | try: |
|
1121 | 1120 | RepoModel().update(repo, **validated_updates) |
|
1122 | 1121 | audit_logger.store_api( |
|
1123 | 1122 | 'repo.edit', action_data={'old_data': old_values}, |
|
1124 | 1123 | user=apiuser, repo=repo) |
|
1125 | 1124 | Session().commit() |
|
1126 | 1125 | return { |
|
1127 | 1126 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), |
|
1128 | 1127 | 'repository': repo.get_api_data(include_secrets=include_secrets) |
|
1129 | 1128 | } |
|
1130 | 1129 | except Exception: |
|
1131 | 1130 | log.exception( |
|
1132 | 1131 | u"Exception while trying to update the repository %s", |
|
1133 | 1132 | repoid) |
|
1134 | 1133 | raise JSONRPCError('failed to update repo `%s`' % repoid) |
|
1135 | 1134 | |
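
A sketch of the `fields` syntax accepted by `update_repo` (hypothetical `api_call` helper, placeholder keys): extra fields are passed as one comma-separated string of `key=value` pairs, the keys must already exist on the repository (see `add_field_to_repo` above), and a literal ', ' inside a value is escaped as '\,'.

.. code-block:: python

    result = api_call(
        "update_repo",
        repoid="user-group/repo-name",   # placeholder
        description="updated description",
        # two placeholder extra fields, created beforehand with add_field_to_repo
        fields="ticket_url=https://tracker.example.com,team=backend",
    )
    print(result["msg"])
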
|
1136 | 1135 | |
|
1137 | 1136 | @jsonrpc_method() |
|
1138 | 1137 | def fork_repo(request, apiuser, repoid, fork_name, |
|
1139 | 1138 | owner=Optional(OAttr('apiuser')), |
|
1140 | 1139 | description=Optional(''), |
|
1141 | 1140 | private=Optional(False), |
|
1142 | 1141 | clone_uri=Optional(None), |
|
1143 | 1142 | landing_rev=Optional(None), |
|
1144 | 1143 | copy_permissions=Optional(False)): |
|
1145 | 1144 | """ |
|
1146 | 1145 | Creates a fork of the specified |repo|. |
|
1147 | 1146 | |
|
1148 | 1147 | * If the fork_name contains "/", fork will be created inside |
|
1149 | 1148 | a repository group or nested repository groups |
|
1150 | 1149 | |
|
1151 | 1150 | For example "foo/bar/fork-repo" will create fork called "fork-repo" |
|
1152 | 1151 | inside group "foo/bar". You have to have permissions to access and |
|
1153 | 1152 | write to the last repository group ("bar" in this example) |
|
1154 | 1153 | |
|
1155 | 1154 |     This command can only be run using an |authtoken| with at least

1156 | 1155 |     read permissions on the forked repo, and fork-creation permission for the user.
|
1157 | 1156 | |
|
1158 | 1157 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1159 | 1158 | :type apiuser: AuthUser |
|
1160 | 1159 | :param repoid: Set repository name or repository ID. |
|
1161 | 1160 | :type repoid: str or int |
|
1162 | 1161 |     :param fork_name: Set the fork name, including its repository group membership.
|
1163 | 1162 | :type fork_name: str |
|
1164 | 1163 | :param owner: Set the fork owner. |
|
1165 | 1164 | :type owner: str |
|
1166 | 1165 | :param description: Set the fork description. |
|
1167 | 1166 | :type description: str |
|
1168 | 1167 | :param copy_permissions: Copy permissions from parent |repo|. The |
|
1169 | 1168 | default is False. |
|
1170 | 1169 | :type copy_permissions: bool |
|
1171 | 1170 | :param private: Make the fork private. The default is False. |
|
1172 | 1171 | :type private: bool |
|
1173 | 1172 | :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd |
|
1174 | 1173 | |
|
1175 | 1174 | Example output: |
|
1176 | 1175 | |
|
1177 | 1176 | .. code-block:: bash |
|
1178 | 1177 | |
|
1179 | 1178 | id : <id_for_response> |
|
1180 | 1179 | api_key : "<api_key>" |
|
1181 | 1180 | args: { |
|
1182 | 1181 | "repoid" : "<reponame or repo_id>", |
|
1183 | 1182 | "fork_name": "<forkname>", |
|
1184 | 1183 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
1185 | 1184 | "description": "<description>", |
|
1186 | 1185 | "copy_permissions": "<bool>", |
|
1187 | 1186 | "private": "<bool>", |
|
1188 | 1187 | "landing_rev": "<landing_rev>" |
|
1189 | 1188 | } |
|
1190 | 1189 | |
|
1191 | 1190 | Example error output: |
|
1192 | 1191 | |
|
1193 | 1192 | .. code-block:: bash |
|
1194 | 1193 | |
|
1195 | 1194 | id : <id_given_in_input> |
|
1196 | 1195 | result: { |
|
1197 | 1196 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
1198 | 1197 | "success": true, |
|
1199 | 1198 | "task": "<celery task id or None if done sync>" |
|
1200 | 1199 | } |
|
1201 | 1200 | error: null |
|
1202 | 1201 | |
|
1203 | 1202 | """ |
|
1204 | 1203 | |
|
1205 | 1204 | repo = get_repo_or_error(repoid) |
|
1206 | 1205 | repo_name = repo.repo_name |
|
1207 | 1206 | |
|
1208 | 1207 | if not has_superadmin_permission(apiuser): |
|
1209 | 1208 | # check if we have at least read permission for |
|
1210 | 1209 | # this repo that we fork ! |
|
1211 | _perms = ( | |
|
1212 | 'repository.admin', 'repository.write', 'repository.read') | |
|
1210 | _perms = ('repository.admin', 'repository.write', 'repository.read') | |
|
1213 | 1211 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1214 | 1212 | |
|
1215 | 1213 | # check if the regular user has at least fork permissions as well |
|
1216 | 1214 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): |
|
1217 | 1215 | raise JSONRPCForbidden() |
|
1218 | 1216 | |
|
1219 | 1217 | # check if user can set owner parameter |
|
1220 | 1218 | owner = validate_set_owner_permissions(apiuser, owner) |
|
1221 | 1219 | |
|
1222 | 1220 | description = Optional.extract(description) |
|
1223 | 1221 | copy_permissions = Optional.extract(copy_permissions) |
|
1224 | 1222 | clone_uri = Optional.extract(clone_uri) |
|
1225 | 1223 | |
|
1226 | 1224 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) |
|
1227 | 1225 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) |
|
1228 | 1226 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
1229 | 1227 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref |
|
1230 | 1228 | |
|
1231 | 1229 | private = Optional.extract(private) |
|
1232 | 1230 | |
|
1233 | 1231 | schema = repo_schema.RepoSchema().bind( |
|
1234 | 1232 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1235 | 1233 | repo_ref_options=ref_choices, |
|
1236 | 1234 | repo_type=repo.repo_type, |
|
1237 | 1235 | # user caller |
|
1238 | 1236 | user=apiuser) |
|
1239 | 1237 | |
|
1240 | 1238 | try: |
|
1241 | 1239 | schema_data = schema.deserialize(dict( |
|
1242 | 1240 | repo_name=fork_name, |
|
1243 | 1241 | repo_type=repo.repo_type, |
|
1244 | 1242 | repo_owner=owner.username, |
|
1245 | 1243 | repo_description=description, |
|
1246 | 1244 | repo_landing_commit_ref=landing_commit_ref, |
|
1247 | 1245 | repo_clone_uri=clone_uri, |
|
1248 | 1246 | repo_private=private, |
|
1249 | 1247 | repo_copy_permissions=copy_permissions)) |
|
1250 | 1248 | except validation_schema.Invalid as err: |
|
1251 | 1249 | raise JSONRPCValidationError(colander_exc=err) |
|
1252 | 1250 | |
|
1253 | 1251 | try: |
|
1254 | 1252 | data = { |
|
1255 | 1253 | 'fork_parent_id': repo.repo_id, |
|
1256 | 1254 | |
|
1257 | 1255 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
1258 | 1256 | 'repo_name_full': schema_data['repo_name'], |
|
1259 | 1257 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
1260 | 1258 | 'repo_type': schema_data['repo_type'], |
|
1261 | 1259 | 'description': schema_data['repo_description'], |
|
1262 | 1260 | 'private': schema_data['repo_private'], |
|
1263 | 1261 | 'copy_permissions': schema_data['repo_copy_permissions'], |
|
1264 | 1262 | 'landing_rev': schema_data['repo_landing_commit_ref'], |
|
1265 | 1263 | } |
|
1266 | 1264 | |
|
1267 | 1265 | task = RepoModel().create_fork(data, cur_user=owner.user_id) |
|
1268 | 1266 | # no commit, it's done in RepoModel, or async via celery |
|
1269 | 1267 | task_id = get_task_id(task) |
|
1270 | 1268 | |
|
1271 | 1269 | return { |
|
1272 | 1270 | 'msg': 'Created fork of `%s` as `%s`' % ( |
|
1273 | 1271 | repo.repo_name, schema_data['repo_name']), |
|
1274 | 1272 | 'success': True, # cannot return the repo data here since fork |
|
1275 | 1273 | # can be done async |
|
1276 | 1274 | 'task': task_id |
|
1277 | 1275 | } |
|
1278 | 1276 | except Exception: |
|
1279 | 1277 | log.exception( |
|
1280 | 1278 | u"Exception while trying to create fork %s", |
|
1281 | 1279 | schema_data['repo_name']) |
|
1282 | 1280 | raise JSONRPCError( |
|
1283 | 1281 | 'failed to fork repository `%s` as `%s`' % ( |
|
1284 | 1282 | repo_name, schema_data['repo_name'])) |
|
1285 | 1283 | |
|
1286 | 1284 | |
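
A quick way to exercise ``fork_repo`` is over the JSON-RPC endpoint that the docstring examples above refer to. The sketch below assumes the standard ``/_admin/api`` path; the server URL, auth token, and repository names are placeholders, not values from this changeset.

.. code-block:: python

    import requests

    API_URL = 'https://code.example.com/_admin/api'  # placeholder instance
    payload = {
        'id': 1,
        'auth_token': 'SECRET_TOKEN',  # placeholder |authtoken|
        'method': 'fork_repo',
        'args': {
            'repoid': 'upstream/project',
            'fork_name': 'joe/project-fork',
            'description': 'personal fork for experiments',
        },
    }
    response = requests.post(API_URL, json=payload).json()
    # 'task' holds a celery task id when the fork is created asynchronously.
    print(response['result']['msg'], response['result']['task'])
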
|
1287 | 1285 | @jsonrpc_method() |
|
1288 | 1286 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1289 | 1287 | """ |
|
1290 | 1288 | Deletes a repository. |
|
1291 | 1289 | |
|
1292 | 1290 | * When the `forks` parameter is set it's possible to detach or delete |
|
1293 | 1291 | forks of deleted repository. |
|
1294 | 1292 | |
|
1295 | 1293 | This command can only be run using an |authtoken| with admin |
|
1296 | 1294 | permissions on the |repo|. |
|
1297 | 1295 | |
|
1298 | 1296 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1299 | 1297 | :type apiuser: AuthUser |
|
1300 | 1298 | :param repoid: Set the repository name or repository ID. |
|
1301 | 1299 | :type repoid: str or int |
|
1302 | 1300 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1303 | 1301 | :type forks: Optional(str) |
|
1304 | 1302 | |
|
1305 | 1303 |     Example output:
|
1306 | 1304 | |
|
1307 | 1305 | .. code-block:: bash |
|
1308 | 1306 | |
|
1309 | 1307 | id : <id_given_in_input> |
|
1310 | 1308 | result: { |
|
1311 | 1309 | "msg": "Deleted repository `<reponame>`", |
|
1312 | 1310 | "success": true |
|
1313 | 1311 | } |
|
1314 | 1312 | error: null |
|
1315 | 1313 | """ |
|
1316 | 1314 | |
|
1317 | 1315 | repo = get_repo_or_error(repoid) |
|
1318 | 1316 | repo_name = repo.repo_name |
|
1319 | 1317 | if not has_superadmin_permission(apiuser): |
|
1320 | 1318 | _perms = ('repository.admin',) |
|
1321 | 1319 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1322 | 1320 | |
|
1323 | 1321 | try: |
|
1324 | 1322 | handle_forks = Optional.extract(forks) |
|
1325 | 1323 | _forks_msg = '' |
|
1326 | 1324 | _forks = [f for f in repo.forks] |
|
1327 | 1325 | if handle_forks == 'detach': |
|
1328 | 1326 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1329 | 1327 | elif handle_forks == 'delete': |
|
1330 | 1328 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1331 | 1329 | elif _forks: |
|
1332 | 1330 | raise JSONRPCError( |
|
1333 | 1331 |                 'Cannot delete `%s`, it still contains attached forks' %
|
1334 | 1332 | (repo.repo_name,) |
|
1335 | 1333 | ) |
|
1336 | 1334 | old_data = repo.get_api_data() |
|
1337 | 1335 | RepoModel().delete(repo, forks=forks) |
|
1338 | 1336 | |
|
1339 | 1337 | repo = audit_logger.RepoWrap(repo_id=None, |
|
1340 | 1338 | repo_name=repo.repo_name) |
|
1341 | 1339 | |
|
1342 | 1340 | audit_logger.store_api( |
|
1343 | 1341 | 'repo.delete', action_data={'old_data': old_data}, |
|
1344 | 1342 | user=apiuser, repo=repo) |
|
1345 | 1343 | |
|
1346 | 1344 | ScmModel().mark_for_invalidation(repo_name, delete=True) |
|
1347 | 1345 | Session().commit() |
|
1348 | 1346 | return { |
|
1349 | 1347 | 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg), |
|
1350 | 1348 | 'success': True |
|
1351 | 1349 | } |
|
1352 | 1350 | except Exception: |
|
1353 | 1351 | log.exception("Exception occurred while trying to delete repo") |
|
1354 | 1352 | raise JSONRPCError( |
|
1355 | 1353 | 'failed to delete repository `%s`' % (repo_name,) |
|
1356 | 1354 | ) |
|
1357 | 1355 | |
|
1358 | 1356 | |
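
Because ``delete_repo`` refuses to remove a repository that still has attached forks unless told how to handle them, callers normally pass ``forks='detach'`` or ``forks='delete'``. A hedged sketch with placeholder URL and token:

.. code-block:: python

    import requests

    payload = {
        'id': 2,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'delete_repo',
        'args': {'repoid': 'joe/project-fork', 'forks': 'detach'},
    }
    result = requests.post('https://code.example.com/_admin/api', json=payload).json()
    # The msg also reports e.g. "Detached N forks" when forks were handled.
    print(result['result']['msg'])
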
|
1359 | 1357 | #TODO: marcink, change name ? |
|
1360 | 1358 | @jsonrpc_method() |
|
1361 | 1359 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): |
|
1362 | 1360 | """ |
|
1363 | 1361 | Invalidates the cache for the specified repository. |
|
1364 | 1362 | |
|
1365 | 1363 | This command can only be run using an |authtoken| with admin rights to |
|
1366 | 1364 | the specified repository. |
|
1367 | 1365 | |
|
1368 | 1366 | This command takes the following options: |
|
1369 | 1367 | |
|
1370 | 1368 | :param apiuser: This is filled automatically from |authtoken|. |
|
1371 | 1369 | :type apiuser: AuthUser |
|
1372 | 1370 | :param repoid: Sets the repository name or repository ID. |
|
1373 | 1371 | :type repoid: str or int |
|
1374 | 1372 | :param delete_keys: This deletes the invalidated keys instead of |
|
1375 | 1373 | just flagging them. |
|
1376 | 1374 | :type delete_keys: Optional(``True`` | ``False``) |
|
1377 | 1375 | |
|
1378 | 1376 | Example output: |
|
1379 | 1377 | |
|
1380 | 1378 | .. code-block:: bash |
|
1381 | 1379 | |
|
1382 | 1380 | id : <id_given_in_input> |
|
1383 | 1381 | result : { |
|
1384 | 1382 | 'msg': Cache for repository `<repository name>` was invalidated, |
|
1385 | 1383 | 'repository': <repository name> |
|
1386 | 1384 | } |
|
1387 | 1385 | error : null |
|
1388 | 1386 | |
|
1389 | 1387 | Example error output: |
|
1390 | 1388 | |
|
1391 | 1389 | .. code-block:: bash |
|
1392 | 1390 | |
|
1393 | 1391 | id : <id_given_in_input> |
|
1394 | 1392 | result : null |
|
1395 | 1393 | error : { |
|
1396 | 1394 | 'Error occurred during cache invalidation action' |
|
1397 | 1395 | } |
|
1398 | 1396 | |
|
1399 | 1397 | """ |
|
1400 | 1398 | |
|
1401 | 1399 | repo = get_repo_or_error(repoid) |
|
1402 | 1400 | if not has_superadmin_permission(apiuser): |
|
1403 | 1401 | _perms = ('repository.admin', 'repository.write',) |
|
1404 | 1402 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1405 | 1403 | |
|
1406 | 1404 | delete = Optional.extract(delete_keys) |
|
1407 | 1405 | try: |
|
1408 | 1406 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) |
|
1409 | 1407 | return { |
|
1410 | 1408 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), |
|
1411 | 1409 | 'repository': repo.repo_name |
|
1412 | 1410 | } |
|
1413 | 1411 | except Exception: |
|
1414 | 1412 | log.exception( |
|
1415 | 1413 | "Exception occurred while trying to invalidate repo cache") |
|
1416 | 1414 | raise JSONRPCError( |
|
1417 | 1415 | 'Error occurred during cache invalidation action' |
|
1418 | 1416 | ) |
|
1419 | 1417 | |
|
1420 | 1418 | |
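
``invalidate_cache`` only flags cache keys by default; passing ``delete_keys=True`` removes them outright. A minimal sketch with placeholder credentials:

.. code-block:: python

    import requests

    payload = {
        'id': 3,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'invalidate_cache',
        'args': {'repoid': 'upstream/project', 'delete_keys': True},
    }
    print(requests.post('https://code.example.com/_admin/api', json=payload).json())
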
|
1421 | 1419 | #TODO: marcink, change name ? |
|
1422 | 1420 | @jsonrpc_method() |
|
1423 | 1421 | def lock(request, apiuser, repoid, locked=Optional(None), |
|
1424 | 1422 | userid=Optional(OAttr('apiuser'))): |
|
1425 | 1423 | """ |
|
1426 | 1424 | Sets the lock state of the specified |repo| by the given user. |
|
1427 | 1425 |     For more information, see :ref:`repo-locking`.
|
1428 | 1426 | |
|
1429 | 1427 | * If the ``userid`` option is not set, the repository is locked to the |
|
1430 | 1428 | user who called the method. |
|
1431 | 1429 | * If the ``locked`` parameter is not set, the current lock state of the |
|
1432 | 1430 | repository is displayed. |
|
1433 | 1431 | |
|
1434 | 1432 | This command can only be run using an |authtoken| with admin rights to |
|
1435 | 1433 | the specified repository. |
|
1436 | 1434 | |
|
1437 | 1435 | This command takes the following options: |
|
1438 | 1436 | |
|
1439 | 1437 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1440 | 1438 | :type apiuser: AuthUser |
|
1441 | 1439 | :param repoid: Sets the repository name or repository ID. |
|
1442 | 1440 | :type repoid: str or int |
|
1443 | 1441 | :param locked: Sets the lock state. |
|
1444 | 1442 | :type locked: Optional(``True`` | ``False``) |
|
1445 | 1443 | :param userid: Set the repository lock to this user. |
|
1446 | 1444 | :type userid: Optional(str or int) |
|
1447 | 1445 | |
|
1448 | 1446 |     Example output:
|
1449 | 1447 | |
|
1450 | 1448 | .. code-block:: bash |
|
1451 | 1449 | |
|
1452 | 1450 | id : <id_given_in_input> |
|
1453 | 1451 | result : { |
|
1454 | 1452 | 'repo': '<reponame>', |
|
1455 | 1453 | 'locked': <bool: lock state>, |
|
1456 | 1454 | 'locked_since': <int: lock timestamp>, |
|
1457 | 1455 | 'locked_by': <username of person who made the lock>, |
|
1458 | 1456 | 'lock_reason': <str: reason for locking>, |
|
1459 | 1457 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, |
|
1460 | 1458 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' |
|
1461 | 1459 | or |
|
1462 | 1460 | 'msg': 'Repo `<repository name>` not locked.' |
|
1463 | 1461 | or |
|
1464 | 1462 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' |
|
1465 | 1463 | } |
|
1466 | 1464 | error : null |
|
1467 | 1465 | |
|
1468 | 1466 | Example error output: |
|
1469 | 1467 | |
|
1470 | 1468 | .. code-block:: bash |
|
1471 | 1469 | |
|
1472 | 1470 | id : <id_given_in_input> |
|
1473 | 1471 | result : null |
|
1474 | 1472 | error : { |
|
1475 | 1473 | 'Error occurred locking repository `<reponame>`' |
|
1476 | 1474 | } |
|
1477 | 1475 | """ |
|
1478 | 1476 | |
|
1479 | 1477 | repo = get_repo_or_error(repoid) |
|
1480 | 1478 | if not has_superadmin_permission(apiuser): |
|
1481 | 1479 | # check if we have at least write permission for this repo ! |
|
1482 | 1480 | _perms = ('repository.admin', 'repository.write',) |
|
1483 | 1481 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1484 | 1482 | |
|
1485 | 1483 | # make sure normal user does not pass someone else userid, |
|
1486 | 1484 | # he is not allowed to do that |
|
1487 | 1485 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
1488 | 1486 | raise JSONRPCError('userid is not the same as your user') |
|
1489 | 1487 | |
|
1490 | 1488 | if isinstance(userid, Optional): |
|
1491 | 1489 | userid = apiuser.user_id |
|
1492 | 1490 | |
|
1493 | 1491 | user = get_user_or_error(userid) |
|
1494 | 1492 | |
|
1495 | 1493 | if isinstance(locked, Optional): |
|
1496 | 1494 | lockobj = repo.locked |
|
1497 | 1495 | |
|
1498 | 1496 | if lockobj[0] is None: |
|
1499 | 1497 | _d = { |
|
1500 | 1498 | 'repo': repo.repo_name, |
|
1501 | 1499 | 'locked': False, |
|
1502 | 1500 | 'locked_since': None, |
|
1503 | 1501 | 'locked_by': None, |
|
1504 | 1502 | 'lock_reason': None, |
|
1505 | 1503 | 'lock_state_changed': False, |
|
1506 | 1504 | 'msg': 'Repo `%s` not locked.' % repo.repo_name |
|
1507 | 1505 | } |
|
1508 | 1506 | return _d |
|
1509 | 1507 | else: |
|
1510 | 1508 | _user_id, _time, _reason = lockobj |
|
1511 | 1509 | lock_user = get_user_or_error(userid) |
|
1512 | 1510 | _d = { |
|
1513 | 1511 | 'repo': repo.repo_name, |
|
1514 | 1512 | 'locked': True, |
|
1515 | 1513 | 'locked_since': _time, |
|
1516 | 1514 | 'locked_by': lock_user.username, |
|
1517 | 1515 | 'lock_reason': _reason, |
|
1518 | 1516 | 'lock_state_changed': False, |
|
1519 | 1517 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
1520 | 1518 | % (repo.repo_name, lock_user.username, |
|
1521 | 1519 | json.dumps(time_to_datetime(_time)))) |
|
1522 | 1520 | } |
|
1523 | 1521 | return _d |
|
1524 | 1522 | |
|
1525 | 1523 | # force locked state through a flag |
|
1526 | 1524 | else: |
|
1527 | 1525 | locked = str2bool(locked) |
|
1528 | 1526 | lock_reason = Repository.LOCK_API |
|
1529 | 1527 | try: |
|
1530 | 1528 | if locked: |
|
1531 | 1529 | lock_time = time.time() |
|
1532 | 1530 | Repository.lock(repo, user.user_id, lock_time, lock_reason) |
|
1533 | 1531 | else: |
|
1534 | 1532 | lock_time = None |
|
1535 | 1533 | Repository.unlock(repo) |
|
1536 | 1534 | _d = { |
|
1537 | 1535 | 'repo': repo.repo_name, |
|
1538 | 1536 | 'locked': locked, |
|
1539 | 1537 | 'locked_since': lock_time, |
|
1540 | 1538 | 'locked_by': user.username, |
|
1541 | 1539 | 'lock_reason': lock_reason, |
|
1542 | 1540 | 'lock_state_changed': True, |
|
1543 | 1541 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
1544 | 1542 | % (user.username, repo.repo_name, locked)) |
|
1545 | 1543 | } |
|
1546 | 1544 | return _d |
|
1547 | 1545 | except Exception: |
|
1548 | 1546 | log.exception( |
|
1549 | 1547 | "Exception occurred while trying to lock repository") |
|
1550 | 1548 | raise JSONRPCError( |
|
1551 | 1549 | 'Error occurred locking repository `%s`' % repo.repo_name |
|
1552 | 1550 | ) |
|
1553 | 1551 | |
|
1554 | 1552 | |
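
Calling ``lock`` without the ``locked`` argument only reports the current state; passing ``locked`` actually changes it, as the two branches above show. A minimal sketch with a placeholder URL and token:

.. code-block:: python

    import requests

    API_URL = 'https://code.example.com/_admin/api'  # placeholder

    def api_call(method, args, rpc_id=1, token='SECRET_TOKEN'):
        # Small helper building the JSON-RPC POST body used in these examples.
        body = {'id': rpc_id, 'auth_token': token, 'method': method, 'args': args}
        return requests.post(API_URL, json=body).json()

    # Query the current lock state (lock_state_changed stays False).
    print(api_call('lock', {'repoid': 'upstream/project'}))
    # Acquire the lock for the calling user.
    print(api_call('lock', {'repoid': 'upstream/project', 'locked': True}))
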
|
1555 | 1553 | @jsonrpc_method() |
|
1556 | 1554 | def comment_commit( |
|
1557 | 1555 | request, apiuser, repoid, commit_id, message, status=Optional(None), |
|
1558 | 1556 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
1559 | 1557 | resolves_comment_id=Optional(None), extra_recipients=Optional([]), |
|
1560 | 1558 | userid=Optional(OAttr('apiuser')), send_email=Optional(True)): |
|
1561 | 1559 | """ |
|
1562 | 1560 | Set a commit comment, and optionally change the status of the commit. |
|
1563 | 1561 | |
|
1564 | 1562 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1565 | 1563 | :type apiuser: AuthUser |
|
1566 | 1564 | :param repoid: Set the repository name or repository ID. |
|
1567 | 1565 | :type repoid: str or int |
|
1568 | 1566 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1569 | 1567 | :type commit_id: str |
|
1570 | 1568 | :param message: The comment text. |
|
1571 | 1569 | :type message: str |
|
1572 | 1570 | :param status: (**Optional**) status of commit, one of: 'not_reviewed', |
|
1573 | 1571 | 'approved', 'rejected', 'under_review' |
|
1574 | 1572 | :type status: str |
|
1575 | 1573 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
1576 | 1574 | :type comment_type: Optional(str), default: 'note' |
|
1577 | 1575 | :param resolves_comment_id: id of comment which this one will resolve |
|
1578 | 1576 | :type resolves_comment_id: Optional(int) |
|
1579 | 1577 | :param extra_recipients: list of user ids or usernames to add |
|
1580 | 1578 | notifications for this comment. Acts like a CC for notification |
|
1581 | 1579 | :type extra_recipients: Optional(list) |
|
1582 | 1580 | :param userid: Set the user name of the comment creator. |
|
1583 | 1581 | :type userid: Optional(str or int) |
|
1584 | 1582 | :param send_email: Define if this comment should also send email notification |
|
1585 | 1583 | :type send_email: Optional(bool) |
|
1586 | 1584 | |
|
1587 | 1585 |     Example output:
|
1588 | 1586 | |
|
1589 | 1587 | .. code-block:: bash |
|
1590 | 1588 | |
|
1591 | 1589 | { |
|
1592 | 1590 | "id" : <id_given_in_input>, |
|
1593 | 1591 | "result" : { |
|
1594 | 1592 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1595 | 1593 | "status_change": null or <status>, |
|
1596 | 1594 | "success": true |
|
1597 | 1595 | }, |
|
1598 | 1596 | "error" : null |
|
1599 | 1597 | } |
|
1600 | 1598 | |
|
1601 | 1599 | """ |
|
1602 | 1600 | repo = get_repo_or_error(repoid) |
|
1603 | 1601 | if not has_superadmin_permission(apiuser): |
|
1604 | 1602 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1605 | 1603 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1606 | 1604 | |
|
1607 | 1605 | try: |
|
1608 | 1606 | commit = repo.scm_instance().get_commit(commit_id=commit_id) |
|
1609 | 1607 | commit_id = commit.raw_id |
|
1610 | 1608 | except Exception as e: |
|
1611 | 1609 | log.exception('Failed to fetch commit') |
|
1612 | 1610 | raise JSONRPCError(safe_str(e)) |
|
1613 | 1611 | |
|
1614 | 1612 | if isinstance(userid, Optional): |
|
1615 | 1613 | userid = apiuser.user_id |
|
1616 | 1614 | |
|
1617 | 1615 | user = get_user_or_error(userid) |
|
1618 | 1616 | status = Optional.extract(status) |
|
1619 | 1617 | comment_type = Optional.extract(comment_type) |
|
1620 | 1618 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
1621 | 1619 | extra_recipients = Optional.extract(extra_recipients) |
|
1622 | 1620 | send_email = Optional.extract(send_email, binary=True) |
|
1623 | 1621 | |
|
1624 | 1622 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1625 | 1623 | if status and status not in allowed_statuses: |
|
1626 | 1624 |         raise JSONRPCError('Bad status, must be one '
|
1627 | 1625 | 'of %s got %s' % (allowed_statuses, status,)) |
|
1628 | 1626 | |
|
1629 | 1627 | if resolves_comment_id: |
|
1630 | 1628 | comment = ChangesetComment.get(resolves_comment_id) |
|
1631 | 1629 | if not comment: |
|
1632 | 1630 | raise JSONRPCError( |
|
1633 | 1631 | 'Invalid resolves_comment_id `%s` for this commit.' |
|
1634 | 1632 | % resolves_comment_id) |
|
1635 | 1633 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
1636 | 1634 | raise JSONRPCError( |
|
1637 | 1635 | 'Comment `%s` is wrong type for setting status to resolved.' |
|
1638 | 1636 | % resolves_comment_id) |
|
1639 | 1637 | |
|
1640 | 1638 | try: |
|
1641 | 1639 | rc_config = SettingsModel().get_all_settings() |
|
1642 | 1640 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1643 | 1641 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1644 | 1642 | comment = CommentsModel().create( |
|
1645 | 1643 | message, repo, user, commit_id=commit_id, |
|
1646 | 1644 | status_change=status_change_label, |
|
1647 | 1645 | status_change_type=status, |
|
1648 | 1646 | renderer=renderer, |
|
1649 | 1647 | comment_type=comment_type, |
|
1650 | 1648 | resolves_comment_id=resolves_comment_id, |
|
1651 | 1649 | auth_user=apiuser, |
|
1652 | 1650 | extra_recipients=extra_recipients, |
|
1653 | 1651 | send_email=send_email |
|
1654 | 1652 | ) |
|
1655 | 1653 | if status: |
|
1656 | 1654 | # also do a status change |
|
1657 | 1655 | try: |
|
1658 | 1656 | ChangesetStatusModel().set_status( |
|
1659 | 1657 | repo, status, user, comment, revision=commit_id, |
|
1660 | 1658 | dont_allow_on_closed_pull_request=True |
|
1661 | 1659 | ) |
|
1662 | 1660 | except StatusChangeOnClosedPullRequestError: |
|
1663 | 1661 | log.exception( |
|
1664 | 1662 | "Exception occurred while trying to change repo commit status") |
|
1665 | 1663 | msg = ('Changing status on a commit associated with ' |
|
1666 | 1664 | 'a closed pull request is not allowed') |
|
1667 | 1665 | raise JSONRPCError(msg) |
|
1668 | 1666 | |
|
1669 | 1667 | CommentsModel().trigger_commit_comment_hook( |
|
1670 | 1668 | repo, apiuser, 'create', |
|
1671 | 1669 | data={'comment': comment, 'commit': commit}) |
|
1672 | 1670 | |
|
1673 | 1671 | Session().commit() |
|
1674 | 1672 | return { |
|
1675 | 1673 | 'msg': ( |
|
1676 | 1674 | 'Commented on commit `%s` for repository `%s`' % ( |
|
1677 | 1675 | comment.revision, repo.repo_name)), |
|
1678 | 1676 | 'status_change': status, |
|
1679 | 1677 | 'success': True, |
|
1680 | 1678 | } |
|
1681 | 1679 | except JSONRPCError: |
|
1682 | 1680 | # catch any inside errors, and re-raise them to prevent from |
|
1683 | 1681 | # below global catch to silence them |
|
1684 | 1682 | raise |
|
1685 | 1683 | except Exception: |
|
1686 | 1684 | log.exception("Exception occurred while trying to comment on commit") |
|
1687 | 1685 | raise JSONRPCError( |
|
1688 | 1686 | 'failed to set comment on repository `%s`' % (repo.repo_name,) |
|
1689 | 1687 | ) |
|
1690 | 1688 | |
|
1691 | 1689 | |
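
``comment_commit`` can both leave a note and flip the commit status in one call; when ``status`` is given it must be one of the allowed ``ChangesetStatus`` values listed in the docstring. A sketch with placeholder values:

.. code-block:: python

    import requests

    payload = {
        'id': 5,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'comment_commit',
        'args': {
            'repoid': 'upstream/project',
            'commit_id': 'deadbeefcafe',          # placeholder commit hash
            'message': 'Looks good, approving.',
            'status': 'approved',
        },
    }
    print(requests.post('https://code.example.com/_admin/api', json=payload).json())
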
|
1692 | 1690 | @jsonrpc_method() |
|
1693 | 1691 | def get_repo_comments(request, apiuser, repoid, |
|
1694 | 1692 | commit_id=Optional(None), comment_type=Optional(None), |
|
1695 | 1693 | userid=Optional(None)): |
|
1696 | 1694 | """ |
|
1697 | 1695 | Get all comments for a repository |
|
1698 | 1696 | |
|
1699 | 1697 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1700 | 1698 | :type apiuser: AuthUser |
|
1701 | 1699 | :param repoid: Set the repository name or repository ID. |
|
1702 | 1700 | :type repoid: str or int |
|
1703 | 1701 | :param commit_id: Optionally filter the comments by the commit_id |
|
1704 | 1702 | :type commit_id: Optional(str), default: None |
|
1705 | 1703 | :param comment_type: Optionally filter the comments by the comment_type |
|
1706 | 1704 | one of: 'note', 'todo' |
|
1707 | 1705 | :type comment_type: Optional(str), default: None |
|
1708 | 1706 | :param userid: Optionally filter the comments by the author of comment |
|
1709 | 1707 | :type userid: Optional(str or int), Default: None |
|
1710 | 1708 | |
|
1711 | 1709 |     Example output:
|
1712 | 1710 | |
|
1713 | 1711 | .. code-block:: bash |
|
1714 | 1712 | |
|
1715 | 1713 | { |
|
1716 | 1714 | "id" : <id_given_in_input>, |
|
1717 | 1715 | "result" : [ |
|
1718 | 1716 | { |
|
1719 | 1717 | "comment_author": <USER_DETAILS>, |
|
1720 | 1718 | "comment_created_on": "2017-02-01T14:38:16.309", |
|
1721 | 1719 | "comment_f_path": "file.txt", |
|
1722 | 1720 | "comment_id": 282, |
|
1723 | 1721 | "comment_lineno": "n1", |
|
1724 | 1722 | "comment_resolved_by": null, |
|
1725 | 1723 | "comment_status": [], |
|
1726 | 1724 | "comment_text": "This file needs a header", |
|
1727 | 1725 | "comment_type": "todo", |
|
1728 | 1726 | "comment_last_version: 0 |
|
1729 | 1727 | } |
|
1730 | 1728 | ], |
|
1731 | 1729 | "error" : null |
|
1732 | 1730 | } |
|
1733 | 1731 | |
|
1734 | 1732 | """ |
|
1735 | 1733 | repo = get_repo_or_error(repoid) |
|
1736 | 1734 | if not has_superadmin_permission(apiuser): |
|
1737 | 1735 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1738 | 1736 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1739 | 1737 | |
|
1740 | 1738 | commit_id = Optional.extract(commit_id) |
|
1741 | 1739 | |
|
1742 | 1740 | userid = Optional.extract(userid) |
|
1743 | 1741 | if userid: |
|
1744 | 1742 | user = get_user_or_error(userid) |
|
1745 | 1743 | else: |
|
1746 | 1744 | user = None |
|
1747 | 1745 | |
|
1748 | 1746 | comment_type = Optional.extract(comment_type) |
|
1749 | 1747 | if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES: |
|
1750 | 1748 | raise JSONRPCError( |
|
1751 | 1749 | 'comment_type must be one of `{}` got {}'.format( |
|
1752 | 1750 | ChangesetComment.COMMENT_TYPES, comment_type) |
|
1753 | 1751 | ) |
|
1754 | 1752 | |
|
1755 | 1753 | comments = CommentsModel().get_repository_comments( |
|
1756 | 1754 | repo=repo, comment_type=comment_type, user=user, commit_id=commit_id) |
|
1757 | 1755 | return comments |
|
1758 | 1756 | |
|
1759 | 1757 | |
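
Filtering is optional; for instance, listing only ``todo`` comments for a repository narrows the result considerably. A sketch with placeholder values:

.. code-block:: python

    import requests

    payload = {
        'id': 6,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'get_repo_comments',
        'args': {'repoid': 'upstream/project', 'comment_type': 'todo'},
    }
    comments = requests.post('https://code.example.com/_admin/api', json=payload).json()['result']
    for comment in comments:
        print(comment['comment_id'], comment['comment_text'])
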
|
1760 | 1758 | @jsonrpc_method() |
|
1761 | 1759 | def get_comment(request, apiuser, comment_id): |
|
1762 | 1760 | """ |
|
1763 | 1761 | Get single comment from repository or pull_request |
|
1764 | 1762 | |
|
1765 | 1763 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1766 | 1764 | :type apiuser: AuthUser |
|
1767 | 1765 | :param comment_id: comment id found in the URL of comment |
|
1768 | 1766 | :type comment_id: str or int |
|
1769 | 1767 | |
|
1770 | 1768 |     Example output:
|
1771 | 1769 | |
|
1772 | 1770 | .. code-block:: bash |
|
1773 | 1771 | |
|
1774 | 1772 | { |
|
1775 | 1773 | "id" : <id_given_in_input>, |
|
1776 | 1774 | "result" : { |
|
1777 | 1775 | "comment_author": <USER_DETAILS>, |
|
1778 | 1776 | "comment_created_on": "2017-02-01T14:38:16.309", |
|
1779 | 1777 | "comment_f_path": "file.txt", |
|
1780 | 1778 | "comment_id": 282, |
|
1781 | 1779 | "comment_lineno": "n1", |
|
1782 | 1780 | "comment_resolved_by": null, |
|
1783 | 1781 | "comment_status": [], |
|
1784 | 1782 | "comment_text": "This file needs a header", |
|
1785 | 1783 | "comment_type": "todo", |
|
1786 | 1784 | "comment_last_version: 0 |
|
1787 | 1785 | }, |
|
1788 | 1786 | "error" : null |
|
1789 | 1787 | } |
|
1790 | 1788 | |
|
1791 | 1789 | """ |
|
1792 | 1790 | |
|
1793 | 1791 | comment = ChangesetComment.get(comment_id) |
|
1794 | 1792 | if not comment: |
|
1795 | 1793 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1796 | 1794 | |
|
1797 | 1795 | perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1798 | 1796 | has_comment_perm = HasRepoPermissionAnyApi(*perms)\ |
|
1799 | 1797 | (user=apiuser, repo_name=comment.repo.repo_name) |
|
1800 | 1798 | |
|
1801 | 1799 | if not has_comment_perm: |
|
1802 | 1800 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1803 | 1801 | |
|
1804 | 1802 | return comment |
|
1805 | 1803 | |
|
1806 | 1804 | |
|
1807 | 1805 | @jsonrpc_method() |
|
1808 | 1806 | def edit_comment(request, apiuser, message, comment_id, version, |
|
1809 | 1807 | userid=Optional(OAttr('apiuser'))): |
|
1810 | 1808 | """ |
|
1811 | 1809 | Edit comment on the pull request or commit, |
|
1812 | 1810 | specified by the `comment_id` and version. Initially version should be 0 |
|
1813 | 1811 | |
|
1814 | 1812 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1815 | 1813 | :type apiuser: AuthUser |
|
1816 | 1814 | :param comment_id: Specify the comment_id for editing |
|
1817 | 1815 | :type comment_id: int |
|
1818 | 1816 | :param version: version of the comment that will be created, starts from 0 |
|
1819 | 1817 | :type version: int |
|
1820 | 1818 | :param message: The text content of the comment. |
|
1821 | 1819 | :type message: str |
|
1822 | 1820 | :param userid: Comment on the pull request as this user |
|
1823 | 1821 | :type userid: Optional(str or int) |
|
1824 | 1822 | |
|
1825 | 1823 | Example output: |
|
1826 | 1824 | |
|
1827 | 1825 | .. code-block:: bash |
|
1828 | 1826 | |
|
1829 | 1827 | id : <id_given_in_input> |
|
1830 | 1828 | result : { |
|
1831 | 1829 | "comment": "<comment data>", |
|
1832 | 1830 | "version": "<Integer>", |
|
1833 | 1831 | }, |
|
1834 | 1832 | error : null |
|
1835 | 1833 | """ |
|
1836 | 1834 | |
|
1837 | 1835 | auth_user = apiuser |
|
1838 | 1836 | comment = ChangesetComment.get(comment_id) |
|
1839 | 1837 | if not comment: |
|
1840 | 1838 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1841 | 1839 | |
|
1842 | 1840 | is_super_admin = has_superadmin_permission(apiuser) |
|
1843 | 1841 | is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1844 | 1842 | (user=apiuser, repo_name=comment.repo.repo_name) |
|
1845 | 1843 | |
|
1846 | 1844 | if not isinstance(userid, Optional): |
|
1847 | 1845 | if is_super_admin or is_repo_admin: |
|
1848 | 1846 | apiuser = get_user_or_error(userid) |
|
1849 | 1847 | auth_user = apiuser.AuthUser() |
|
1850 | 1848 | else: |
|
1851 | 1849 | raise JSONRPCError('userid is not the same as your user') |
|
1852 | 1850 | |
|
1853 | 1851 | comment_author = comment.author.user_id == auth_user.user_id |
|
1854 | 1852 | if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): |
|
1855 | 1853 | raise JSONRPCError("you don't have access to edit this comment") |
|
1856 | 1854 | |
|
1857 | 1855 | try: |
|
1858 | 1856 | comment_history = CommentsModel().edit( |
|
1859 | 1857 | comment_id=comment_id, |
|
1860 | 1858 | text=message, |
|
1861 | 1859 | auth_user=auth_user, |
|
1862 | 1860 | version=version, |
|
1863 | 1861 | ) |
|
1864 | 1862 | Session().commit() |
|
1865 | 1863 | except CommentVersionMismatch: |
|
1866 | 1864 | raise JSONRPCError( |
|
1867 | 1865 | 'comment ({}) version ({}) mismatch'.format(comment_id, version) |
|
1868 | 1866 | ) |
|
1869 | 1867 | if not comment_history and not message: |
|
1870 | 1868 | raise JSONRPCError( |
|
1871 | 1869 | "comment ({}) can't be changed with empty string".format(comment_id) |
|
1872 | 1870 | ) |
|
1873 | 1871 | |
|
1874 | 1872 | if comment.pull_request: |
|
1875 | 1873 | pull_request = comment.pull_request |
|
1876 | 1874 | PullRequestModel().trigger_pull_request_hook( |
|
1877 | 1875 | pull_request, apiuser, 'comment_edit', |
|
1878 | 1876 | data={'comment': comment}) |
|
1879 | 1877 | else: |
|
1880 | 1878 | db_repo = comment.repo |
|
1881 | 1879 | commit_id = comment.revision |
|
1882 | 1880 | commit = db_repo.get_commit(commit_id) |
|
1883 | 1881 | CommentsModel().trigger_commit_comment_hook( |
|
1884 | 1882 | db_repo, apiuser, 'edit', |
|
1885 | 1883 | data={'comment': comment, 'commit': commit}) |
|
1886 | 1884 | |
|
1887 | 1885 | data = { |
|
1888 | 1886 | 'comment': comment, |
|
1889 | 1887 | 'version': comment_history.version if comment_history else None, |
|
1890 | 1888 | } |
|
1891 | 1889 | return data |
|
1892 | 1890 | |
|
1893 | 1891 | |
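
``edit_comment`` is versioned: the ``version`` argument has to match the comment's current version (``comment_last_version`` in the ``get_comment`` output), otherwise the call fails with the version-mismatch error above. A sketch with placeholder values:

.. code-block:: python

    import requests

    API_URL = 'https://code.example.com/_admin/api'  # placeholder
    TOKEN = 'SECRET_TOKEN'                            # placeholder

    # Read the current version first, then submit the edit against it.
    current = requests.post(API_URL, json={
        'id': 7, 'auth_token': TOKEN, 'method': 'get_comment',
        'args': {'comment_id': 282}}).json()['result']
    edited = requests.post(API_URL, json={
        'id': 8, 'auth_token': TOKEN, 'method': 'edit_comment',
        'args': {'comment_id': 282,
                 'message': 'Updated wording.',
                 'version': current['comment_last_version']}}).json()
    print(edited['result'])
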
|
1894 | 1892 | # TODO(marcink): write this with all required logic for deleting a comments in PR or commits |
|
1895 | 1893 | # @jsonrpc_method() |
|
1896 | 1894 | # def delete_comment(request, apiuser, comment_id): |
|
1897 | 1895 | # auth_user = apiuser |
|
1898 | 1896 | # |
|
1899 | 1897 | # comment = ChangesetComment.get(comment_id) |
|
1900 | 1898 | # if not comment: |
|
1901 | 1899 | # raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1902 | 1900 | # |
|
1903 | 1901 | # is_super_admin = has_superadmin_permission(apiuser) |
|
1904 | 1902 | # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1905 | 1903 | # (user=apiuser, repo_name=comment.repo.repo_name) |
|
1906 | 1904 | # |
|
1907 | 1905 | # comment_author = comment.author.user_id == auth_user.user_id |
|
1908 | 1906 | # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): |
|
1909 | 1907 | # raise JSONRPCError("you don't have access to edit this comment") |
|
1910 | 1908 | |
|
1911 | 1909 | @jsonrpc_method() |
|
1912 | 1910 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1913 | 1911 | """ |
|
1914 | 1912 | Grant permissions for the specified user on the given repository, |
|
1915 | 1913 | or update existing permissions if found. |
|
1916 | 1914 | |
|
1917 | 1915 | This command can only be run using an |authtoken| with admin |
|
1918 | 1916 | permissions on the |repo|. |
|
1919 | 1917 | |
|
1920 | 1918 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1921 | 1919 | :type apiuser: AuthUser |
|
1922 | 1920 | :param repoid: Set the repository name or repository ID. |
|
1923 | 1921 | :type repoid: str or int |
|
1924 | 1922 | :param userid: Set the user name. |
|
1925 | 1923 | :type userid: str |
|
1926 | 1924 | :param perm: Set the user permissions, using the following format |
|
1927 | 1925 | ``(repository.(none|read|write|admin))`` |
|
1928 | 1926 | :type perm: str |
|
1929 | 1927 | |
|
1930 | 1928 | Example output: |
|
1931 | 1929 | |
|
1932 | 1930 | .. code-block:: bash |
|
1933 | 1931 | |
|
1934 | 1932 | id : <id_given_in_input> |
|
1935 | 1933 | result: { |
|
1936 | 1934 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1937 | 1935 | "success": true |
|
1938 | 1936 | } |
|
1939 | 1937 | error: null |
|
1940 | 1938 | """ |
|
1941 | 1939 | |
|
1942 | 1940 | repo = get_repo_or_error(repoid) |
|
1943 | 1941 | user = get_user_or_error(userid) |
|
1944 | 1942 | perm = get_perm_or_error(perm) |
|
1945 | 1943 | if not has_superadmin_permission(apiuser): |
|
1946 | 1944 | _perms = ('repository.admin',) |
|
1947 | 1945 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1948 | 1946 | |
|
1949 | 1947 | perm_additions = [[user.user_id, perm.permission_name, "user"]] |
|
1950 | 1948 | try: |
|
1951 | 1949 | changes = RepoModel().update_permissions( |
|
1952 | 1950 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) |
|
1953 | 1951 | |
|
1954 | 1952 | action_data = { |
|
1955 | 1953 | 'added': changes['added'], |
|
1956 | 1954 | 'updated': changes['updated'], |
|
1957 | 1955 | 'deleted': changes['deleted'], |
|
1958 | 1956 | } |
|
1959 | 1957 | audit_logger.store_api( |
|
1960 | 1958 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
1961 | 1959 | Session().commit() |
|
1962 | 1960 | PermissionModel().flush_user_permission_caches(changes) |
|
1963 | 1961 | |
|
1964 | 1962 | return { |
|
1965 | 1963 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1966 | 1964 | perm.permission_name, user.username, repo.repo_name |
|
1967 | 1965 | ), |
|
1968 | 1966 | 'success': True |
|
1969 | 1967 | } |
|
1970 | 1968 | except Exception: |
|
1971 | 1969 |         log.exception("Exception occurred while trying to edit permissions for repo")
|
1972 | 1970 | raise JSONRPCError( |
|
1973 | 1971 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1974 | 1972 | userid, repoid |
|
1975 | 1973 | ) |
|
1976 | 1974 | ) |
|
1977 | 1975 | |
|
1978 | 1976 | |
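
Granting and revoking follow the same call shape; only the ``perm`` argument (``repository.none|read|write|admin``) distinguishes the grant. A sketch with placeholder values:

.. code-block:: python

    import requests

    payload = {
        'id': 9,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'grant_user_permission',
        'args': {'repoid': 'upstream/project', 'userid': 'joe',
                 'perm': 'repository.write'},
    }
    response = requests.post('https://code.example.com/_admin/api', json=payload).json()
    print(response['result']['msg'])
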
|
1979 | 1977 | @jsonrpc_method() |
|
1980 | 1978 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
1981 | 1979 | """ |
|
1982 | 1980 | Revoke permission for a user on the specified repository. |
|
1983 | 1981 | |
|
1984 | 1982 | This command can only be run using an |authtoken| with admin |
|
1985 | 1983 | permissions on the |repo|. |
|
1986 | 1984 | |
|
1987 | 1985 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1988 | 1986 | :type apiuser: AuthUser |
|
1989 | 1987 | :param repoid: Set the repository name or repository ID. |
|
1990 | 1988 | :type repoid: str or int |
|
1991 | 1989 | :param userid: Set the user name of revoked user. |
|
1992 | 1990 | :type userid: str or int |
|
1993 | 1991 | |
|
1994 | 1992 |     Example output:
|
1995 | 1993 | |
|
1996 | 1994 | .. code-block:: bash |
|
1997 | 1995 | |
|
1998 | 1996 | id : <id_given_in_input> |
|
1999 | 1997 | result: { |
|
2000 | 1998 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
2001 | 1999 | "success": true |
|
2002 | 2000 | } |
|
2003 | 2001 | error: null |
|
2004 | 2002 | """ |
|
2005 | 2003 | |
|
2006 | 2004 | repo = get_repo_or_error(repoid) |
|
2007 | 2005 | user = get_user_or_error(userid) |
|
2008 | 2006 | if not has_superadmin_permission(apiuser): |
|
2009 | 2007 | _perms = ('repository.admin',) |
|
2010 | 2008 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2011 | 2009 | |
|
2012 | 2010 | perm_deletions = [[user.user_id, None, "user"]] |
|
2013 | 2011 | try: |
|
2014 | 2012 | changes = RepoModel().update_permissions( |
|
2015 | 2013 | repo=repo, perm_deletions=perm_deletions, cur_user=user) |
|
2016 | 2014 | |
|
2017 | 2015 | action_data = { |
|
2018 | 2016 | 'added': changes['added'], |
|
2019 | 2017 | 'updated': changes['updated'], |
|
2020 | 2018 | 'deleted': changes['deleted'], |
|
2021 | 2019 | } |
|
2022 | 2020 | audit_logger.store_api( |
|
2023 | 2021 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2024 | 2022 | Session().commit() |
|
2025 | 2023 | PermissionModel().flush_user_permission_caches(changes) |
|
2026 | 2024 | |
|
2027 | 2025 | return { |
|
2028 | 2026 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
2029 | 2027 | user.username, repo.repo_name |
|
2030 | 2028 | ), |
|
2031 | 2029 | 'success': True |
|
2032 | 2030 | } |
|
2033 | 2031 | except Exception: |
|
2034 | 2032 |         log.exception("Exception occurred while trying to revoke permissions on repo")
|
2035 | 2033 | raise JSONRPCError( |
|
2036 | 2034 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
2037 | 2035 | userid, repoid |
|
2038 | 2036 | ) |
|
2039 | 2037 | ) |
|
2040 | 2038 | |
|
2041 | 2039 | |
|
2042 | 2040 | @jsonrpc_method() |
|
2043 | 2041 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
2044 | 2042 | """ |
|
2045 | 2043 | Grant permission for a user group on the specified repository, |
|
2046 | 2044 | or update existing permissions. |
|
2047 | 2045 | |
|
2048 | 2046 | This command can only be run using an |authtoken| with admin |
|
2049 | 2047 | permissions on the |repo|. |
|
2050 | 2048 | |
|
2051 | 2049 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2052 | 2050 | :type apiuser: AuthUser |
|
2053 | 2051 | :param repoid: Set the repository name or repository ID. |
|
2054 | 2052 | :type repoid: str or int |
|
2055 | 2053 | :param usergroupid: Specify the ID of the user group. |
|
2056 | 2054 | :type usergroupid: str or int |
|
2057 | 2055 | :param perm: Set the user group permissions using the following |
|
2058 | 2056 | format: (repository.(none|read|write|admin)) |
|
2059 | 2057 | :type perm: str |
|
2060 | 2058 | |
|
2061 | 2059 | Example output: |
|
2062 | 2060 | |
|
2063 | 2061 | .. code-block:: bash |
|
2064 | 2062 | |
|
2065 | 2063 | id : <id_given_in_input> |
|
2066 | 2064 | result : { |
|
2067 | 2065 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2068 | 2066 | "success": true |
|
2069 | 2067 | |
|
2070 | 2068 | } |
|
2071 | 2069 | error : null |
|
2072 | 2070 | |
|
2073 | 2071 | Example error output: |
|
2074 | 2072 | |
|
2075 | 2073 | .. code-block:: bash |
|
2076 | 2074 | |
|
2077 | 2075 | id : <id_given_in_input> |
|
2078 | 2076 | result : null |
|
2079 | 2077 | error : { |
|
2080 | 2078 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
2081 | 2079 | } |
|
2082 | 2080 | |
|
2083 | 2081 | """ |
|
2084 | 2082 | |
|
2085 | 2083 | repo = get_repo_or_error(repoid) |
|
2086 | 2084 | perm = get_perm_or_error(perm) |
|
2087 | 2085 | if not has_superadmin_permission(apiuser): |
|
2088 | 2086 | _perms = ('repository.admin',) |
|
2089 | 2087 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2090 | 2088 | |
|
2091 | 2089 | user_group = get_user_group_or_error(usergroupid) |
|
2092 | 2090 | if not has_superadmin_permission(apiuser): |
|
2093 | 2091 | # check if we have at least read permission for this user group ! |
|
2094 | 2092 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2095 | 2093 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2096 | 2094 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2097 | 2095 | raise JSONRPCError( |
|
2098 | 2096 | 'user group `%s` does not exist' % (usergroupid,)) |
|
2099 | 2097 | |
|
2100 | 2098 | perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]] |
|
2101 | 2099 | try: |
|
2102 | 2100 | changes = RepoModel().update_permissions( |
|
2103 | 2101 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) |
|
2104 | 2102 | action_data = { |
|
2105 | 2103 | 'added': changes['added'], |
|
2106 | 2104 | 'updated': changes['updated'], |
|
2107 | 2105 | 'deleted': changes['deleted'], |
|
2108 | 2106 | } |
|
2109 | 2107 | audit_logger.store_api( |
|
2110 | 2108 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2111 | 2109 | Session().commit() |
|
2112 | 2110 | PermissionModel().flush_user_permission_caches(changes) |
|
2113 | 2111 | |
|
2114 | 2112 | return { |
|
2115 | 2113 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
2116 | 2114 | 'repo: `%s`' % ( |
|
2117 | 2115 | perm.permission_name, user_group.users_group_name, |
|
2118 | 2116 | repo.repo_name |
|
2119 | 2117 | ), |
|
2120 | 2118 | 'success': True |
|
2121 | 2119 | } |
|
2122 | 2120 | except Exception: |
|
2123 | 2121 | log.exception( |
|
2124 | 2122 | "Exception occurred while trying change permission on repo") |
|
2125 | 2123 | raise JSONRPCError( |
|
2126 | 2124 | 'failed to edit permission for user group: `%s` in ' |
|
2127 | 2125 | 'repo: `%s`' % ( |
|
2128 | 2126 | usergroupid, repo.repo_name |
|
2129 | 2127 | ) |
|
2130 | 2128 | ) |
|
2131 | 2129 | |
|
2132 | 2130 | |
|
2133 | 2131 | @jsonrpc_method() |
|
2134 | 2132 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
2135 | 2133 | """ |
|
2136 | 2134 | Revoke the permissions of a user group on a given repository. |
|
2137 | 2135 | |
|
2138 | 2136 | This command can only be run using an |authtoken| with admin |
|
2139 | 2137 | permissions on the |repo|. |
|
2140 | 2138 | |
|
2141 | 2139 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2142 | 2140 | :type apiuser: AuthUser |
|
2143 | 2141 | :param repoid: Set the repository name or repository ID. |
|
2144 | 2142 | :type repoid: str or int |
|
2145 | 2143 | :param usergroupid: Specify the user group ID. |
|
2146 | 2144 | :type usergroupid: str or int |
|
2147 | 2145 | |
|
2148 | 2146 | Example output: |
|
2149 | 2147 | |
|
2150 | 2148 | .. code-block:: bash |
|
2151 | 2149 | |
|
2152 | 2150 | id : <id_given_in_input> |
|
2153 | 2151 | result: { |
|
2154 | 2152 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2155 | 2153 | "success": true |
|
2156 | 2154 | } |
|
2157 | 2155 | error: null |
|
2158 | 2156 | """ |
|
2159 | 2157 | |
|
2160 | 2158 | repo = get_repo_or_error(repoid) |
|
2161 | 2159 | if not has_superadmin_permission(apiuser): |
|
2162 | 2160 | _perms = ('repository.admin',) |
|
2163 | 2161 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2164 | 2162 | |
|
2165 | 2163 | user_group = get_user_group_or_error(usergroupid) |
|
2166 | 2164 | if not has_superadmin_permission(apiuser): |
|
2167 | 2165 | # check if we have at least read permission for this user group ! |
|
2168 | 2166 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2169 | 2167 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2170 | 2168 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2171 | 2169 | raise JSONRPCError( |
|
2172 | 2170 | 'user group `%s` does not exist' % (usergroupid,)) |
|
2173 | 2171 | |
|
2174 | 2172 | perm_deletions = [[user_group.users_group_id, None, "user_group"]] |
|
2175 | 2173 | try: |
|
2176 | 2174 | changes = RepoModel().update_permissions( |
|
2177 | 2175 | repo=repo, perm_deletions=perm_deletions, cur_user=apiuser) |
|
2178 | 2176 | action_data = { |
|
2179 | 2177 | 'added': changes['added'], |
|
2180 | 2178 | 'updated': changes['updated'], |
|
2181 | 2179 | 'deleted': changes['deleted'], |
|
2182 | 2180 | } |
|
2183 | 2181 | audit_logger.store_api( |
|
2184 | 2182 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2185 | 2183 | Session().commit() |
|
2186 | 2184 | PermissionModel().flush_user_permission_caches(changes) |
|
2187 | 2185 | |
|
2188 | 2186 | return { |
|
2189 | 2187 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
2190 | 2188 | user_group.users_group_name, repo.repo_name |
|
2191 | 2189 | ), |
|
2192 | 2190 | 'success': True |
|
2193 | 2191 | } |
|
2194 | 2192 | except Exception: |
|
2195 | 2193 |         log.exception("Exception occurred while trying to revoke "
|
2196 | 2194 | "user group permission on repo") |
|
2197 | 2195 | raise JSONRPCError( |
|
2198 | 2196 | 'failed to edit permission for user group: `%s` in ' |
|
2199 | 2197 | 'repo: `%s`' % ( |
|
2200 | 2198 | user_group.users_group_name, repo.repo_name |
|
2201 | 2199 | ) |
|
2202 | 2200 | ) |
|
2203 | 2201 | |
|
2204 | 2202 | |
|
2205 | 2203 | @jsonrpc_method() |
|
2206 | 2204 | def pull(request, apiuser, repoid, remote_uri=Optional(None)): |
|
2207 | 2205 | """ |
|
2208 | 2206 | Triggers a pull on the given repository from a remote location. You |
|
2209 | 2207 | can use this to keep remote repositories up-to-date. |
|
2210 | 2208 | |
|
2211 | 2209 | This command can only be run using an |authtoken| with admin |
|
2212 | 2210 | rights to the specified repository. For more information, |
|
2213 | 2211 | see :ref:`config-token-ref`. |
|
2214 | 2212 | |
|
2215 | 2213 | This command takes the following options: |
|
2216 | 2214 | |
|
2217 | 2215 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2218 | 2216 | :type apiuser: AuthUser |
|
2219 | 2217 | :param repoid: The repository name or repository ID. |
|
2220 | 2218 | :type repoid: str or int |
|
2221 | 2219 | :param remote_uri: Optional remote URI to pass in for pull |
|
2222 | 2220 | :type remote_uri: str |
|
2223 | 2221 | |
|
2224 | 2222 | Example output: |
|
2225 | 2223 | |
|
2226 | 2224 | .. code-block:: bash |
|
2227 | 2225 | |
|
2228 | 2226 | id : <id_given_in_input> |
|
2229 | 2227 | result : { |
|
2230 | 2228 | "msg": "Pulled from url `<remote_url>` on repo `<repository name>`" |
|
2231 | 2229 | "repository": "<repository name>" |
|
2232 | 2230 | } |
|
2233 | 2231 | error : null |
|
2234 | 2232 | |
|
2235 | 2233 | Example error output: |
|
2236 | 2234 | |
|
2237 | 2235 | .. code-block:: bash |
|
2238 | 2236 | |
|
2239 | 2237 | id : <id_given_in_input> |
|
2240 | 2238 | result : null |
|
2241 | 2239 | error : { |
|
2242 | 2240 | "Unable to push changes from `<remote_url>`" |
|
2243 | 2241 | } |
|
2244 | 2242 | |
|
2245 | 2243 | """ |
|
2246 | 2244 | |
|
2247 | 2245 | repo = get_repo_or_error(repoid) |
|
2248 | 2246 | remote_uri = Optional.extract(remote_uri) |
|
2249 | 2247 | remote_uri_display = remote_uri or repo.clone_uri_hidden |
|
2250 | 2248 | if not has_superadmin_permission(apiuser): |
|
2251 | 2249 | _perms = ('repository.admin',) |
|
2252 | 2250 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2253 | 2251 | |
|
2254 | 2252 | try: |
|
2255 | 2253 | ScmModel().pull_changes( |
|
2256 | 2254 | repo.repo_name, apiuser.username, remote_uri=remote_uri) |
|
2257 | 2255 | return { |
|
2258 | 2256 | 'msg': 'Pulled from url `%s` on repo `%s`' % ( |
|
2259 | 2257 | remote_uri_display, repo.repo_name), |
|
2260 | 2258 | 'repository': repo.repo_name |
|
2261 | 2259 | } |
|
2262 | 2260 | except Exception: |
|
2263 | 2261 | log.exception("Exception occurred while trying to " |
|
2264 | 2262 | "pull changes from remote location") |
|
2265 | 2263 | raise JSONRPCError( |
|
2266 | 2264 | 'Unable to pull changes from `%s`' % remote_uri_display |
|
2267 | 2265 | ) |
|
2268 | 2266 | |
|
2269 | 2267 | |
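
``pull`` is handy for keeping a mirrored repository in sync; ``remote_uri`` is only needed when pulling from somewhere other than the stored clone URI. A sketch with placeholder values:

.. code-block:: python

    import requests

    payload = {
        'id': 10,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'pull',
        'args': {'repoid': 'mirror/project',
                 'remote_uri': 'https://upstream.example.com/project'},
    }
    print(requests.post('https://code.example.com/_admin/api', json=payload).json())
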
|
2270 | 2268 | @jsonrpc_method() |
|
2271 | 2269 | def strip(request, apiuser, repoid, revision, branch): |
|
2272 | 2270 | """ |
|
2273 | 2271 | Strips the given revision from the specified repository. |
|
2274 | 2272 | |
|
2275 | 2273 |     * This will remove the revision and all of its descendants.
|
2276 | 2274 | |
|
2277 | 2275 | This command can only be run using an |authtoken| with admin rights to |
|
2278 | 2276 | the specified repository. |
|
2279 | 2277 | |
|
2280 | 2278 | This command takes the following options: |
|
2281 | 2279 | |
|
2282 | 2280 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2283 | 2281 | :type apiuser: AuthUser |
|
2284 | 2282 | :param repoid: The repository name or repository ID. |
|
2285 | 2283 | :type repoid: str or int |
|
2286 | 2284 | :param revision: The revision you wish to strip. |
|
2287 | 2285 | :type revision: str |
|
2288 | 2286 | :param branch: The branch from which to strip the revision. |
|
2289 | 2287 | :type branch: str |
|
2290 | 2288 | |
|
2291 | 2289 | Example output: |
|
2292 | 2290 | |
|
2293 | 2291 | .. code-block:: bash |
|
2294 | 2292 | |
|
2295 | 2293 | id : <id_given_in_input> |
|
2296 | 2294 | result : { |
|
2297 | 2295 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" |
|
2298 | 2296 | "repository": "<repository name>" |
|
2299 | 2297 | } |
|
2300 | 2298 | error : null |
|
2301 | 2299 | |
|
2302 | 2300 | Example error output: |
|
2303 | 2301 | |
|
2304 | 2302 | .. code-block:: bash |
|
2305 | 2303 | |
|
2306 | 2304 | id : <id_given_in_input> |
|
2307 | 2305 | result : null |
|
2308 | 2306 | error : { |
|
2309 | 2307 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
2310 | 2308 | } |
|
2311 | 2309 | |
|
2312 | 2310 | """ |
|
2313 | 2311 | |
|
2314 | 2312 | repo = get_repo_or_error(repoid) |
|
2315 | 2313 | if not has_superadmin_permission(apiuser): |
|
2316 | 2314 | _perms = ('repository.admin',) |
|
2317 | 2315 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2318 | 2316 | |
|
2319 | 2317 | try: |
|
2320 | 2318 | ScmModel().strip(repo, revision, branch) |
|
2321 | 2319 | audit_logger.store_api( |
|
2322 | 2320 | 'repo.commit.strip', action_data={'commit_id': revision}, |
|
2323 | 2321 | repo=repo, |
|
2324 | 2322 | user=apiuser, commit=True) |
|
2325 | 2323 | |
|
2326 | 2324 | return { |
|
2327 | 2325 | 'msg': 'Stripped commit %s from repo `%s`' % ( |
|
2328 | 2326 | revision, repo.repo_name), |
|
2329 | 2327 | 'repository': repo.repo_name |
|
2330 | 2328 | } |
|
2331 | 2329 | except Exception: |
|
2332 | 2330 | log.exception("Exception while trying to strip") |
|
2333 | 2331 | raise JSONRPCError( |
|
2334 | 2332 | 'Unable to strip commit %s from repo `%s`' % ( |
|
2335 | 2333 | revision, repo.repo_name) |
|
2336 | 2334 | ) |
|
2337 | 2335 | |
|
2338 | 2336 | |
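
Because ``strip`` permanently removes the commit and its descendants, it is usually scripted together with a confirmation step. A sketch with placeholder values:

.. code-block:: python

    import requests

    payload = {
        'id': 11,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'strip',
        'args': {'repoid': 'upstream/project',
                 'revision': 'deadbeefcafe',  # placeholder commit hash
                 'branch': 'default'},
    }
    print(requests.post('https://code.example.com/_admin/api', json=payload).json())
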
|
2339 | 2337 | @jsonrpc_method() |
|
2340 | 2338 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
2341 | 2339 | """ |
|
2342 | 2340 | Returns all settings for a repository. If key is given it only returns the |
|
2343 | 2341 | setting identified by the key or null. |
|
2344 | 2342 | |
|
2345 | 2343 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2346 | 2344 | :type apiuser: AuthUser |
|
2347 | 2345 | :param repoid: The repository name or repository id. |
|
2348 | 2346 | :type repoid: str or int |
|
2349 | 2347 | :param key: Key of the setting to return. |
|
2350 | 2348 | :type: key: Optional(str) |
|
2351 | 2349 | |
|
2352 | 2350 | Example output: |
|
2353 | 2351 | |
|
2354 | 2352 | .. code-block:: bash |
|
2355 | 2353 | |
|
2356 | 2354 | { |
|
2357 | 2355 | "error": null, |
|
2358 | 2356 | "id": 237, |
|
2359 | 2357 | "result": { |
|
2360 | 2358 | "extensions_largefiles": true, |
|
2361 | 2359 | "extensions_evolve": true, |
|
2362 | 2360 | "hooks_changegroup_push_logger": true, |
|
2363 | 2361 | "hooks_changegroup_repo_size": false, |
|
2364 | 2362 | "hooks_outgoing_pull_logger": true, |
|
2365 | 2363 | "phases_publish": "True", |
|
2366 | 2364 | "rhodecode_hg_use_rebase_for_merging": true, |
|
2367 | 2365 | "rhodecode_pr_merge_enabled": true, |
|
2368 | 2366 | "rhodecode_use_outdated_comments": true |
|
2369 | 2367 | } |
|
2370 | 2368 | } |
|
2371 | 2369 | """ |
|
2372 | 2370 | |
|
2373 | # Restrict access to this api method to admins only. | |
|
2371 | # Restrict access to this api method to super-admins, and repo admins only. | |
|
2372 | repo = get_repo_or_error(repoid) | |
|
2374 | 2373 | if not has_superadmin_permission(apiuser): |
|
2375 | raise JSONRPCForbidden() | |
|
2374 | _perms = ('repository.admin',) | |
|
2375 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
2376 | 2376 | |
|
2377 | 2377 | try: |
|
2378 | repo = get_repo_or_error(repoid) | |
|
2379 | 2378 | settings_model = VcsSettingsModel(repo=repo) |
|
2380 | 2379 | settings = settings_model.get_global_settings() |
|
2381 | 2380 | settings.update(settings_model.get_repo_settings()) |
|
2382 | 2381 | |
|
2383 | 2382 | # If only a single setting is requested fetch it from all settings. |
|
2384 | 2383 | key = Optional.extract(key) |
|
2385 | 2384 | if key is not None: |
|
2386 | 2385 | settings = settings.get(key, None) |
|
2387 | 2386 | except Exception: |
|
2388 | 2387 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) |
|
2389 | 2388 | log.exception(msg) |
|
2390 | 2389 | raise JSONRPCError(msg) |
|
2391 | 2390 | |
|
2392 | 2391 | return settings |
|
2393 | 2392 | |
|
2394 | 2393 | |
|
2395 | 2394 | @jsonrpc_method() |
|
2396 | 2395 | def set_repo_settings(request, apiuser, repoid, settings): |
|
2397 | 2396 | """ |
|
2398 | 2397 | Update repository settings. Returns true on success. |
|
2399 | 2398 | |
|
2400 | 2399 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2401 | 2400 | :type apiuser: AuthUser |
|
2402 | 2401 | :param repoid: The repository name or repository id. |
|
2403 | 2402 | :type repoid: str or int |
|
2404 | 2403 | :param settings: The new settings for the repository. |
|
2405 | 2404 | :type: settings: dict |
|
2406 | 2405 | |
|
2407 | 2406 | Example output: |
|
2408 | 2407 | |
|
2409 | 2408 | .. code-block:: bash |
|
2410 | 2409 | |
|
2411 | 2410 | { |
|
2412 | 2411 | "error": null, |
|
2413 | 2412 | "id": 237, |
|
2414 | 2413 | "result": true |
|
2415 | 2414 | } |
|
2416 | 2415 | """ |
|
2417 | # Restrict access to this api method to admins only. | |
|
2416 | # Restrict access to this api method to super-admins, and repo admins only. | |
|
2417 | repo = get_repo_or_error(repoid) | |
|
2418 | 2418 | if not has_superadmin_permission(apiuser): |
|
2419 | raise JSONRPCForbidden() | |
|
2419 | _perms = ('repository.admin',) | |
|
2420 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
2420 | 2421 | |
|
2421 | 2422 | if type(settings) is not dict: |
|
2422 | 2423 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
2423 | 2424 | |
|
2424 | 2425 | try: |
|
2425 | 2426 | settings_model = VcsSettingsModel(repo=repoid) |
|
2426 | 2427 | |
|
2427 | 2428 | # Merge global, repo and incoming settings. |
|
2428 | 2429 | new_settings = settings_model.get_global_settings() |
|
2429 | 2430 | new_settings.update(settings_model.get_repo_settings()) |
|
2430 | 2431 | new_settings.update(settings) |
|
2431 | 2432 | |
|
2432 | 2433 | # Update the settings. |
|
2433 | 2434 | inherit_global_settings = new_settings.get( |
|
2434 | 2435 | 'inherit_global_settings', False) |
|
2435 | 2436 | settings_model.create_or_update_repo_settings( |
|
2436 | 2437 | new_settings, inherit_global_settings=inherit_global_settings) |
|
2437 | 2438 | Session().commit() |
|
2438 | 2439 | except Exception: |
|
2439 | 2440 | msg = 'Failed to update settings for repository `{}`'.format(repoid) |
|
2440 | 2441 | log.exception(msg) |
|
2441 | 2442 | raise JSONRPCError(msg) |
|
2442 | 2443 | |
|
2443 | 2444 | # Indicate success. |
|
2444 | 2445 | return True |
|
2445 | 2446 | |
|
2446 | 2447 | |
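
Since ``set_repo_settings`` merges the global, per-repository, and incoming settings server-side, a caller only needs to send the keys it wants to change; the key names match those returned by ``get_repo_settings``. A sketch with placeholder values:

.. code-block:: python

    import requests

    payload = {
        'id': 12,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'set_repo_settings',
        'args': {'repoid': 'upstream/project',
                 'settings': {'rhodecode_pr_merge_enabled': False}},
    }
    # The call returns True on success.
    print(requests.post('https://code.example.com/_admin/api', json=payload).json()['result'])
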
|
2447 | 2448 | @jsonrpc_method() |
|
2448 | 2449 | def maintenance(request, apiuser, repoid): |
|
2449 | 2450 | """ |
|
2450 | 2451 | Triggers a maintenance on the given repository. |
|
2451 | 2452 | |
|
2452 | 2453 | This command can only be run using an |authtoken| with admin |
|
2453 | 2454 | rights to the specified repository. For more information, |
|
2454 | 2455 | see :ref:`config-token-ref`. |
|
2455 | 2456 | |
|
2456 | 2457 | This command takes the following options: |
|
2457 | 2458 | |
|
2458 | 2459 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2459 | 2460 | :type apiuser: AuthUser |
|
2460 | 2461 | :param repoid: The repository name or repository ID. |
|
2461 | 2462 | :type repoid: str or int |
|
2462 | 2463 | |
|
2463 | 2464 | Example output: |
|
2464 | 2465 | |
|
2465 | 2466 | .. code-block:: bash |
|
2466 | 2467 | |
|
2467 | 2468 | id : <id_given_in_input> |
|
2468 | 2469 | result : { |
|
2469 | 2470 | "msg": "executed maintenance command", |
|
2470 | 2471 | "executed_actions": [ |
|
2471 | 2472 | <action_message>, <action_message2>... |
|
2472 | 2473 | ], |
|
2473 | 2474 | "repository": "<repository name>" |
|
2474 | 2475 | } |
|
2475 | 2476 | error : null |
|
2476 | 2477 | |
|
2477 | 2478 | Example error output: |
|
2478 | 2479 | |
|
2479 | 2480 | .. code-block:: bash |
|
2480 | 2481 | |
|
2481 | 2482 | id : <id_given_in_input> |
|
2482 | 2483 | result : null |
|
2483 | 2484 | error : { |
|
2484 | 2485 | "Unable to execute maintenance on `<reponame>`" |
|
2485 | 2486 | } |
|
2486 | 2487 | |
|
2487 | 2488 | """ |
|
2488 | 2489 | |
|
2489 | 2490 | repo = get_repo_or_error(repoid) |
|
2490 | 2491 | if not has_superadmin_permission(apiuser): |
|
2491 | 2492 | _perms = ('repository.admin',) |
|
2492 | 2493 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2493 | 2494 | |
|
2494 | 2495 | try: |
|
2495 | 2496 | maintenance = repo_maintenance.RepoMaintenance() |
|
2496 | 2497 | executed_actions = maintenance.execute(repo) |
|
2497 | 2498 | |
|
2498 | 2499 | return { |
|
2499 | 2500 | 'msg': 'executed maintenance command', |
|
2500 | 2501 | 'executed_actions': executed_actions, |
|
2501 | 2502 | 'repository': repo.repo_name |
|
2502 | 2503 | } |
|
2503 | 2504 | except Exception: |
|
2504 | 2505 | log.exception("Exception occurred while trying to run maintenance") |
|
2505 | 2506 | raise JSONRPCError( |
|
2506 | 2507 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
@@ -1,177 +1,187 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import uuid |
|
23 | 23 | |
|
24 | 24 | from pyramid.view import view_config |
|
25 | 25 | from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPBadGateway |
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import BaseAppView |
|
28 | 28 | from rhodecode.lib.channelstream import ( |
|
29 | 29 | channelstream_request, get_channelstream_server_url, |
|
30 | 30 | ChannelstreamConnectionException, |
|
31 | 31 | ChannelstreamPermissionException, |
|
32 | 32 | check_channel_permissions, |
|
33 | 33 | get_connection_validators, |
|
34 | 34 | get_user_data, |
|
35 | 35 | parse_channels_info, |
|
36 | 36 | update_history_from_logs, |
|
37 | STATE_PUBLIC_KEYS) | |
|
37 | USER_STATE_PUBLIC_KEYS) | |
|
38 | 38 | |
|
39 | 39 | from rhodecode.lib.auth import NotAnonymous |
|
40 | 40 | |
|
41 | 41 | log = logging.getLogger(__name__) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | class ChannelstreamView(BaseAppView): |
|
45 | 45 | |
|
46 | 46 | def load_default_context(self): |
|
47 | 47 | c = self._get_local_tmpl_context() |
|
48 | 48 | self.channelstream_config = \ |
|
49 | 49 | self.request.registry.rhodecode_plugins['channelstream'] |
|
50 | 50 | if not self.channelstream_config.get('enabled'): |
|
51 | 51 | log.error('Channelstream plugin is disabled') |
|
52 | 52 | raise HTTPBadRequest() |
|
53 | 53 | |
|
54 | 54 | return c |
|
55 | 55 | |
|
56 | 56 | @NotAnonymous() |
|
57 | 57 | @view_config(route_name='channelstream_connect', renderer='json_ext') |
|
58 | 58 | def connect(self): |
|
59 | 59 | """ handle authorization of users trying to connect """ |
|
60 | 60 | |
|
61 | 61 | self.load_default_context() |
|
62 | 62 | try: |
|
63 | 63 | json_body = self.request.json_body |
|
64 | 64 | except Exception: |
|
65 | 65 | log.exception('Failed to decode json from request') |
|
66 | 66 | raise HTTPBadRequest() |
|
67 | 67 | |
|
68 | 68 | try: |
|
69 | 69 | channels = check_channel_permissions( |
|
70 | 70 | json_body.get('channels'), |
|
71 | 71 | get_connection_validators(self.request.registry)) |
|
72 | 72 | except ChannelstreamPermissionException: |
|
73 | 73 | log.error('Incorrect permissions for requested channels') |
|
74 | 74 | raise HTTPForbidden() |
|
75 | 75 | |
|
76 | 76 | user = self._rhodecode_user |
|
77 | 77 | if user.user_id: |
|
78 | 78 | user_data = get_user_data(user.user_id) |
|
79 | 79 | else: |
|
80 | 80 | user_data = { |
|
81 | 81 | 'id': None, |
|
82 | 82 | 'username': None, |
|
83 | 83 | 'first_name': None, |
|
84 | 84 | 'last_name': None, |
|
85 | 85 | 'icon_link': None, |
|
86 | 86 | 'display_name': None, |
|
87 | 87 | 'display_link': None, |
|
88 | 88 | } |
|
89 | user_data['permissions'] = self._rhodecode_user.permissions_safe | |
|
89 | ||
|
90 | #user_data['permissions'] = self._rhodecode_user.permissions_safe | |
|
91 | ||
|
90 | 92 | payload = { |
|
91 | 93 | 'username': user.username, |
|
92 | 94 | 'user_state': user_data, |
|
93 | 95 | 'conn_id': str(uuid.uuid4()), |
|
94 | 96 | 'channels': channels, |
|
95 | 97 | 'channel_configs': {}, |
|
96 | 'state_public_keys': STATE_PUBLIC_KEYS, | |
|
98 | 'state_public_keys': USER_STATE_PUBLIC_KEYS, | |
|
97 | 99 | 'info': { |
|
98 | 100 | 'exclude_channels': ['broadcast'] |
|
99 | 101 | } |
|
100 | 102 | } |
|
101 | 103 | filtered_channels = [channel for channel in channels |
|
102 | 104 | if channel != 'broadcast'] |
|
103 | 105 | for channel in filtered_channels: |
|
104 | 106 | payload['channel_configs'][channel] = { |
|
105 | 107 | 'notify_presence': True, |
|
106 | 108 | 'history_size': 100, |
|
107 | 109 | 'store_history': True, |
|
108 | 110 | 'broadcast_presence_with_user_lists': True |
|
109 | 111 | } |
|
110 | 112 | # connect user to server |
|
111 | 113 | channelstream_url = get_channelstream_server_url( |
|
112 | 114 | self.channelstream_config, '/connect') |
|
113 | 115 | try: |
|
114 | 116 | connect_result = channelstream_request( |
|
115 | 117 | self.channelstream_config, payload, '/connect') |
|
116 | 118 | except ChannelstreamConnectionException: |
|
117 | 119 | log.exception( |
|
118 | 120 | 'Channelstream service at {} is down'.format(channelstream_url)) |
|
119 | 121 | return HTTPBadGateway() |
|
120 | 122 | |
|
123 | channel_info = connect_result.get('channels_info') | |
|
124 | if not channel_info: | |
|
125 | raise HTTPBadRequest() | |
|
126 | ||
|
121 | 127 | connect_result['channels'] = channels |
|
122 | 128 | connect_result['channels_info'] = parse_channels_info( |
|
123 | connect_result['channels_info'], | |
|
124 | include_channel_info=filtered_channels) | |
|
129 | channel_info, include_channel_info=filtered_channels) | |
|
125 | 130 | update_history_from_logs(self.channelstream_config, |
|
126 | 131 | filtered_channels, connect_result) |
|
127 | 132 | return connect_result |
|
128 | 133 | |
|
129 | 134 | @NotAnonymous() |
|
130 | 135 | @view_config(route_name='channelstream_subscribe', renderer='json_ext') |
|
131 | 136 | def subscribe(self): |
|
132 | 137 | """ can be used to subscribe specific connection to other channels """ |
|
133 | 138 | self.load_default_context() |
|
134 | 139 | try: |
|
135 | 140 | json_body = self.request.json_body |
|
136 | 141 | except Exception: |
|
137 | 142 | log.exception('Failed to decode json from request') |
|
138 | 143 | raise HTTPBadRequest() |
|
139 | 144 | try: |
|
140 | 145 | channels = check_channel_permissions( |
|
141 | 146 | json_body.get('channels'), |
|
142 | 147 | get_connection_validators(self.request.registry)) |
|
143 | 148 | except ChannelstreamPermissionException: |
|
144 | 149 | log.error('Incorrect permissions for requested channels') |
|
145 | 150 | raise HTTPForbidden() |
|
146 | 151 | payload = {'conn_id': json_body.get('conn_id', ''), |
|
147 | 152 | 'channels': channels, |
|
148 | 153 | 'channel_configs': {}, |
|
149 | 154 | 'info': { |
|
150 | 155 | 'exclude_channels': ['broadcast']} |
|
151 | 156 | } |
|
152 | 157 | filtered_channels = [chan for chan in channels if chan != 'broadcast'] |
|
153 | 158 | for channel in filtered_channels: |
|
154 | 159 | payload['channel_configs'][channel] = { |
|
155 | 160 | 'notify_presence': True, |
|
156 | 161 | 'history_size': 100, |
|
157 | 162 | 'store_history': True, |
|
158 | 163 | 'broadcast_presence_with_user_lists': True |
|
159 | 164 | } |
|
160 | 165 | |
|
161 | 166 | channelstream_url = get_channelstream_server_url( |
|
162 | 167 | self.channelstream_config, '/subscribe') |
|
163 | 168 | try: |
|
164 | 169 | connect_result = channelstream_request( |
|
165 | 170 | self.channelstream_config, payload, '/subscribe') |
|
166 | 171 | except ChannelstreamConnectionException: |
|
167 | 172 | log.exception( |
|
168 | 173 | 'Channelstream service at {} is down'.format(channelstream_url)) |
|
169 | 174 | return HTTPBadGateway() |
|
175 | ||
|
176 | channel_info = connect_result.get('channels_info') | |
|
177 | if not channel_info: | |
|
178 | raise HTTPBadRequest() | |
|
179 | ||
|
170 | 180 | # include_channel_info will limit history only to new channel |
|
171 | 181 | # to not overwrite histories on other channels in client |
|
172 | 182 | connect_result['channels_info'] = parse_channels_info( |
|
173 | connect_result['channels_info'], | 
|
|
|
183 | channel_info, | |
|
174 | 184 | include_channel_info=filtered_channels) |
|
175 | 185 | update_history_from_logs( |
|
176 | 186 | self.channelstream_config, filtered_channels, connect_result) |
|
177 | 187 | return connect_result |
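
As a rough illustration of the views above, this is the kind of JSON body a client would POST to the connect endpoint; the channel names are made up. The view validates 'channels', forwards a payload to the channelstream '/connect' service, and returns the reply with 'channels' and 'channels_info' filled in (history for 'broadcast' is excluded):

    # Illustrative request body for the channelstream_connect view;
    # channel names are hypothetical.
    connect_request_body = {
        'channels': ['broadcast', 'some-other-channel'],
    }
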
@@ -1,52 +1,52 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import os |
|
21 | 21 | from rhodecode.apps.file_store import config_keys |
|
22 | 22 | from rhodecode.config.middleware import _bool_setting, _string_setting |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def _sanitize_settings_and_apply_defaults(settings): |
|
26 | 26 | """ |
|
27 | 27 | Set defaults, convert to python types and validate settings. |
|
28 | 28 | """ |
|
29 | 29 | _bool_setting(settings, config_keys.enabled, 'true') |
|
30 | 30 | |
|
31 | 31 | _string_setting(settings, config_keys.backend, 'local') |
|
32 | 32 | |
|
33 | 33 | default_store = os.path.join(os.path.dirname(settings['__file__']), 'upload_store') |
|
34 | 34 | _string_setting(settings, config_keys.store_path, default_store) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def includeme(config): |
|
38 | 38 | settings = config.registry.settings |
|
39 | 39 | _sanitize_settings_and_apply_defaults(settings) |
|
40 | 40 | |
|
41 | 41 | config.add_route( |
|
42 | 42 | name='upload_file', |
|
43 | 43 | pattern='/_file_store/upload') |
|
44 | 44 | config.add_route( |
|
45 | 45 | name='download_file', |
|
46 | pattern='/_file_store/download/{fid}') | |
|
46 | pattern='/_file_store/download/{fid:.*}') | |
|
47 | 47 | config.add_route( |
|
48 | 48 | name='download_file_by_token', |
|
49 | pattern='/_file_store/token-download/{_auth_token}/{fid}') | |
|
49 | pattern='/_file_store/token-download/{_auth_token}/{fid:.*}') | |
|
50 | 50 | |
|
51 | 51 | # Scan module for configuration decorators. |
|
52 | 52 | config.scan('.views', ignore='.tests') |
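
The change from {fid} to {fid:.*} in both download routes matters because file UIDs may now contain a path separator (see the store_path() change in the local storage backend below); a short sketch of the difference, with made-up UIDs:

    # Pyramid's default placeholder {fid} matches a single path segment only
    # (no '/'), so a UID that carries a directory prefix would fail to route.
    # {fid:.*} lets the placeholder span slashes. Both UIDs are illustrative.
    plain_uid = '0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'              # matches {fid} and {fid:.*}
    prefixed_uid = 'some-dir/0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'  # needs {fid:.*}

    download_url = '/_file_store/download/' + prefixed_uid
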
@@ -1,240 +1,261 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import time |
|
23 | import errno | |
|
23 | 24 | import shutil |
|
24 | 25 | import hashlib |
|
25 | 26 | |
|
26 | 27 | from rhodecode.lib.ext_json import json |
|
27 | 28 | from rhodecode.apps.file_store import utils |
|
28 | 29 | from rhodecode.apps.file_store.extensions import resolve_extensions |
|
29 | 30 | from rhodecode.apps.file_store.exceptions import ( |
|
30 | 31 | FileNotAllowedException, FileOverSizeException) |
|
31 | 32 | |
|
32 | 33 | METADATA_VER = 'v1' |
|
33 | 34 | |
|
34 | 35 | |
|
36 | def safe_make_dirs(dir_path): | |
|
37 | if not os.path.exists(dir_path): | |
|
38 | try: | |
|
39 | os.makedirs(dir_path) | |
|
40 | except OSError as e: | |
|
41 | if e.errno != errno.EEXIST: | |
|
42 | raise | |
|
43 | return | |
|
44 | ||
|
45 | ||
|
35 | 46 | class LocalFileStorage(object): |
|
36 | 47 | |
|
37 | 48 | @classmethod |
|
49 | def apply_counter(cls, counter, filename): | |
|
50 | name_counted = '%d-%s' % (counter, filename) | |
|
51 | return name_counted | |
|
52 | ||
|
53 | @classmethod | |
|
38 | 54 | def resolve_name(cls, name, directory): |
|
39 | 55 | """ |
|
40 | 56 | Resolves a unique name and the correct path. If a filename |
|
41 | 57 | for that path already exists then a numeric prefix with values > 0 will be |
|
42 | 58 | added, for example test.jpg -> 1-test.jpg and so on; initially the file gets a 0 prefix. |
|
43 | 59 | |
|
44 | 60 | :param name: base name of file |
|
45 | 61 | :param directory: absolute directory path |
|
46 | 62 | """ |
|
47 | 63 | |
|
48 | 64 | counter = 0 |
|
49 | 65 | while True: |
|
50 | name = '%d-%s' % (counter, name) | 
|
|
66 | name_counted = cls.apply_counter(counter, name) | |
|
51 | 67 | |
|
52 | 68 | # sub_store prefix to optimize disk usage, e.g. some_path/ab/final_file |
|
53 | sub_store = cls._sub_store_from_filename(name) | |
|
69 | sub_store = cls._sub_store_from_filename(name_counted) | |
|
54 | 70 | sub_store_path = os.path.join(directory, sub_store) |
|
55 | if not os.path.exists(sub_store_path): | 
|
|
|
56 | os.makedirs(sub_store_path) | |
|
71 | safe_make_dirs(sub_store_path) | |
|
57 | 72 | |
|
58 | path = os.path.join(sub_store_path, name) | |
|
73 | path = os.path.join(sub_store_path, name_counted) | |
|
59 | 74 | if not os.path.exists(path): |
|
60 | return name, path | |
|
75 | return name_counted, path | |
|
61 | 76 | counter += 1 |
|
62 | 77 | |
|
63 | 78 | @classmethod |
|
64 | 79 | def _sub_store_from_filename(cls, filename): |
|
65 | 80 | return filename[:2] |
|
66 | 81 | |
|
67 | 82 | @classmethod |
|
68 | 83 | def calculate_path_hash(cls, file_path): |
|
69 | 84 | """ |
|
70 | 85 | Efficient calculation of file_path sha256 sum |
|
71 | 86 | |
|
72 | 87 | :param file_path: |
|
73 | 88 | :return: sha256sum |
|
74 | 89 | """ |
|
75 | 90 | digest = hashlib.sha256() |
|
76 | 91 | with open(file_path, 'rb') as f: |
|
77 | 92 | for chunk in iter(lambda: f.read(1024 * 100), b""): |
|
78 | 93 | digest.update(chunk) |
|
79 | 94 | |
|
80 | 95 | return digest.hexdigest() |
|
81 | 96 | |
|
82 | 97 | def __init__(self, base_path, extension_groups=None): |
|
83 | 98 | |
|
84 | 99 | """ |
|
85 | 100 | Local file storage |
|
86 | 101 | |
|
87 | 102 | :param base_path: the absolute base path where uploads are stored |
|
88 | 103 | :param extension_groups: extensions string |
|
89 | 104 | """ |
|
90 | 105 | |
|
91 | 106 | extension_groups = extension_groups or ['any'] |
|
92 | 107 | self.base_path = base_path |
|
93 | 108 | self.extensions = resolve_extensions([], groups=extension_groups) |
|
94 | 109 | |
|
95 | 110 | def __repr__(self): |
|
96 | 111 | return '{}@{}'.format(self.__class__, self.base_path) |
|
97 | 112 | |
|
98 | 113 | def store_path(self, filename): |
|
99 | 114 | """ |
|
100 | 115 | Returns absolute file path of the filename, joined to the |
|
101 | 116 | base_path. |
|
102 | 117 | |
|
103 | 118 | :param filename: base name of file |
|
104 | 119 | """ |
|
105 | sub_store = self._sub_store_from_filename(filename) | |
|
106 | return os.path.join(self.base_path, sub_store, filename) | |
|
120 | prefix_dir = '' | |
|
121 | if '/' in filename: | |
|
122 | prefix_dir, filename = filename.split('/') | |
|
123 | sub_store = self._sub_store_from_filename(filename) | |
|
124 | else: | |
|
125 | sub_store = self._sub_store_from_filename(filename) | |
|
126 | return os.path.join(self.base_path, prefix_dir, sub_store, filename) | |
|
107 | 127 | |
|
108 | 128 | def delete(self, filename): |
|
109 | 129 | """ |
|
110 | 130 | Deletes the filename. Filename is resolved with the |
|
111 | 131 | absolute path based on base_path. If file does not exist, |
|
112 | 132 | returns **False**, otherwise **True** |
|
113 | 133 | |
|
114 | 134 | :param filename: base name of file |
|
115 | 135 | """ |
|
116 | 136 | if self.exists(filename): |
|
117 | 137 | os.remove(self.store_path(filename)) |
|
118 | 138 | return True |
|
119 | 139 | return False |
|
120 | 140 | |
|
121 | 141 | def exists(self, filename): |
|
122 | 142 | """ |
|
123 | 143 | Checks if file exists. Resolves filename's absolute |
|
124 | 144 | path based on base_path. |
|
125 | 145 | |
|
126 | :param filename: base name of file | |
|
146 | :param filename: file_uid name of file, e.g 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg | |
|
127 | 147 | """ |
|
128 | 148 | return os.path.exists(self.store_path(filename)) |
|
129 | 149 | |
|
130 | 150 | def filename_allowed(self, filename, extensions=None): |
|
131 | 151 | """Checks if a filename has an allowed extension |
|
132 | 152 | |
|
133 | 153 | :param filename: base name of file |
|
134 | 154 | :param extensions: iterable of extensions (or self.extensions) |
|
135 | 155 | """ |
|
136 | 156 | _, ext = os.path.splitext(filename) |
|
137 | 157 | return self.extension_allowed(ext, extensions) |
|
138 | 158 | |
|
139 | 159 | def extension_allowed(self, ext, extensions=None): |
|
140 | 160 | """ |
|
141 | 161 | Checks if an extension is permitted. Both e.g. ".jpg" and |
|
142 | 162 | "jpg" can be passed in. Extension lookup is case-insensitive. |
|
143 | 163 | |
|
144 | 164 | :param ext: extension to check |
|
145 | 165 | :param extensions: iterable of extensions to validate against (or self.extensions) |
|
146 | 166 | """ |
|
147 | 167 | def normalize_ext(_ext): |
|
148 | 168 | if _ext.startswith('.'): |
|
149 | 169 | _ext = _ext[1:] |
|
150 | 170 | return _ext.lower() |
|
151 | 171 | |
|
152 | 172 | extensions = extensions or self.extensions |
|
153 | 173 | if not extensions: |
|
154 | 174 | return True |
|
155 | 175 | |
|
156 | 176 | ext = normalize_ext(ext) |
|
157 | 177 | |
|
158 | 178 | return ext in [normalize_ext(x) for x in extensions] |
|
159 | 179 | |
|
160 | 180 | def save_file(self, file_obj, filename, directory=None, extensions=None, |
|
161 | extra_metadata=None, max_filesize=None, **kwargs): | |
|
181 | extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs): | |
|
162 | 182 | """ |
|
163 | 183 | Saves a file object to the uploads location. |
|
164 | 184 | Returns the resolved filename, i.e. the directory + |
|
165 | 185 | the (randomized/incremented) base name. |
|
166 | 186 | |
|
167 | 187 | :param file_obj: **cgi.FieldStorage** object (or similar) |
|
168 | 188 | :param filename: original filename |
|
169 | 189 | :param directory: relative path of sub-directory |
|
170 | 190 | :param extensions: iterable of allowed extensions, if not default |
|
171 | 191 | :param max_filesize: maximum size of file that should be allowed |
|
192 | :param randomized_name: generate a random UID, or a fixed UID derived from the filename | 
|
172 | 193 | :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix |
|
173 | 194 | |
|
174 | 195 | """ |
|
175 | 196 | |
|
176 | 197 | extensions = extensions or self.extensions |
|
177 | 198 | |
|
178 | 199 | if not self.filename_allowed(filename, extensions): |
|
179 | 200 | raise FileNotAllowedException() |
|
180 | 201 | |
|
181 | 202 | if directory: |
|
182 | 203 | dest_directory = os.path.join(self.base_path, directory) |
|
183 | 204 | else: |
|
184 | 205 | dest_directory = self.base_path |
|
185 | 206 | |
|
186 | if not os.path.exists(dest_directory): | 
|
|
|
187 | os.makedirs(dest_directory) | |
|
207 | safe_make_dirs(dest_directory) | |
|
188 | 208 | |
|
189 | filename = utils.uid_filename(filename) | |
|
209 | uid_filename = utils.uid_filename(filename, randomized=randomized_name) | |
|
190 | 210 | |
|
191 | 211 | # resolve also produces special sub-dir for file optimized store |
|
192 | filename, path = self.resolve_name(filename, dest_directory) | |
|
212 | filename, path = self.resolve_name(uid_filename, dest_directory) | |
|
193 | 213 | stored_file_dir = os.path.dirname(path) |
|
194 | 214 | |
|
195 | 215 | file_obj.seek(0) |
|
196 | 216 | |
|
197 | 217 | with open(path, "wb") as dest: |
|
198 | 218 | shutil.copyfileobj(file_obj, dest) |
|
199 | 219 | |
|
200 | 220 | metadata = {} |
|
201 | 221 | if extra_metadata: |
|
202 | 222 | metadata = extra_metadata |
|
203 | 223 | |
|
204 | 224 | size = os.stat(path).st_size |
|
205 | 225 | |
|
206 | 226 | if max_filesize and size > max_filesize: |
|
207 | 227 | # free up the copied file, and raise exc |
|
208 | 228 | os.remove(path) |
|
209 | 229 | raise FileOverSizeException() |
|
210 | 230 | |
|
211 | 231 | file_hash = self.calculate_path_hash(path) |
|
212 | 232 | |
|
213 | metadata.update( | |
|
214 | {"filename": filename, | 
|
|
|
233 | metadata.update({ | |
|
234 | "filename": filename, | |
|
215 | 235 | "size": size, |
|
216 | 236 | "time": time.time(), |
|
217 | 237 | "sha256": file_hash, |
|
218 | "meta_ver": METADATA_VER}) | 
|
|
238 | "meta_ver": METADATA_VER | |
|
239 | }) | |
|
219 | 240 | |
|
220 | 241 | filename_meta = filename + '.meta' |
|
221 | 242 | with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta: |
|
222 | 243 | dest_meta.write(json.dumps(metadata)) |
|
223 | 244 | |
|
224 | 245 | if directory: |
|
225 | 246 | filename = os.path.join(directory, filename) |
|
226 | 247 | |
|
227 | 248 | return filename, metadata |
|
228 | 249 | |
|
229 | 250 | def get_metadata(self, filename): |
|
230 | 251 | """ |
|
231 | 252 | Reads JSON stored metadata for a file |
|
232 | 253 | |
|
233 | 254 | :param filename: |
|
234 | 255 | :return: |
|
235 | 256 | """ |
|
236 | 257 | filename = self.store_path(filename) |
|
237 | 258 | filename_meta = filename + '.meta' |
|
238 | 259 | |
|
239 | 260 | with open(filename_meta, "rb") as source_meta: |
|
240 | 261 | return json.loads(source_meta.read()) |
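
A hedged usage sketch of the storage backend above, showing how the resolve_name() counter and the .meta sidecar behave; the base path, file name and contents are illustrative:

    # Illustrative only: store two files under the same name and read metadata back.
    import io

    storage = LocalFileStorage(base_path='/tmp/upload_store')

    fid1, meta1 = storage.save_file(io.BytesIO(b'first'), 'notes.txt',
                                    randomized_name=False)
    fid2, meta2 = storage.save_file(io.BytesIO(b'second'), 'notes.txt',
                                    randomized_name=False)

    # With randomized_name=False both uploads resolve to the same uuid5-based
    # name, so resolve_name() bumps the counter: fid1 starts with '0-', fid2
    # with '1-'. Each file lands under a two-character sub_store directory and
    # gets a '<name>.meta' JSON sidecar (filename, size, time, sha256, meta_ver).
    print(meta1['sha256'])
    print(storage.get_metadata(fid1))
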
@@ -1,54 +1,58 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import uuid |
|
23 | ||
|
23 | import StringIO | |
|
24 | 24 | import pathlib2 |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def get_file_storage(settings): |
|
28 | 28 | from rhodecode.apps.file_store.backends.local_store import LocalFileStorage |
|
29 | 29 | from rhodecode.apps.file_store import config_keys |
|
30 | 30 | store_path = settings.get(config_keys.store_path) |
|
31 | 31 | return LocalFileStorage(base_path=store_path) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def splitext(filename): |
|
35 | 35 | ext = ''.join(pathlib2.Path(filename).suffixes) |
|
36 | 36 | return filename, ext |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | def uid_filename(filename, randomized=True): |
|
40 | 40 | """ |
|
41 | 41 | Generates a randomized or stable (uuid) filename, |
|
42 | 42 | preserving the original extension. |
|
43 | 43 | |
|
44 | 44 | :param filename: the original filename |
|
45 | 45 | :param randomized: define if filename should be stable (sha1 based) or randomized |
|
46 | 46 | """ |
|
47 | 47 | |
|
48 | 48 | _, ext = splitext(filename) |
|
49 | 49 | if randomized: |
|
50 | 50 | uid = uuid.uuid4() |
|
51 | 51 | else: |
|
52 | 52 | hash_key = '{}.{}'.format(filename, 'store') |
|
53 | 53 | uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key) |
|
54 | 54 | return str(uid) + ext.lower() |
|
55 | ||
|
56 | ||
|
57 | def bytes_to_file_obj(bytes_data): | |
|
58 | return StringIO.StringIO(bytes_data) |
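
A small sketch of the two uid_filename() modes defined above; the input name is arbitrary and the generated UUIDs are of course not predictable:

    # randomized=True (default): a fresh uuid4 on every call.
    # randomized=False: a stable uuid5 derived from '<filename>.store', so the
    # same input always maps to the same UID; the extension is kept, lowercased.
    a = uid_filename('Logo.SVG')                    # different on every call
    b = uid_filename('Logo.SVG', randomized=False)  # deterministic
    c = uid_filename('Logo.SVG', randomized=False)
    assert b == c
    assert b.endswith('.svg')
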
@@ -1,195 +1,195 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import logging |
|
21 | 21 | |
|
22 | 22 | from pyramid.view import view_config |
|
23 | 23 | from pyramid.response import FileResponse |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import BaseAppView |
|
27 | 27 | from rhodecode.apps.file_store import utils |
|
28 | 28 | from rhodecode.apps.file_store.exceptions import ( |
|
29 | 29 | FileNotAllowedException, FileOverSizeException) |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib import audit_logger |
|
33 | 33 | from rhodecode.lib.auth import ( |
|
34 | 34 | CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
35 | 35 | LoginRequired) |
|
36 | 36 | from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db |
|
37 | 37 | from rhodecode.model.db import Session, FileStore, UserApiKeys |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class FileStoreView(BaseAppView): |
|
43 | 43 | upload_key = 'store_file' |
|
44 | 44 | |
|
45 | 45 | def load_default_context(self): |
|
46 | 46 | c = self._get_local_tmpl_context() |
|
47 | 47 | self.storage = utils.get_file_storage(self.request.registry.settings) |
|
48 | 48 | return c |
|
49 | 49 | |
|
50 | 50 | def _guess_type(self, file_name): |
|
51 | 51 | """ |
|
52 | 52 | Our own type guesser for mimetypes using the rich DB |
|
53 | 53 | """ |
|
54 | 54 | if not hasattr(self, 'db'): |
|
55 | 55 | self.db = get_mimetypes_db() |
|
56 | 56 | _content_type, _encoding = self.db.guess_type(file_name, strict=False) |
|
57 | 57 | return _content_type, _encoding |
|
58 | 58 | |
|
59 | 59 | def _serve_file(self, file_uid): |
|
60 | 60 | |
|
61 | 61 | if not self.storage.exists(file_uid): |
|
62 | 62 | store_path = self.storage.store_path(file_uid) |
|
63 | 63 | log.debug('File with FID:%s not found in the store under `%s`', |
|
64 | 64 | file_uid, store_path) |
|
65 | 65 | raise HTTPNotFound() |
|
66 | 66 | |
|
67 |
db_obj = FileStore( |
|
|
67 | db_obj = FileStore.get_by_store_uid(file_uid, safe=True) | |
|
68 | 68 | if not db_obj: |
|
69 | 69 | raise HTTPNotFound() |
|
70 | 70 | |
|
71 | 71 | # private upload for user |
|
72 | 72 | if db_obj.check_acl and db_obj.scope_user_id: |
|
73 | 73 | log.debug('Artifact: checking scope access for bound artifact user: `%s`', |
|
74 | 74 | db_obj.scope_user_id) |
|
75 | 75 | user = db_obj.user |
|
76 | 76 | if self._rhodecode_db_user.user_id != user.user_id: |
|
77 | 77 | log.warning('Access to file store object forbidden') |
|
78 | 78 | raise HTTPNotFound() |
|
79 | 79 | |
|
80 | 80 | # scoped to repository permissions |
|
81 | 81 | if db_obj.check_acl and db_obj.scope_repo_id: |
|
82 | 82 | log.debug('Artifact: checking scope access for bound artifact repo: `%s`', |
|
83 | 83 | db_obj.scope_repo_id) |
|
84 | 84 | repo = db_obj.repo |
|
85 | 85 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
86 | 86 | has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check') |
|
87 | 87 | if not has_perm: |
|
88 | 88 | log.warning('Access to file store object `%s` forbidden', file_uid) |
|
89 | 89 | raise HTTPNotFound() |
|
90 | 90 | |
|
91 | 91 | # scoped to repository group permissions |
|
92 | 92 | if db_obj.check_acl and db_obj.scope_repo_group_id: |
|
93 | 93 | log.debug('Artifact: checking scope access for bound artifact repo group: `%s`', |
|
94 | 94 | db_obj.scope_repo_group_id) |
|
95 | 95 | repo_group = db_obj.repo_group |
|
96 | 96 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
97 | 97 | has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check') |
|
98 | 98 | if not has_perm: |
|
99 | 99 | log.warning('Access to file store object `%s` forbidden', file_uid) |
|
100 | 100 | raise HTTPNotFound() |
|
101 | 101 | |
|
102 | 102 | FileStore.bump_access_counter(file_uid) |
|
103 | 103 | |
|
104 | 104 | file_path = self.storage.store_path(file_uid) |
|
105 | 105 | content_type = 'application/octet-stream' |
|
106 | 106 | content_encoding = None |
|
107 | 107 | |
|
108 | 108 | _content_type, _encoding = self._guess_type(file_path) |
|
109 | 109 | if _content_type: |
|
110 | 110 | content_type = _content_type |
|
111 | 111 | |
|
112 | 112 | # For file store we don't submit any session data, this logic tells the |
|
113 | 113 | # Session lib to skip it |
|
114 | 114 | setattr(self.request, '_file_response', True) |
|
115 | 115 | return FileResponse(file_path, request=self.request, |
|
116 | 116 | content_type=content_type, content_encoding=content_encoding) |
|
117 | 117 | |
|
118 | 118 | @LoginRequired() |
|
119 | 119 | @NotAnonymous() |
|
120 | 120 | @CSRFRequired() |
|
121 | 121 | @view_config(route_name='upload_file', request_method='POST', renderer='json_ext') |
|
122 | 122 | def upload_file(self): |
|
123 | 123 | self.load_default_context() |
|
124 | 124 | file_obj = self.request.POST.get(self.upload_key) |
|
125 | 125 | |
|
126 | 126 | if file_obj is None: |
|
127 | 127 | return {'store_fid': None, |
|
128 | 128 | 'access_path': None, |
|
129 | 129 | 'error': '{} data field is missing'.format(self.upload_key)} |
|
130 | 130 | |
|
131 | 131 | if not hasattr(file_obj, 'filename'): |
|
132 | 132 | return {'store_fid': None, |
|
133 | 133 | 'access_path': None, |
|
134 | 134 | 'error': 'filename cannot be read from the data field'} |
|
135 | 135 | |
|
136 | 136 | filename = file_obj.filename |
|
137 | 137 | |
|
138 | 138 | metadata = { |
|
139 | 139 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
140 | 140 | 'user_id': self._rhodecode_user.user_id, |
|
141 | 141 | 'ip': self._rhodecode_user.ip_addr}} |
|
142 | 142 | try: |
|
143 | 143 | store_uid, metadata = self.storage.save_file( |
|
144 | 144 | file_obj.file, filename, extra_metadata=metadata) |
|
145 | 145 | except FileNotAllowedException: |
|
146 | 146 | return {'store_fid': None, |
|
147 | 147 | 'access_path': None, |
|
148 | 148 | 'error': 'File {} is not allowed.'.format(filename)} |
|
149 | 149 | |
|
150 | 150 | except FileOverSizeException: |
|
151 | 151 | return {'store_fid': None, |
|
152 | 152 | 'access_path': None, |
|
153 | 153 | 'error': 'File {} is exceeding allowed limit.'.format(filename)} |
|
154 | 154 | |
|
155 | 155 | try: |
|
156 | 156 | entry = FileStore.create( |
|
157 | 157 | file_uid=store_uid, filename=metadata["filename"], |
|
158 | 158 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
159 | 159 | file_description=u'upload attachment', |
|
160 | 160 | check_acl=False, user_id=self._rhodecode_user.user_id |
|
161 | 161 | ) |
|
162 | 162 | Session().add(entry) |
|
163 | 163 | Session().commit() |
|
164 | 164 | log.debug('Stored upload in DB as %s', entry) |
|
165 | 165 | except Exception: |
|
166 | 166 | log.exception('Failed to store file %s', filename) |
|
167 | 167 | return {'store_fid': None, |
|
168 | 168 | 'access_path': None, |
|
169 | 169 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
170 | 170 | |
|
171 | 171 | return {'store_fid': store_uid, |
|
172 | 172 | 'access_path': h.route_path('download_file', fid=store_uid)} |
|
173 | 173 | |
|
174 | 174 | # ACL is checked by scopes, if no scope the file is accessible to all |
|
175 | 175 | @view_config(route_name='download_file') |
|
176 | 176 | def download_file(self): |
|
177 | 177 | self.load_default_context() |
|
178 | 178 | file_uid = self.request.matchdict['fid'] |
|
179 | 179 | log.debug('Requesting FID:%s from store %s', file_uid, self.storage) |
|
180 | 180 | return self._serve_file(file_uid) |
|
181 | 181 | |
|
182 | 182 | # in addition to @LoginRequired ACL is checked by scopes |
|
183 | 183 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD]) |
|
184 | 184 | @NotAnonymous() |
|
185 | 185 | @view_config(route_name='download_file_by_token') |
|
186 | 186 | def download_file_by_token(self): |
|
187 | 187 | """ |
|
188 | 188 | Special view that allows to access the download file by special URL that |
|
189 | 189 | is stored inside the URL. |
|
190 | 190 | |
|
191 | 191 | http://example.com/_file_store/token-download/TOKEN/FILE_UID |
|
192 | 192 | """ |
|
193 | 193 | self.load_default_context() |
|
194 | 194 | file_uid = self.request.matchdict['fid'] |
|
195 | 195 | return self._serve_file(file_uid) |
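
A hedged sketch of fetching an artifact through the token-download route documented in the docstring above; the host, token and file UID are placeholders:

    # Mirrors the documented URL shape:
    #   http://example.com/_file_store/token-download/TOKEN/FILE_UID
    # where TOKEN is an auth token carrying the artifact-download role.
    import requests

    token = '<auth-token-with-artifact-download-role>'
    file_uid = '0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'  # illustrative UID
    url = 'http://example.com/_file_store/token-download/{}/{}'.format(token, file_uid)

    resp = requests.get(url)
    with open('downloaded.svg', 'wb') as out:
        out.write(resp.content)
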
@@ -1,533 +1,543 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.apps._base import add_route_with_slash |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | |
|
25 | 25 | # repo creating checks, special cases that aren't repo routes |
|
26 | 26 | config.add_route( |
|
27 | 27 | name='repo_creating', |
|
28 | 28 | pattern='/{repo_name:.*?[^/]}/repo_creating') |
|
29 | 29 | |
|
30 | 30 | config.add_route( |
|
31 | 31 | name='repo_creating_check', |
|
32 | 32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') |
|
33 | 33 | |
|
34 | 34 | # Summary |
|
35 | 35 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
36 | 36 | # all pattern |
|
37 | 37 | config.add_route( |
|
38 | 38 | name='repo_summary_explicit', |
|
39 | 39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
40 | 40 | config.add_route( |
|
41 | 41 | name='repo_summary_commits', |
|
42 | 42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
43 | 43 | |
|
44 | 44 | # Commits |
|
45 | 45 | config.add_route( |
|
46 | 46 | name='repo_commit', |
|
47 | 47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
48 | 48 | |
|
49 | 49 | config.add_route( |
|
50 | 50 | name='repo_commit_children', |
|
51 | 51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) |
|
52 | 52 | |
|
53 | 53 | config.add_route( |
|
54 | 54 | name='repo_commit_parents', |
|
55 | 55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) |
|
56 | 56 | |
|
57 | 57 | config.add_route( |
|
58 | 58 | name='repo_commit_raw', |
|
59 | 59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) |
|
60 | 60 | |
|
61 | 61 | config.add_route( |
|
62 | 62 | name='repo_commit_patch', |
|
63 | 63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) |
|
64 | 64 | |
|
65 | 65 | config.add_route( |
|
66 | 66 | name='repo_commit_download', |
|
67 | 67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) |
|
68 | 68 | |
|
69 | 69 | config.add_route( |
|
70 | 70 | name='repo_commit_data', |
|
71 | 71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) |
|
72 | 72 | |
|
73 | 73 | config.add_route( |
|
74 | 74 | name='repo_commit_comment_create', |
|
75 | 75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) |
|
76 | 76 | |
|
77 | 77 | config.add_route( |
|
78 | 78 | name='repo_commit_comment_preview', |
|
79 | 79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) |
|
80 | 80 | |
|
81 | 81 | config.add_route( |
|
82 | 82 | name='repo_commit_comment_history_view', |
|
83 | 83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True) |
|
84 | 84 | |
|
85 | 85 | config.add_route( |
|
86 | 86 | name='repo_commit_comment_attachment_upload', |
|
87 | 87 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True) |
|
88 | 88 | |
|
89 | 89 | config.add_route( |
|
90 | 90 | name='repo_commit_comment_delete', |
|
91 | 91 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) |
|
92 | 92 | |
|
93 | 93 | config.add_route( |
|
94 | 94 | name='repo_commit_comment_edit', |
|
95 | 95 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True) |
|
96 | 96 | |
|
97 | 97 | # still working url for backward compat. |
|
98 | 98 | config.add_route( |
|
99 | 99 | name='repo_commit_raw_deprecated', |
|
100 | 100 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) |
|
101 | 101 | |
|
102 | 102 | # Files |
|
103 | 103 | config.add_route( |
|
104 | 104 | name='repo_archivefile', |
|
105 | 105 | pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True) |
|
106 | 106 | |
|
107 | 107 | config.add_route( |
|
108 | 108 | name='repo_files_diff', |
|
109 | 109 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
110 | 110 | config.add_route( # legacy route to make old links work |
|
111 | 111 | name='repo_files_diff_2way_redirect', |
|
112 | 112 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
113 | 113 | |
|
114 | 114 | config.add_route( |
|
115 | 115 | name='repo_files', |
|
116 | 116 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
117 | 117 | config.add_route( |
|
118 | 118 | name='repo_files:default_path', |
|
119 | 119 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
120 | 120 | config.add_route( |
|
121 | 121 | name='repo_files:default_commit', |
|
122 | 122 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
123 | 123 | |
|
124 | 124 | config.add_route( |
|
125 | 125 | name='repo_files:rendered', |
|
126 | 126 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
127 | 127 | |
|
128 | 128 | config.add_route( |
|
129 | 129 | name='repo_files:annotated', |
|
130 | 130 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
131 | 131 | config.add_route( |
|
132 | 132 | name='repo_files:annotated_previous', |
|
133 | 133 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
134 | 134 | |
|
135 | 135 | config.add_route( |
|
136 | 136 | name='repo_nodetree_full', |
|
137 | 137 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
138 | 138 | config.add_route( |
|
139 | 139 | name='repo_nodetree_full:default_path', |
|
140 | 140 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
141 | 141 | |
|
142 | 142 | config.add_route( |
|
143 | 143 | name='repo_files_nodelist', |
|
144 | 144 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
145 | 145 | |
|
146 | 146 | config.add_route( |
|
147 | 147 | name='repo_file_raw', |
|
148 | 148 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
149 | 149 | |
|
150 | 150 | config.add_route( |
|
151 | 151 | name='repo_file_download', |
|
152 | 152 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
153 | 153 | config.add_route( # backward compat to keep old links working |
|
154 | 154 | name='repo_file_download:legacy', |
|
155 | 155 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
156 | 156 | repo_route=True) |
|
157 | 157 | |
|
158 | 158 | config.add_route( |
|
159 | 159 | name='repo_file_history', |
|
160 | 160 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
161 | 161 | |
|
162 | 162 | config.add_route( |
|
163 | 163 | name='repo_file_authors', |
|
164 | 164 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
165 | 165 | |
|
166 | 166 | config.add_route( |
|
167 | 167 | name='repo_files_check_head', |
|
168 | 168 | pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}', |
|
169 | 169 | repo_route=True) |
|
170 | 170 | config.add_route( |
|
171 | 171 | name='repo_files_remove_file', |
|
172 | 172 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
173 | 173 | repo_route=True) |
|
174 | 174 | config.add_route( |
|
175 | 175 | name='repo_files_delete_file', |
|
176 | 176 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
177 | 177 | repo_route=True) |
|
178 | 178 | config.add_route( |
|
179 | 179 | name='repo_files_edit_file', |
|
180 | 180 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
181 | 181 | repo_route=True) |
|
182 | 182 | config.add_route( |
|
183 | 183 | name='repo_files_update_file', |
|
184 | 184 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
185 | 185 | repo_route=True) |
|
186 | 186 | config.add_route( |
|
187 | 187 | name='repo_files_add_file', |
|
188 | 188 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
189 | 189 | repo_route=True) |
|
190 | 190 | config.add_route( |
|
191 | 191 | name='repo_files_upload_file', |
|
192 | 192 | pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}', |
|
193 | 193 | repo_route=True) |
|
194 | 194 | config.add_route( |
|
195 | 195 | name='repo_files_create_file', |
|
196 | 196 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
197 | 197 | repo_route=True) |
|
198 | 198 | |
|
199 | 199 | # Refs data |
|
200 | 200 | config.add_route( |
|
201 | 201 | name='repo_refs_data', |
|
202 | 202 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
203 | 203 | |
|
204 | 204 | config.add_route( |
|
205 | 205 | name='repo_refs_changelog_data', |
|
206 | 206 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
207 | 207 | |
|
208 | 208 | config.add_route( |
|
209 | 209 | name='repo_stats', |
|
210 | 210 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
211 | 211 | |
|
212 | 212 | # Commits |
|
213 | 213 | config.add_route( |
|
214 | 214 | name='repo_commits', |
|
215 | 215 | pattern='/{repo_name:.*?[^/]}/commits', repo_route=True) |
|
216 | 216 | config.add_route( |
|
217 | 217 | name='repo_commits_file', |
|
218 | 218 | pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True) |
|
219 | 219 | config.add_route( |
|
220 | 220 | name='repo_commits_elements', |
|
221 | 221 | pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True) |
|
222 | 222 | config.add_route( |
|
223 | 223 | name='repo_commits_elements_file', |
|
224 | 224 | pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True) |
|
225 | 225 | |
|
226 | 226 | # Changelog (old deprecated name for commits page) |
|
227 | 227 | config.add_route( |
|
228 | 228 | name='repo_changelog', |
|
229 | 229 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
230 | 230 | config.add_route( |
|
231 | 231 | name='repo_changelog_file', |
|
232 | 232 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
233 | 233 | |
|
234 | 234 | # Compare |
|
235 | 235 | config.add_route( |
|
236 | 236 | name='repo_compare_select', |
|
237 | 237 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) |
|
238 | 238 | |
|
239 | 239 | config.add_route( |
|
240 | 240 | name='repo_compare', |
|
241 | 241 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) |
|
242 | 242 | |
|
243 | 243 | # Tags |
|
244 | 244 | config.add_route( |
|
245 | 245 | name='tags_home', |
|
246 | 246 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
247 | 247 | |
|
248 | 248 | # Branches |
|
249 | 249 | config.add_route( |
|
250 | 250 | name='branches_home', |
|
251 | 251 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
252 | 252 | |
|
253 | 253 | # Bookmarks |
|
254 | 254 | config.add_route( |
|
255 | 255 | name='bookmarks_home', |
|
256 | 256 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
257 | 257 | |
|
258 | 258 | # Forks |
|
259 | 259 | config.add_route( |
|
260 | 260 | name='repo_fork_new', |
|
261 | 261 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, |
|
262 | 262 | repo_forbid_when_archived=True, |
|
263 | 263 | repo_accepted_types=['hg', 'git']) |
|
264 | 264 | |
|
265 | 265 | config.add_route( |
|
266 | 266 | name='repo_fork_create', |
|
267 | 267 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, |
|
268 | 268 | repo_forbid_when_archived=True, |
|
269 | 269 | repo_accepted_types=['hg', 'git']) |
|
270 | 270 | |
|
271 | 271 | config.add_route( |
|
272 | 272 | name='repo_forks_show_all', |
|
273 | 273 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, |
|
274 | 274 | repo_accepted_types=['hg', 'git']) |
|
275 | 275 | config.add_route( |
|
276 | 276 | name='repo_forks_data', |
|
277 | 277 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, |
|
278 | 278 | repo_accepted_types=['hg', 'git']) |
|
279 | 279 | |
|
280 | 280 | # Pull Requests |
|
281 | 281 | config.add_route( |
|
282 | 282 | name='pullrequest_show', |
|
283 | 283 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', |
|
284 | 284 | repo_route=True) |
|
285 | 285 | |
|
286 | 286 | config.add_route( |
|
287 | 287 | name='pullrequest_show_all', |
|
288 | 288 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
289 | 289 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
290 | 290 | |
|
291 | 291 | config.add_route( |
|
292 | 292 | name='pullrequest_show_all_data', |
|
293 | 293 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
294 | 294 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
295 | 295 | |
|
296 | 296 | config.add_route( |
|
297 | 297 | name='pullrequest_repo_refs', |
|
298 | 298 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
299 | 299 | repo_route=True) |
|
300 | 300 | |
|
301 | 301 | config.add_route( |
|
302 | 302 | name='pullrequest_repo_targets', |
|
303 | 303 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets', |
|
304 | 304 | repo_route=True) |
|
305 | 305 | |
|
306 | 306 | config.add_route( |
|
307 | 307 | name='pullrequest_new', |
|
308 | 308 | pattern='/{repo_name:.*?[^/]}/pull-request/new', |
|
309 | 309 | repo_route=True, repo_accepted_types=['hg', 'git'], |
|
310 | 310 | repo_forbid_when_archived=True) |
|
311 | 311 | |
|
312 | 312 | config.add_route( |
|
313 | 313 | name='pullrequest_create', |
|
314 | 314 | pattern='/{repo_name:.*?[^/]}/pull-request/create', |
|
315 | 315 | repo_route=True, repo_accepted_types=['hg', 'git'], |
|
316 | 316 | repo_forbid_when_archived=True) |
|
317 | 317 | |
|
318 | 318 | config.add_route( |
|
319 | 319 | name='pullrequest_update', |
|
320 | 320 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', |
|
321 | 321 | repo_route=True, repo_forbid_when_archived=True) |
|
322 | 322 | |
|
323 | 323 | config.add_route( |
|
324 | 324 | name='pullrequest_merge', |
|
325 | 325 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', |
|
326 | 326 | repo_route=True, repo_forbid_when_archived=True) |
|
327 | 327 | |
|
328 | 328 | config.add_route( |
|
329 | 329 | name='pullrequest_delete', |
|
330 | 330 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', |
|
331 | 331 | repo_route=True, repo_forbid_when_archived=True) |
|
332 | 332 | |
|
333 | 333 | config.add_route( |
|
334 | 334 | name='pullrequest_comment_create', |
|
335 | 335 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', |
|
336 | 336 | repo_route=True) |
|
337 | 337 | |
|
338 | 338 | config.add_route( |
|
339 | 339 | name='pullrequest_comment_edit', |
|
340 | 340 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit', |
|
341 | 341 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
342 | 342 | |
|
343 | 343 | config.add_route( |
|
344 | 344 | name='pullrequest_comment_delete', |
|
345 | 345 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', |
|
346 | 346 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
347 | 347 | |
|
348 | config.add_route( | |
|
349 | name='pullrequest_comments', | |
|
350 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments', | |
|
351 | repo_route=True) | |
|
352 | ||
|
353 | config.add_route( | |
|
354 | name='pullrequest_todos', | |
|
355 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos', | |
|
356 | repo_route=True) | |
|
357 | ||
|
348 | 358 | # Artifacts, (EE feature) |
|
349 | 359 | config.add_route( |
|
350 | 360 | name='repo_artifacts_list', |
|
351 | 361 | pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True) |
|
352 | 362 | |
|
353 | 363 | # Settings |
|
354 | 364 | config.add_route( |
|
355 | 365 | name='edit_repo', |
|
356 | 366 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
357 | 367 | # update is POST on edit_repo |
|
358 | 368 | |
|
359 | 369 | # Settings advanced |
|
360 | 370 | config.add_route( |
|
361 | 371 | name='edit_repo_advanced', |
|
362 | 372 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
363 | 373 | config.add_route( |
|
364 | 374 | name='edit_repo_advanced_archive', |
|
365 | 375 | pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True) |
|
366 | 376 | config.add_route( |
|
367 | 377 | name='edit_repo_advanced_delete', |
|
368 | 378 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
369 | 379 | config.add_route( |
|
370 | 380 | name='edit_repo_advanced_locking', |
|
371 | 381 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
372 | 382 | config.add_route( |
|
373 | 383 | name='edit_repo_advanced_journal', |
|
374 | 384 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
375 | 385 | config.add_route( |
|
376 | 386 | name='edit_repo_advanced_fork', |
|
377 | 387 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
378 | 388 | |
|
379 | 389 | config.add_route( |
|
380 | 390 | name='edit_repo_advanced_hooks', |
|
381 | 391 | pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True) |
|
382 | 392 | |
|
383 | 393 | # Caches |
|
384 | 394 | config.add_route( |
|
385 | 395 | name='edit_repo_caches', |
|
386 | 396 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
387 | 397 | |
|
388 | 398 | # Permissions |
|
389 | 399 | config.add_route( |
|
390 | 400 | name='edit_repo_perms', |
|
391 | 401 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
392 | 402 | |
|
393 | 403 | config.add_route( |
|
394 | 404 | name='edit_repo_perms_set_private', |
|
395 | 405 | pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True) |
|
396 | 406 | |
|
397 | 407 | # Permissions Branch (EE feature) |
|
398 | 408 | config.add_route( |
|
399 | 409 | name='edit_repo_perms_branch', |
|
400 | 410 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True) |
|
401 | 411 | config.add_route( |
|
402 | 412 | name='edit_repo_perms_branch_delete', |
|
403 | 413 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete', |
|
404 | 414 | repo_route=True) |
|
405 | 415 | |
|
406 | 416 | # Maintenance |
|
407 | 417 | config.add_route( |
|
408 | 418 | name='edit_repo_maintenance', |
|
409 | 419 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
410 | 420 | |
|
411 | 421 | config.add_route( |
|
412 | 422 | name='edit_repo_maintenance_execute', |
|
413 | 423 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
414 | 424 | |
|
415 | 425 | # Fields |
|
416 | 426 | config.add_route( |
|
417 | 427 | name='edit_repo_fields', |
|
418 | 428 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) |
|
419 | 429 | config.add_route( |
|
420 | 430 | name='edit_repo_fields_create', |
|
421 | 431 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) |
|
422 | 432 | config.add_route( |
|
423 | 433 | name='edit_repo_fields_delete', |
|
424 | 434 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) |
|
425 | 435 | |
|
426 | 436 | # Locking |
|
427 | 437 | config.add_route( |
|
428 | 438 | name='repo_edit_toggle_locking', |
|
429 | 439 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) |
|
430 | 440 | |
|
431 | 441 | # Remote |
|
432 | 442 | config.add_route( |
|
433 | 443 | name='edit_repo_remote', |
|
434 | 444 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) |
|
435 | 445 | config.add_route( |
|
436 | 446 | name='edit_repo_remote_pull', |
|
437 | 447 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) |
|
438 | 448 | config.add_route( |
|
439 | 449 | name='edit_repo_remote_push', |
|
440 | 450 | pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True) |
|
441 | 451 | |
|
442 | 452 | # Statistics |
|
443 | 453 | config.add_route( |
|
444 | 454 | name='edit_repo_statistics', |
|
445 | 455 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) |
|
446 | 456 | config.add_route( |
|
447 | 457 | name='edit_repo_statistics_reset', |
|
448 | 458 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) |
|
449 | 459 | |
|
450 | 460 | # Issue trackers |
|
451 | 461 | config.add_route( |
|
452 | 462 | name='edit_repo_issuetracker', |
|
453 | 463 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) |
|
454 | 464 | config.add_route( |
|
455 | 465 | name='edit_repo_issuetracker_test', |
|
456 | 466 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) |
|
457 | 467 | config.add_route( |
|
458 | 468 | name='edit_repo_issuetracker_delete', |
|
459 | 469 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) |
|
460 | 470 | config.add_route( |
|
461 | 471 | name='edit_repo_issuetracker_update', |
|
462 | 472 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) |
|
463 | 473 | |
|
464 | 474 | # VCS Settings |
|
465 | 475 | config.add_route( |
|
466 | 476 | name='edit_repo_vcs', |
|
467 | 477 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) |
|
468 | 478 | config.add_route( |
|
469 | 479 | name='edit_repo_vcs_update', |
|
470 | 480 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) |
|
471 | 481 | |
|
472 | 482 | # svn pattern |
|
473 | 483 | config.add_route( |
|
474 | 484 | name='edit_repo_vcs_svn_pattern_delete', |
|
475 | 485 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) |
|
476 | 486 | |
|
477 | 487 | # Repo Review Rules (EE feature) |
|
478 | 488 | config.add_route( |
|
479 | 489 | name='repo_reviewers', |
|
480 | 490 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
481 | 491 | |
|
482 | 492 | config.add_route( |
|
483 | 493 | name='repo_default_reviewers_data', |
|
484 | 494 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
485 | 495 | |
|
486 | 496 | # Repo Automation (EE feature) |
|
487 | 497 | config.add_route( |
|
488 | 498 | name='repo_automation', |
|
489 | 499 | pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True) |
|
490 | 500 | |
|
491 | 501 | # Strip |
|
492 | 502 | config.add_route( |
|
493 | 503 | name='edit_repo_strip', |
|
494 | 504 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
495 | 505 | |
|
496 | 506 | config.add_route( |
|
497 | 507 | name='strip_check', |
|
498 | 508 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
499 | 509 | |
|
500 | 510 | config.add_route( |
|
501 | 511 | name='strip_execute', |
|
502 | 512 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
503 | 513 | |
|
504 | 514 | # Audit logs |
|
505 | 515 | config.add_route( |
|
506 | 516 | name='edit_repo_audit_logs', |
|
507 | 517 | pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True) |
|
508 | 518 | |
|
509 | 519 | # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility |
|
510 | 520 | config.add_route( |
|
511 | 521 | name='rss_feed_home', |
|
512 | 522 | pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True) |
|
513 | 523 | |
|
514 | 524 | config.add_route( |
|
515 | 525 | name='atom_feed_home', |
|
516 | 526 | pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True) |
|
517 | 527 | |
|
518 | 528 | config.add_route( |
|
519 | 529 | name='rss_feed_home_old', |
|
520 | 530 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
521 | 531 | |
|
522 | 532 | config.add_route( |
|
523 | 533 | name='atom_feed_home_old', |
|
524 | 534 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
525 | 535 | |
|
526 | 536 | # NOTE(marcink): needs to be at the end for catch-all |
|
527 | 537 | add_route_with_slash( |
|
528 | 538 | config, |
|
529 | 539 | name='repo_summary', |
|
530 | 540 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
531 | 541 | |
|
532 | 542 | # Scan module for configuration decorators. |
|
533 | 543 | config.scan('.views', ignore='.tests') |
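
The routes above are registered through Pyramid's `config.add_route`; every pattern uses the non-greedy expression `{repo_name:.*?[^/]}` so repository names that contain slashes are captured without a trailing slash, and the catch-all `repo_summary` route is deliberately registered last so it cannot shadow the more specific `/settings/...` routes. A minimal sketch of that ordering, assuming plain Pyramid and omitting the codebase-specific `repo_route=True` predicate and `add_route_with_slash` helper:

    # Minimal sketch, assuming plain Pyramid (the repo_route=True predicate and
    # add_route_with_slash helper used above are specific to this codebase).
    from wsgiref.simple_server import make_server
    from pyramid.config import Configurator
    from pyramid.response import Response

    def strip_view(request):
        # matchdict holds the captured repo_name, slashes included
        return Response('strip settings for %s' % request.matchdict['repo_name'])

    def summary_view(request):
        return Response('summary for %s' % request.matchdict['repo_name'])

    if __name__ == '__main__':
        config = Configurator()
        # specific route first ...
        config.add_route('edit_repo_strip', '/{repo_name:.*?[^/]}/settings/strip')
        # ... catch-all last; routes are matched in registration order, so adding
        # it earlier would shadow every /settings/ URL
        config.add_route('repo_summary', '/{repo_name:.*?[^/]}')
        config.add_view(strip_view, route_name='edit_repo_strip')
        config.add_view(summary_view, route_name='repo_summary')
        make_server('127.0.0.1', 6543, config.make_wsgi_app()).serve_forever()
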
@@ -1,507 +1,494 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.tests import TestController |
|
24 | 24 | |
|
25 | 25 | from rhodecode.model.db import ChangesetComment, Notification |
|
26 | 26 | from rhodecode.model.meta import Session |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def route_path(name, params=None, **kwargs): |
|
31 | 31 | import urllib |
|
32 | 32 | |
|
33 | 33 | base_url = { |
|
34 | 34 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', |
|
35 | 35 | 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create', |
|
36 | 36 | 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview', |
|
37 | 37 | 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete', |
|
38 | 38 | 'repo_commit_comment_edit': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit', |
|
39 | 39 | }[name].format(**kwargs) |
|
40 | 40 | |
|
41 | 41 | if params: |
|
42 | 42 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
43 | 43 | return base_url |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | @pytest.mark.backends("git", "hg", "svn") |
|
47 | 47 | class TestRepoCommitCommentsView(TestController): |
|
48 | 48 | |
|
49 | 49 | @pytest.fixture(autouse=True) |
|
50 | 50 | def prepare(self, request, baseapp): |
|
51 | 51 | for x in ChangesetComment.query().all(): |
|
52 | 52 | Session().delete(x) |
|
53 | 53 | Session().commit() |
|
54 | 54 | |
|
55 | 55 | for x in Notification.query().all(): |
|
56 | 56 | Session().delete(x) |
|
57 | 57 | Session().commit() |
|
58 | 58 | |
|
59 | 59 | request.addfinalizer(self.cleanup) |
|
60 | 60 | |
|
61 | 61 | def cleanup(self): |
|
62 | 62 | for x in ChangesetComment.query().all(): |
|
63 | 63 | Session().delete(x) |
|
64 | 64 | Session().commit() |
|
65 | 65 | |
|
66 | 66 | for x in Notification.query().all(): |
|
67 | 67 | Session().delete(x) |
|
68 | 68 | Session().commit() |
|
69 | 69 | |
|
70 | 70 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) |
|
71 | 71 | def test_create(self, comment_type, backend): |
|
72 | 72 | self.log_user() |
|
73 | 73 | commit = backend.repo.get_commit('300') |
|
74 | 74 | commit_id = commit.raw_id |
|
75 | 75 | text = u'CommentOnCommit' |
|
76 | 76 | |
|
77 | 77 | params = {'text': text, 'csrf_token': self.csrf_token, |
|
78 | 78 | 'comment_type': comment_type} |
|
79 | 79 | self.app.post( |
|
80 | 80 | route_path('repo_commit_comment_create', |
|
81 | 81 | repo_name=backend.repo_name, commit_id=commit_id), |
|
82 | 82 | params=params) |
|
83 | 83 | |
|
84 | 84 | response = self.app.get( |
|
85 | 85 | route_path('repo_commit', |
|
86 | 86 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
87 | 87 | |
|
88 | 88 | # test DB |
|
89 | 89 | assert ChangesetComment.query().count() == 1 |
|
90 | 90 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
91 | 91 | |
|
92 | 92 | assert Notification.query().count() == 1 |
|
93 | 93 | assert ChangesetComment.query().count() == 1 |
|
94 | 94 | |
|
95 | 95 | notification = Notification.query().all()[0] |
|
96 | 96 | |
|
97 | 97 | comment_id = ChangesetComment.query().first().comment_id |
|
98 | 98 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
99 | 99 | |
|
100 | 100 | author = notification.created_by_user.username_and_name |
|
101 | 101 | sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format( |
|
102 | 102 | author, comment_type, h.show_id(commit), backend.repo_name) |
|
103 | 103 | assert sbj == notification.subject |
|
104 | 104 | |
|
105 | 105 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
106 | 106 | backend.repo_name, commit_id, comment_id)) |
|
107 | 107 | assert lnk in notification.body |
|
108 | 108 | |
|
109 | 109 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) |
|
110 | 110 | def test_create_inline(self, comment_type, backend): |
|
111 | 111 | self.log_user() |
|
112 | 112 | commit = backend.repo.get_commit('300') |
|
113 | 113 | commit_id = commit.raw_id |
|
114 | 114 | text = u'CommentOnCommit' |
|
115 | 115 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
116 | 116 | line = 'n1' |
|
117 | 117 | |
|
118 | 118 | params = {'text': text, 'f_path': f_path, 'line': line, |
|
119 | 119 | 'comment_type': comment_type, |
|
120 | 120 | 'csrf_token': self.csrf_token} |
|
121 | 121 | |
|
122 | 122 | self.app.post( |
|
123 | 123 | route_path('repo_commit_comment_create', |
|
124 | 124 | repo_name=backend.repo_name, commit_id=commit_id), |
|
125 | 125 | params=params) |
|
126 | 126 | |
|
127 | 127 | response = self.app.get( |
|
128 | 128 | route_path('repo_commit', |
|
129 | 129 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
130 | 130 | |
|
131 | 131 | # test DB |
|
132 | 132 | assert ChangesetComment.query().count() == 1 |
|
133 | 133 | assert_comment_links(response, 0, ChangesetComment.query().count()) |
|
134 | 134 | |
|
135 | 135 | if backend.alias == 'svn': |
|
136 | 136 | response.mustcontain( |
|
137 | 137 | '''data-f-path="vcs/commands/summary.py" ''' |
|
138 | 138 | '''data-anchor-id="c-300-ad05457a43f8"''' |
|
139 | 139 | ) |
|
140 | 140 | if backend.alias == 'git': |
|
141 | 141 | response.mustcontain( |
|
142 | 142 | '''data-f-path="vcs/backends/hg.py" ''' |
|
143 | 143 | '''data-anchor-id="c-883e775e89ea-9c390eb52cd6"''' |
|
144 | 144 | ) |
|
145 | 145 | |
|
146 | 146 | if backend.alias == 'hg': |
|
147 | 147 | response.mustcontain( |
|
148 | 148 | '''data-f-path="vcs/backends/hg.py" ''' |
|
149 | 149 | '''data-anchor-id="c-e58d85a3973b-9c390eb52cd6"''' |
|
150 | 150 | ) |
|
151 | 151 | |
|
152 | 152 | assert Notification.query().count() == 1 |
|
153 | 153 | assert ChangesetComment.query().count() == 1 |
|
154 | 154 | |
|
155 | 155 | notification = Notification.query().all()[0] |
|
156 | 156 | comment = ChangesetComment.query().first() |
|
157 | 157 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
158 | 158 | |
|
159 | 159 | assert comment.revision == commit_id |
|
160 | 160 | |
|
161 | 161 | author = notification.created_by_user.username_and_name |
|
162 | 162 | sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format( |
|
163 | 163 | author, comment_type, f_path, h.show_id(commit), backend.repo_name) |
|
164 | 164 | |
|
165 | 165 | assert sbj == notification.subject |
|
166 | 166 | |
|
167 | 167 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
168 | 168 | backend.repo_name, commit_id, comment.comment_id)) |
|
169 | 169 | assert lnk in notification.body |
|
170 | 170 | assert 'on line n1' in notification.body |
|
171 | 171 | |
|
172 | 172 | def test_create_with_mention(self, backend): |
|
173 | 173 | self.log_user() |
|
174 | 174 | |
|
175 | 175 | commit_id = backend.repo.get_commit('300').raw_id |
|
176 | 176 | text = u'@test_regular check CommentOnCommit' |
|
177 | 177 | |
|
178 | 178 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
179 | 179 | self.app.post( |
|
180 | 180 | route_path('repo_commit_comment_create', |
|
181 | 181 | repo_name=backend.repo_name, commit_id=commit_id), |
|
182 | 182 | params=params) |
|
183 | 183 | |
|
184 | 184 | response = self.app.get( |
|
185 | 185 | route_path('repo_commit', |
|
186 | 186 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
187 | 187 | # test DB |
|
188 | 188 | assert ChangesetComment.query().count() == 1 |
|
189 | 189 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
190 | 190 | |
|
191 | 191 | notification = Notification.query().one() |
|
192 | 192 | |
|
193 | 193 | assert len(notification.recipients) == 2 |
|
194 | 194 | users = [x.username for x in notification.recipients] |
|
195 | 195 | |
|
196 | 196 | # test_regular gets notification by @mention |
|
197 | 197 | assert sorted(users) == [u'test_admin', u'test_regular'] |
|
198 | 198 | |
|
199 | 199 | def test_create_with_status_change(self, backend): |
|
200 | 200 | self.log_user() |
|
201 | 201 | commit = backend.repo.get_commit('300') |
|
202 | 202 | commit_id = commit.raw_id |
|
203 | 203 | text = u'CommentOnCommit' |
|
204 | 204 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
205 | 205 | line = 'n1' |
|
206 | 206 | |
|
207 | 207 | params = {'text': text, 'changeset_status': 'approved', |
|
208 | 208 | 'csrf_token': self.csrf_token} |
|
209 | 209 | |
|
210 | 210 | self.app.post( |
|
211 | 211 | route_path( |
|
212 | 212 | 'repo_commit_comment_create', |
|
213 | 213 | repo_name=backend.repo_name, commit_id=commit_id), |
|
214 | 214 | params=params) |
|
215 | 215 | |
|
216 | 216 | response = self.app.get( |
|
217 | 217 | route_path('repo_commit', |
|
218 | 218 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
219 | 219 | |
|
220 | 220 | # test DB |
|
221 | 221 | assert ChangesetComment.query().count() == 1 |
|
222 | 222 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
223 | 223 | |
|
224 | 224 | assert Notification.query().count() == 1 |
|
225 | 225 | assert ChangesetComment.query().count() == 1 |
|
226 | 226 | |
|
227 | 227 | notification = Notification.query().all()[0] |
|
228 | 228 | |
|
229 | 229 | comment_id = ChangesetComment.query().first().comment_id |
|
230 | 230 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
231 | 231 | |
|
232 | 232 | author = notification.created_by_user.username_and_name |
|
233 | 233 | sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format( |
|
234 | 234 | author, h.show_id(commit), backend.repo_name) |
|
235 | 235 | assert sbj == notification.subject |
|
236 | 236 | |
|
237 | 237 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
238 | 238 | backend.repo_name, commit_id, comment_id)) |
|
239 | 239 | assert lnk in notification.body |
|
240 | 240 | |
|
241 | 241 | def test_delete(self, backend): |
|
242 | 242 | self.log_user() |
|
243 | 243 | commit_id = backend.repo.get_commit('300').raw_id |
|
244 | 244 | text = u'CommentOnCommit' |
|
245 | 245 | |
|
246 | 246 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
247 | 247 | self.app.post( |
|
248 | 248 | route_path( |
|
249 | 249 | 'repo_commit_comment_create', |
|
250 | 250 | repo_name=backend.repo_name, commit_id=commit_id), |
|
251 | 251 | params=params) |
|
252 | 252 | |
|
253 | 253 | comments = ChangesetComment.query().all() |
|
254 | 254 | assert len(comments) == 1 |
|
255 | 255 | comment_id = comments[0].comment_id |
|
256 | 256 | |
|
257 | 257 | self.app.post( |
|
258 | 258 | route_path('repo_commit_comment_delete', |
|
259 | 259 | repo_name=backend.repo_name, |
|
260 | 260 | commit_id=commit_id, |
|
261 | 261 | comment_id=comment_id), |
|
262 | 262 | params={'csrf_token': self.csrf_token}) |
|
263 | 263 | |
|
264 | 264 | comments = ChangesetComment.query().all() |
|
265 | 265 | assert len(comments) == 0 |
|
266 | 266 | |
|
267 | 267 | response = self.app.get( |
|
268 | 268 | route_path('repo_commit', |
|
269 | 269 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
270 | 270 | assert_comment_links(response, 0, 0) |
|
271 | 271 | |
|
272 | 272 | def test_edit(self, backend): |
|
273 | 273 | self.log_user() |
|
274 | 274 | commit_id = backend.repo.get_commit('300').raw_id |
|
275 | 275 | text = u'CommentOnCommit' |
|
276 | 276 | |
|
277 | 277 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
278 | 278 | self.app.post( |
|
279 | 279 | route_path( |
|
280 | 280 | 'repo_commit_comment_create', |
|
281 | 281 | repo_name=backend.repo_name, commit_id=commit_id), |
|
282 | 282 | params=params) |
|
283 | 283 | |
|
284 | 284 | comments = ChangesetComment.query().all() |
|
285 | 285 | assert len(comments) == 1 |
|
286 | 286 | comment_id = comments[0].comment_id |
|
287 | 287 | test_text = 'test_text' |
|
288 | 288 | self.app.post( |
|
289 | 289 | route_path( |
|
290 | 290 | 'repo_commit_comment_edit', |
|
291 | 291 | repo_name=backend.repo_name, |
|
292 | 292 | commit_id=commit_id, |
|
293 | 293 | comment_id=comment_id, |
|
294 | 294 | ), |
|
295 | 295 | params={ |
|
296 | 296 | 'csrf_token': self.csrf_token, |
|
297 | 297 | 'text': test_text, |
|
298 | 298 | 'version': '0', |
|
299 | 299 | }) |
|
300 | 300 | |
|
301 | 301 | text_form_db = ChangesetComment.query().filter( |
|
302 | 302 | ChangesetComment.comment_id == comment_id).first().text |
|
303 | 303 | assert test_text == text_form_db |
|
304 | 304 | |
|
305 | 305 | def test_edit_without_change(self, backend): |
|
306 | 306 | self.log_user() |
|
307 | 307 | commit_id = backend.repo.get_commit('300').raw_id |
|
308 | 308 | text = u'CommentOnCommit' |
|
309 | 309 | |
|
310 | 310 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
311 | 311 | self.app.post( |
|
312 | 312 | route_path( |
|
313 | 313 | 'repo_commit_comment_create', |
|
314 | 314 | repo_name=backend.repo_name, commit_id=commit_id), |
|
315 | 315 | params=params) |
|
316 | 316 | |
|
317 | 317 | comments = ChangesetComment.query().all() |
|
318 | 318 | assert len(comments) == 1 |
|
319 | 319 | comment_id = comments[0].comment_id |
|
320 | 320 | |
|
321 | 321 | response = self.app.post( |
|
322 | 322 | route_path( |
|
323 | 323 | 'repo_commit_comment_edit', |
|
324 | 324 | repo_name=backend.repo_name, |
|
325 | 325 | commit_id=commit_id, |
|
326 | 326 | comment_id=comment_id, |
|
327 | 327 | ), |
|
328 | 328 | params={ |
|
329 | 329 | 'csrf_token': self.csrf_token, |
|
330 | 330 | 'text': text, |
|
331 | 331 | 'version': '0', |
|
332 | 332 | }, |
|
333 | 333 | status=404, |
|
334 | 334 | ) |
|
335 | 335 | assert response.status_int == 404 |
|
336 | 336 | |
|
337 | 337 | def test_edit_try_edit_already_edited(self, backend): |
|
338 | 338 | self.log_user() |
|
339 | 339 | commit_id = backend.repo.get_commit('300').raw_id |
|
340 | 340 | text = u'CommentOnCommit' |
|
341 | 341 | |
|
342 | 342 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
343 | 343 | self.app.post( |
|
344 | 344 | route_path( |
|
345 | 345 | 'repo_commit_comment_create', |
|
346 | 346 | repo_name=backend.repo_name, commit_id=commit_id |
|
347 | 347 | ), |
|
348 | 348 | params=params, |
|
349 | 349 | ) |
|
350 | 350 | |
|
351 | 351 | comments = ChangesetComment.query().all() |
|
352 | 352 | assert len(comments) == 1 |
|
353 | 353 | comment_id = comments[0].comment_id |
|
354 | 354 | test_text = 'test_text' |
|
355 | 355 | self.app.post( |
|
356 | 356 | route_path( |
|
357 | 357 | 'repo_commit_comment_edit', |
|
358 | 358 | repo_name=backend.repo_name, |
|
359 | 359 | commit_id=commit_id, |
|
360 | 360 | comment_id=comment_id, |
|
361 | 361 | ), |
|
362 | 362 | params={ |
|
363 | 363 | 'csrf_token': self.csrf_token, |
|
364 | 364 | 'text': test_text, |
|
365 | 365 | 'version': '0', |
|
366 | 366 | } |
|
367 | 367 | ) |
|
368 | 368 | test_text_v2 = 'test_v2' |
|
369 | 369 | response = self.app.post( |
|
370 | 370 | route_path( |
|
371 | 371 | 'repo_commit_comment_edit', |
|
372 | 372 | repo_name=backend.repo_name, |
|
373 | 373 | commit_id=commit_id, |
|
374 | 374 | comment_id=comment_id, |
|
375 | 375 | ), |
|
376 | 376 | params={ |
|
377 | 377 | 'csrf_token': self.csrf_token, |
|
378 | 378 | 'text': test_text_v2, |
|
379 | 379 | 'version': '0', |
|
380 | 380 | }, |
|
381 | 381 | status=409, |
|
382 | 382 | ) |
|
383 | 383 | assert response.status_int == 409 |
|
384 | 384 | |
|
385 | 385 | text_form_db = ChangesetComment.query().filter( |
|
386 | 386 | ChangesetComment.comment_id == comment_id).first().text |
|
387 | 387 | |
|
388 | 388 | assert test_text == text_form_db |
|
389 | 389 | assert test_text_v2 != text_form_db |
|
390 | 390 | |
|
391 | 391 | def test_edit_forbidden_for_immutable_comments(self, backend): |
|
392 | 392 | self.log_user() |
|
393 | 393 | commit_id = backend.repo.get_commit('300').raw_id |
|
394 | 394 | text = u'CommentOnCommit' |
|
395 | 395 | |
|
396 | 396 | params = {'text': text, 'csrf_token': self.csrf_token, 'version': '0'} |
|
397 | 397 | self.app.post( |
|
398 | 398 | route_path( |
|
399 | 399 | 'repo_commit_comment_create', |
|
400 | 400 | repo_name=backend.repo_name, |
|
401 | 401 | commit_id=commit_id, |
|
402 | 402 | ), |
|
403 | 403 | params=params |
|
404 | 404 | ) |
|
405 | 405 | |
|
406 | 406 | comments = ChangesetComment.query().all() |
|
407 | 407 | assert len(comments) == 1 |
|
408 | 408 | comment_id = comments[0].comment_id |
|
409 | 409 | |
|
410 | 410 | comment = ChangesetComment.get(comment_id) |
|
411 | 411 | comment.immutable_state = ChangesetComment.OP_IMMUTABLE |
|
412 | 412 | Session().add(comment) |
|
413 | 413 | Session().commit() |
|
414 | 414 | |
|
415 | 415 | response = self.app.post( |
|
416 | 416 | route_path( |
|
417 | 417 | 'repo_commit_comment_edit', |
|
418 | 418 | repo_name=backend.repo_name, |
|
419 | 419 | commit_id=commit_id, |
|
420 | 420 | comment_id=comment_id, |
|
421 | 421 | ), |
|
422 | 422 | params={ |
|
423 | 423 | 'csrf_token': self.csrf_token, |
|
424 | 424 | 'text': 'test_text', |
|
425 | 425 | }, |
|
426 | 426 | status=403, |
|
427 | 427 | ) |
|
428 | 428 | assert response.status_int == 403 |
|
429 | 429 | |
|
430 | 430 | def test_delete_forbidden_for_immutable_comments(self, backend): |
|
431 | 431 | self.log_user() |
|
432 | 432 | commit_id = backend.repo.get_commit('300').raw_id |
|
433 | 433 | text = u'CommentOnCommit' |
|
434 | 434 | |
|
435 | 435 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
436 | 436 | self.app.post( |
|
437 | 437 | route_path( |
|
438 | 438 | 'repo_commit_comment_create', |
|
439 | 439 | repo_name=backend.repo_name, commit_id=commit_id), |
|
440 | 440 | params=params) |
|
441 | 441 | |
|
442 | 442 | comments = ChangesetComment.query().all() |
|
443 | 443 | assert len(comments) == 1 |
|
444 | 444 | comment_id = comments[0].comment_id |
|
445 | 445 | |
|
446 | 446 | comment = ChangesetComment.get(comment_id) |
|
447 | 447 | comment.immutable_state = ChangesetComment.OP_IMMUTABLE |
|
448 | 448 | Session().add(comment) |
|
449 | 449 | Session().commit() |
|
450 | 450 | |
|
451 | 451 | self.app.post( |
|
452 | 452 | route_path('repo_commit_comment_delete', |
|
453 | 453 | repo_name=backend.repo_name, |
|
454 | 454 | commit_id=commit_id, |
|
455 | 455 | comment_id=comment_id), |
|
456 | 456 | params={'csrf_token': self.csrf_token}, |
|
457 | 457 | status=403) |
|
458 | 458 | |
|
459 | 459 | @pytest.mark.parametrize('renderer, text_input, output', [ |
|
460 | 460 | ('rst', 'plain text', '<p>plain text</p>'), |
|
461 | 461 | ('rst', 'header\n======', '<h1 class="title">header</h1>'), |
|
462 | 462 | ('rst', '*italics*', '<em>italics</em>'), |
|
463 | 463 | ('rst', '**bold**', '<strong>bold</strong>'), |
|
464 | 464 | ('markdown', 'plain text', '<p>plain text</p>'), |
|
465 | 465 | ('markdown', '# header', '<h1>header</h1>'), |
|
466 | 466 | ('markdown', '*italics*', '<em>italics</em>'), |
|
467 | 467 | ('markdown', '**bold**', '<strong>bold</strong>'), |
|
468 | 468 | ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain', |
|
469 | 469 | 'md-header', 'md-italics', 'md-bold', ]) |
|
470 | 470 | def test_preview(self, renderer, text_input, output, backend, xhr_header): |
|
471 | 471 | self.log_user() |
|
472 | 472 | params = { |
|
473 | 473 | 'renderer': renderer, |
|
474 | 474 | 'text': text_input, |
|
475 | 475 | 'csrf_token': self.csrf_token |
|
476 | 476 | } |
|
477 | 477 | commit_id = '0' * 16 # fake this for tests |
|
478 | 478 | response = self.app.post( |
|
479 | 479 | route_path('repo_commit_comment_preview', |
|
480 | 480 | repo_name=backend.repo_name, commit_id=commit_id,), |
|
481 | 481 | params=params, |
|
482 | 482 | extra_environ=xhr_header) |
|
483 | 483 | |
|
484 | 484 | response.mustcontain(output) |
|
485 | 485 | |
|
486 | 486 | |
|
487 | 487 | def assert_comment_links(response, comments, inline_comments): |
|
488 | if comments == 1: | |
|
489 | comments_text = "%d General" % comments | |
|
490 | else: | |
|
491 | comments_text = "%d General" % comments | |
|
492 | ||
|
493 | if inline_comments == 1: | |
|
494 | inline_comments_text = "%d Inline" % inline_comments | |
|
495 | else: | |
|
496 | inline_comments_text = "%d Inline" % inline_comments | |
|
488 | response.mustcontain( | |
|
489 | '<span class="display-none" id="general-comments-count">{}</span>'.format(comments)) | |
|
490 | response.mustcontain( | |
|
491 | '<span class="display-none" id="inline-comments-count">{}</span>'.format(inline_comments)) | |
|
497 | 492 | |
|
498 | if comments: | |
|
499 | response.mustcontain('<a href="#comments">%s</a>,' % comments_text) | |
|
500 | else: | |
|
501 | response.mustcontain(comments_text) | |
|
502 | 493 | |
|
503 | if inline_comments: | |
|
504 | response.mustcontain( | |
|
505 | 'id="inline-comments-counter">%s' % inline_comments_text) | |
|
506 | else: | |
|
507 | response.mustcontain(inline_comments_text) | |
|
494 |
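
The hunk above simplifies `assert_comment_links`: instead of composing the "%d General" / "%d Inline" link texts and asserting on the anchors, the helper now only checks the hidden counter spans that the commit page renders. Roughly how the tests above exercise it (a sketch; the markup lines are the ones asserted via `mustcontain` in the new code):

    # After posting one general comment, the tests above call:
    assert_comment_links(response, ChangesetComment.query().count(), 0)
    # which now only verifies that the rendered page contains the hidden
    # counter spans, e.g.:
    #   <span class="display-none" id="general-comments-count">1</span>
    #   <span class="display-none" id="inline-comments-count">0</span>
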
@@ -1,667 +1,672 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | import lxml.html |
|
24 | 24 | |
|
25 | 25 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
26 | 26 | from rhodecode.tests import assert_session_flash |
|
27 | 27 | from rhodecode.tests.utils import AssertResponse, commit_change |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def route_path(name, params=None, **kwargs): |
|
31 | 31 | import urllib |
|
32 | 32 | |
|
33 | 33 | base_url = { |
|
34 | 34 | 'repo_compare_select': '/{repo_name}/compare', |
|
35 | 35 | 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}', |
|
36 | 36 | }[name].format(**kwargs) |
|
37 | 37 | |
|
38 | 38 | if params: |
|
39 | 39 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
40 | 40 | return base_url |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | @pytest.mark.usefixtures("autologin_user", "app") |
|
44 | 44 | class TestCompareView(object): |
|
45 | 45 | |
|
46 | 46 | def test_compare_index_is_reached_at_least_once(self, backend): |
|
47 | 47 | repo = backend.repo |
|
48 | 48 | self.app.get( |
|
49 | 49 | route_path('repo_compare_select', repo_name=repo.repo_name)) |
|
50 | 50 | |
|
51 | 51 | @pytest.mark.xfail_backends("svn", reason="Requires pull") |
|
52 | 52 | def test_compare_remote_with_different_commit_indexes(self, backend): |
|
53 | 53 | # Preparing the following repository structure: |
|
54 | 54 | # |
|
55 | 55 | # Origin repository has two commits: |
|
56 | 56 | # |
|
57 | 57 | # 0 1 |
|
58 | 58 | # A -- D |
|
59 | 59 | # |
|
60 | 60 | # The fork of it has a few more commits and "D" has a commit index |
|
61 | 61 | # which does not exist in origin. |
|
62 | 62 | # |
|
63 | 63 | # 0 1 2 3 4 |
|
64 | 64 | # A -- -- -- D -- E |
|
65 | 65 | # \- B -- C |
|
66 | 66 | # |
|
67 | 67 | |
|
68 | 68 | fork = backend.create_repo() |
|
69 | 69 | |
|
70 | 70 | # prepare fork |
|
71 | 71 | commit0 = commit_change( |
|
72 | 72 | fork.repo_name, filename='file1', content='A', |
|
73 | 73 | message='A', vcs_type=backend.alias, parent=None, newfile=True) |
|
74 | 74 | |
|
75 | 75 | commit1 = commit_change( |
|
76 | 76 | fork.repo_name, filename='file1', content='B', |
|
77 | 77 | message='B, child of A', vcs_type=backend.alias, parent=commit0) |
|
78 | 78 | |
|
79 | 79 | commit_change( # commit 2 |
|
80 | 80 | fork.repo_name, filename='file1', content='C', |
|
81 | 81 | message='C, child of B', vcs_type=backend.alias, parent=commit1) |
|
82 | 82 | |
|
83 | 83 | commit3 = commit_change( |
|
84 | 84 | fork.repo_name, filename='file1', content='D', |
|
85 | 85 | message='D, child of A', vcs_type=backend.alias, parent=commit0) |
|
86 | 86 | |
|
87 | 87 | commit4 = commit_change( |
|
88 | 88 | fork.repo_name, filename='file1', content='E', |
|
89 | 89 | message='E, child of D', vcs_type=backend.alias, parent=commit3) |
|
90 | 90 | |
|
91 | 91 | # prepare origin repository, taking just the history up to D |
|
92 | 92 | origin = backend.create_repo() |
|
93 | 93 | |
|
94 | 94 | origin_repo = origin.scm_instance(cache=False) |
|
95 | 95 | origin_repo.config.clear_section('hooks') |
|
96 | 96 | origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id]) |
|
97 | 97 | origin_repo = origin.scm_instance(cache=False) # cache rebuild |
|
98 | 98 | |
|
99 | 99 | # Verify test fixture setup |
|
100 | 100 | # This does not work for git |
|
101 | 101 | if backend.alias != 'git': |
|
102 | 102 | assert 5 == len(fork.scm_instance().commit_ids) |
|
103 | 103 | assert 2 == len(origin_repo.commit_ids) |
|
104 | 104 | |
|
105 | 105 | # Comparing the revisions |
|
106 | 106 | response = self.app.get( |
|
107 | 107 | route_path('repo_compare', |
|
108 | 108 | repo_name=origin.repo_name, |
|
109 | 109 | source_ref_type="rev", source_ref=commit3.raw_id, |
|
110 | 110 | target_ref_type="rev", target_ref=commit4.raw_id, |
|
111 | 111 | params=dict(merge='1', target_repo=fork.repo_name) |
|
112 | 112 | )) |
|
113 | 113 | |
|
114 | 114 | compare_page = ComparePage(response) |
|
115 | 115 | compare_page.contains_commits([commit4]) |
|
116 | 116 | |
|
117 | 117 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
118 | 118 | def test_compare_forks_on_branch_extra_commits(self, backend): |
|
119 | 119 | repo1 = backend.create_repo() |
|
120 | 120 | |
|
121 | 121 | # commit something ! |
|
122 | 122 | commit0 = commit_change( |
|
123 | 123 | repo1.repo_name, filename='file1', content='line1\n', |
|
124 | 124 | message='commit1', vcs_type=backend.alias, parent=None, |
|
125 | 125 | newfile=True) |
|
126 | 126 | |
|
127 | 127 | # fork this repo |
|
128 | 128 | repo2 = backend.create_fork() |
|
129 | 129 | |
|
130 | 130 | # add two extra commits into the fork
|
131 | 131 | commit1 = commit_change( |
|
132 | 132 | repo2.repo_name, filename='file1', content='line1\nline2\n', |
|
133 | 133 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
134 | 134 | |
|
135 | 135 | commit2 = commit_change( |
|
136 | 136 | repo2.repo_name, filename='file1', content='line1\nline2\nline3\n', |
|
137 | 137 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
138 | 138 | |
|
139 | 139 | commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME |
|
140 | 140 | commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME |
|
141 | 141 | |
|
142 | 142 | response = self.app.get( |
|
143 | 143 | route_path('repo_compare', |
|
144 | 144 | repo_name=repo1.repo_name, |
|
145 | 145 | source_ref_type="branch", source_ref=commit_id2, |
|
146 | 146 | target_ref_type="branch", target_ref=commit_id1, |
|
147 | 147 | params=dict(merge='1', target_repo=repo2.repo_name) |
|
148 | 148 | )) |
|
149 | 149 | |
|
150 | 150 | response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2)) |
|
151 | 151 | response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1)) |
|
152 | 152 | |
|
153 | 153 | compare_page = ComparePage(response) |
|
154 | 154 | compare_page.contains_change_summary(1, 2, 0) |
|
155 | 155 | compare_page.contains_commits([commit1, commit2]) |
|
156 | 156 | |
|
157 | 157 | anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id) |
|
158 | 158 | compare_page.contains_file_links_and_anchors([('file1', anchor), ]) |
|
159 | 159 | |
|
160 | 160 | # Swap is removed when comparing branches since it's a PR feature and |
|
161 | 161 | # it is then a preview mode |
|
162 | 162 | compare_page.swap_is_hidden() |
|
163 | 163 | compare_page.target_source_are_disabled() |
|
164 | 164 | |
|
165 | 165 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
166 | 166 | def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend): |
|
167 | 167 | repo1 = backend.create_repo() |
|
168 | 168 | |
|
169 | 169 | # commit something ! |
|
170 | 170 | commit0 = commit_change( |
|
171 | 171 | repo1.repo_name, filename='file1', content='line1\n', |
|
172 | 172 | message='commit1', vcs_type=backend.alias, parent=None, |
|
173 | 173 | newfile=True) |
|
174 | 174 | |
|
175 | 175 | # fork this repo |
|
176 | 176 | repo2 = backend.create_fork() |
|
177 | 177 | |
|
178 | 178 | # now commit something to origin repo |
|
179 | 179 | commit_change( |
|
180 | 180 | repo1.repo_name, filename='file2', content='line1file2\n', |
|
181 | 181 | message='commit2', vcs_type=backend.alias, parent=commit0, |
|
182 | 182 | newfile=True) |
|
183 | 183 | |
|
184 | 184 | # add two extra commit into fork |
|
185 | 185 | commit1 = commit_change( |
|
186 | 186 | repo2.repo_name, filename='file1', content='line1\nline2\n', |
|
187 | 187 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
188 | 188 | |
|
189 | 189 | commit2 = commit_change( |
|
190 | 190 | repo2.repo_name, filename='file1', content='line1\nline2\nline3\n', |
|
191 | 191 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
192 | 192 | |
|
193 | 193 | commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME |
|
194 | 194 | commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME |
|
195 | 195 | |
|
196 | 196 | response = self.app.get( |
|
197 | 197 | route_path('repo_compare', |
|
198 | 198 | repo_name=repo1.repo_name, |
|
199 | 199 | source_ref_type="branch", source_ref=commit_id2, |
|
200 | 200 | target_ref_type="branch", target_ref=commit_id1, |
|
201 | 201 | params=dict(merge='1', target_repo=repo2.repo_name), |
|
202 | 202 | )) |
|
203 | 203 | |
|
204 | 204 | response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2)) |
|
205 | 205 | response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1)) |
|
206 | 206 | |
|
207 | 207 | compare_page = ComparePage(response) |
|
208 | 208 | compare_page.contains_change_summary(1, 2, 0) |
|
209 | 209 | compare_page.contains_commits([commit1, commit2]) |
|
210 | 210 | anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id) |
|
211 | 211 | compare_page.contains_file_links_and_anchors([('file1', anchor), ]) |
|
212 | 212 | |
|
213 | 213 | # Swap is removed when comparing branches since it's a PR feature and |
|
214 | 214 | # it is then a preview mode |
|
215 | 215 | compare_page.swap_is_hidden() |
|
216 | 216 | compare_page.target_source_are_disabled() |
|
217 | 217 | |
|
218 | 218 | @pytest.mark.xfail_backends("svn") |
|
219 | 219 | # TODO(marcink): no svn support for comparing two separate repos
|
220 | 220 | def test_compare_of_unrelated_forks(self, backend): |
|
221 | 221 | orig = backend.create_repo(number_of_commits=1) |
|
222 | 222 | fork = backend.create_repo(number_of_commits=1) |
|
223 | 223 | |
|
224 | 224 | response = self.app.get( |
|
225 | 225 | route_path('repo_compare', |
|
226 | 226 | repo_name=orig.repo_name, |
|
227 | 227 | source_ref_type="rev", source_ref="tip", |
|
228 | 228 | target_ref_type="rev", target_ref="tip", |
|
229 | 229 | params=dict(merge='1', target_repo=fork.repo_name), |
|
230 | 230 | ), |
|
231 | 231 | status=302) |
|
232 | 232 | response = response.follow() |
|
233 | 233 | response.mustcontain("Repositories unrelated.") |
|
234 | 234 | |
|
235 | 235 | @pytest.mark.xfail_backends("svn") |
|
236 | 236 | def test_compare_cherry_pick_commits_from_bottom(self, backend): |
|
237 | 237 | |
|
238 | 238 | # repo1: |
|
239 | 239 | # commit0: |
|
240 | 240 | # commit1: |
|
241 | 241 | # repo1-fork- in which we will cherry pick bottom commits |
|
242 | 242 | # commit0: |
|
243 | 243 | # commit1: |
|
244 | 244 | # commit2: x |
|
245 | 245 | # commit3: x |
|
246 | 246 | # commit4: x |
|
247 | 247 | # commit5: |
|
248 | 248 | # make repo1, and commit1+commit2 |
|
249 | 249 | |
|
250 | 250 | repo1 = backend.create_repo() |
|
251 | 251 | |
|
252 | 252 | # commit something ! |
|
253 | 253 | commit0 = commit_change( |
|
254 | 254 | repo1.repo_name, filename='file1', content='line1\n', |
|
255 | 255 | message='commit1', vcs_type=backend.alias, parent=None, |
|
256 | 256 | newfile=True) |
|
257 | 257 | commit1 = commit_change( |
|
258 | 258 | repo1.repo_name, filename='file1', content='line1\nline2\n', |
|
259 | 259 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
260 | 260 | |
|
261 | 261 | # fork this repo |
|
262 | 262 | repo2 = backend.create_fork() |
|
263 | 263 | |
|
264 | 264 | # now make commit3-6 |
|
265 | 265 | commit2 = commit_change( |
|
266 | 266 | repo1.repo_name, filename='file1', content='line1\nline2\nline3\n', |
|
267 | 267 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
268 | 268 | commit3 = commit_change( |
|
269 | 269 | repo1.repo_name, filename='file1', |
|
270 | 270 | content='line1\nline2\nline3\nline4\n', message='commit4', |
|
271 | 271 | vcs_type=backend.alias, parent=commit2) |
|
272 | 272 | commit4 = commit_change( |
|
273 | 273 | repo1.repo_name, filename='file1', |
|
274 | 274 | content='line1\nline2\nline3\nline4\nline5\n', message='commit5', |
|
275 | 275 | vcs_type=backend.alias, parent=commit3) |
|
276 | 276 | commit_change( # commit 5 |
|
277 | 277 | repo1.repo_name, filename='file1', |
|
278 | 278 | content='line1\nline2\nline3\nline4\nline5\nline6\n', |
|
279 | 279 | message='commit6', vcs_type=backend.alias, parent=commit4) |
|
280 | 280 | |
|
281 | 281 | response = self.app.get( |
|
282 | 282 | route_path('repo_compare', |
|
283 | 283 | repo_name=repo2.repo_name, |
|
284 | 284 | # parent of commit2, in target repo2 |
|
285 | 285 | source_ref_type="rev", source_ref=commit1.raw_id, |
|
286 | 286 | target_ref_type="rev", target_ref=commit4.raw_id, |
|
287 | 287 | params=dict(merge='1', target_repo=repo1.repo_name), |
|
288 | 288 | )) |
|
289 | 289 | response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id)) |
|
290 | 290 | response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id)) |
|
291 | 291 | |
|
292 | 292 | # files |
|
293 | 293 | compare_page = ComparePage(response) |
|
294 | 294 | compare_page.contains_change_summary(1, 3, 0) |
|
295 | 295 | compare_page.contains_commits([commit2, commit3, commit4]) |
|
296 | 296 | anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id) |
|
297 | 297 | compare_page.contains_file_links_and_anchors([('file1', anchor),]) |
|
298 | 298 | |
|
299 | 299 | @pytest.mark.xfail_backends("svn") |
|
300 | 300 | def test_compare_cherry_pick_commits_from_top(self, backend): |
|
301 | 301 | # repo1: |
|
302 | 302 | # commit0: |
|
303 | 303 | # commit1: |
|
304 | 304 | # repo1-fork- in which we will cherry pick bottom commits |
|
305 | 305 | # commit0: |
|
306 | 306 | # commit1: |
|
307 | 307 | # commit2: |
|
308 | 308 | # commit3: x |
|
309 | 309 | # commit4: x |
|
310 | 310 | # commit5: x |
|
311 | 311 | |
|
312 | 312 | # make repo1, and commit1+commit2 |
|
313 | 313 | repo1 = backend.create_repo() |
|
314 | 314 | |
|
315 | 315 | # commit something ! |
|
316 | 316 | commit0 = commit_change( |
|
317 | 317 | repo1.repo_name, filename='file1', content='line1\n', |
|
318 | 318 | message='commit1', vcs_type=backend.alias, parent=None, |
|
319 | 319 | newfile=True) |
|
320 | 320 | commit1 = commit_change( |
|
321 | 321 | repo1.repo_name, filename='file1', content='line1\nline2\n', |
|
322 | 322 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
323 | 323 | |
|
324 | 324 | # fork this repo |
|
325 | 325 | backend.create_fork() |
|
326 | 326 | |
|
327 | 327 | # now make commit3-6 |
|
328 | 328 | commit2 = commit_change( |
|
329 | 329 | repo1.repo_name, filename='file1', content='line1\nline2\nline3\n', |
|
330 | 330 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
331 | 331 | commit3 = commit_change( |
|
332 | 332 | repo1.repo_name, filename='file1', |
|
333 | 333 | content='line1\nline2\nline3\nline4\n', message='commit4', |
|
334 | 334 | vcs_type=backend.alias, parent=commit2) |
|
335 | 335 | commit4 = commit_change( |
|
336 | 336 | repo1.repo_name, filename='file1', |
|
337 | 337 | content='line1\nline2\nline3\nline4\nline5\n', message='commit5', |
|
338 | 338 | vcs_type=backend.alias, parent=commit3) |
|
339 | 339 | commit5 = commit_change( |
|
340 | 340 | repo1.repo_name, filename='file1', |
|
341 | 341 | content='line1\nline2\nline3\nline4\nline5\nline6\n', |
|
342 | 342 | message='commit6', vcs_type=backend.alias, parent=commit4) |
|
343 | 343 | |
|
344 | 344 | response = self.app.get( |
|
345 | 345 | route_path('repo_compare', |
|
346 | 346 | repo_name=repo1.repo_name, |
|
347 | 347 | # parent of commit3, not in source repo2 |
|
348 | 348 | source_ref_type="rev", source_ref=commit2.raw_id, |
|
349 | 349 | target_ref_type="rev", target_ref=commit5.raw_id, |
|
350 | 350 | params=dict(merge='1'),)) |
|
351 | 351 | |
|
352 | 352 | response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id)) |
|
353 | 353 | response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id)) |
|
354 | 354 | |
|
355 | 355 | compare_page = ComparePage(response) |
|
356 | 356 | compare_page.contains_change_summary(1, 3, 0) |
|
357 | 357 | compare_page.contains_commits([commit3, commit4, commit5]) |
|
358 | 358 | |
|
359 | 359 | # files |
|
360 | 360 | anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id) |
|
361 | 361 | compare_page.contains_file_links_and_anchors([('file1', anchor),]) |
|
362 | 362 | |
|
363 | 363 | @pytest.mark.xfail_backends("svn") |
|
364 | 364 | def test_compare_remote_branches(self, backend): |
|
365 | 365 | repo1 = backend.repo |
|
366 | 366 | repo2 = backend.create_fork() |
|
367 | 367 | |
|
368 | 368 | commit_id1 = repo1.get_commit(commit_idx=3).raw_id |
|
369 | 369 | commit_id1_short = repo1.get_commit(commit_idx=3).short_id |
|
370 | 370 | commit_id2 = repo1.get_commit(commit_idx=6).raw_id |
|
371 | 371 | commit_id2_short = repo1.get_commit(commit_idx=6).short_id |
|
372 | 372 | |
|
373 | 373 | response = self.app.get( |
|
374 | 374 | route_path('repo_compare', |
|
375 | 375 | repo_name=repo1.repo_name, |
|
376 | 376 | source_ref_type="rev", source_ref=commit_id1, |
|
377 | 377 | target_ref_type="rev", target_ref=commit_id2, |
|
378 | 378 | params=dict(merge='1', target_repo=repo2.repo_name), |
|
379 | 379 | )) |
|
380 | 380 | |
|
381 | 381 | response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1)) |
|
382 | 382 | response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2)) |
|
383 | 383 | |
|
384 | 384 | compare_page = ComparePage(response) |
|
385 | 385 | |
|
386 | 386 | # outgoing commits between those commits |
|
387 | 387 | compare_page.contains_commits( |
|
388 | 388 | [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]]) |
|
389 | 389 | |
|
390 | 390 | # files |
|
391 | 391 | compare_page.contains_file_links_and_anchors([ |
|
392 | 392 | ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)), |
|
393 | 393 | ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)), |
|
394 | 394 | ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)), |
|
395 | 395 | ]) |
|
396 | 396 | |
|
397 | 397 | @pytest.mark.xfail_backends("svn") |
|
398 | 398 | def test_source_repo_new_commits_after_forking_simple_diff(self, backend): |
|
399 | 399 | repo1 = backend.create_repo() |
|
400 | 400 | r1_name = repo1.repo_name |
|
401 | 401 | |
|
402 | 402 | commit0 = commit_change( |
|
403 | 403 | repo=r1_name, filename='file1', |
|
404 | 404 | content='line1', message='commit1', vcs_type=backend.alias, |
|
405 | 405 | newfile=True) |
|
406 | 406 | assert repo1.scm_instance().commit_ids == [commit0.raw_id] |
|
407 | 407 | |
|
408 | 408 | # fork the repo1 |
|
409 | 409 | repo2 = backend.create_fork() |
|
410 | 410 | assert repo2.scm_instance().commit_ids == [commit0.raw_id] |
|
411 | 411 | |
|
412 | 412 | self.r2_id = repo2.repo_id |
|
413 | 413 | r2_name = repo2.repo_name |
|
414 | 414 | |
|
415 | 415 | commit1 = commit_change( |
|
416 | 416 | repo=r2_name, filename='file1-fork', |
|
417 | 417 | content='file1-line1-from-fork', message='commit1-fork', |
|
418 | 418 | vcs_type=backend.alias, parent=repo2.scm_instance()[-1], |
|
419 | 419 | newfile=True) |
|
420 | 420 | |
|
421 | 421 | commit2 = commit_change( |
|
422 | 422 | repo=r2_name, filename='file2-fork', |
|
423 | 423 | content='file2-line1-from-fork', message='commit2-fork', |
|
424 | 424 | vcs_type=backend.alias, parent=commit1, |
|
425 | 425 | newfile=True) |
|
426 | 426 | |
|
427 | 427 | commit_change( # commit 3 |
|
428 | 428 | repo=r2_name, filename='file3-fork', |
|
429 | 429 | content='file3-line1-from-fork', message='commit3-fork', |
|
430 | 430 | vcs_type=backend.alias, parent=commit2, newfile=True) |
|
431 | 431 | |
|
432 | 432 | # compare ! |
|
433 | 433 | commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME |
|
434 | 434 | commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME |
|
435 | 435 | |
|
436 | 436 | response = self.app.get( |
|
437 | 437 | route_path('repo_compare', |
|
438 | 438 | repo_name=r2_name, |
|
439 | 439 | source_ref_type="branch", source_ref=commit_id1, |
|
440 | 440 | target_ref_type="branch", target_ref=commit_id2, |
|
441 | 441 | params=dict(merge='1', target_repo=r1_name), |
|
442 | 442 | )) |
|
443 | 443 | |
|
444 | 444 | response.mustcontain('%s@%s' % (r2_name, commit_id1)) |
|
445 | 445 | response.mustcontain('%s@%s' % (r1_name, commit_id2)) |
|
446 | 446 | response.mustcontain('No files') |
|
447 | 447 | response.mustcontain('No commits in this compare') |
|
448 | 448 | |
|
449 | 449 | commit0 = commit_change( |
|
450 | 450 | repo=r1_name, filename='file2', |
|
451 | 451 | content='line1-added-after-fork', message='commit2-parent', |
|
452 | 452 | vcs_type=backend.alias, parent=None, newfile=True) |
|
453 | 453 | |
|
454 | 454 | # compare ! |
|
455 | 455 | response = self.app.get( |
|
456 | 456 | route_path('repo_compare', |
|
457 | 457 | repo_name=r2_name, |
|
458 | 458 | source_ref_type="branch", source_ref=commit_id1, |
|
459 | 459 | target_ref_type="branch", target_ref=commit_id2, |
|
460 | 460 | params=dict(merge='1', target_repo=r1_name), |
|
461 | 461 | )) |
|
462 | 462 | |
|
463 | 463 | response.mustcontain('%s@%s' % (r2_name, commit_id1)) |
|
464 | 464 | response.mustcontain('%s@%s' % (r1_name, commit_id2)) |
|
465 | 465 | |
|
466 | 466 | response.mustcontain("""commit2-parent""") |
|
467 | 467 | response.mustcontain("""line1-added-after-fork""") |
|
468 | 468 | compare_page = ComparePage(response) |
|
469 | 469 | compare_page.contains_change_summary(1, 1, 0) |
|
470 | 470 | |
|
471 | 471 | @pytest.mark.xfail_backends("svn") |
|
472 | 472 | def test_compare_commits(self, backend, xhr_header): |
|
473 | 473 | commit0 = backend.repo.get_commit(commit_idx=0) |
|
474 | 474 | commit1 = backend.repo.get_commit(commit_idx=1) |
|
475 | 475 | |
|
476 | 476 | response = self.app.get( |
|
477 | 477 | route_path('repo_compare', |
|
478 | 478 | repo_name=backend.repo_name, |
|
479 | 479 | source_ref_type="rev", source_ref=commit0.raw_id, |
|
480 | 480 | target_ref_type="rev", target_ref=commit1.raw_id, |
|
481 | 481 | params=dict(merge='1') |
|
482 | 482 | ), |
|
483 | 483 | extra_environ=xhr_header, ) |
|
484 | 484 | |
|
485 | 485 | # outgoing commits between those commits |
|
486 | 486 | compare_page = ComparePage(response) |
|
487 | 487 | compare_page.contains_commits(commits=[commit1]) |
|
488 | 488 | |
|
489 | 489 | def test_errors_when_comparing_unknown_source_repo(self, backend): |
|
490 | 490 | repo = backend.repo |
|
491 | 491 | badrepo = 'badrepo' |
|
492 | 492 | |
|
493 | 493 | response = self.app.get( |
|
494 | 494 | route_path('repo_compare', |
|
495 | 495 | repo_name=badrepo, |
|
496 | 496 | source_ref_type="rev", source_ref='tip', |
|
497 | 497 | target_ref_type="rev", target_ref='tip', |
|
498 | 498 | params=dict(merge='1', target_repo=repo.repo_name) |
|
499 | 499 | ), |
|
500 | 500 | status=404) |
|
501 | 501 | |
|
502 | 502 | def test_errors_when_comparing_unknown_target_repo(self, backend): |
|
503 | 503 | repo = backend.repo |
|
504 | 504 | badrepo = 'badrepo' |
|
505 | 505 | |
|
506 | 506 | response = self.app.get( |
|
507 | 507 | route_path('repo_compare', |
|
508 | 508 | repo_name=repo.repo_name, |
|
509 | 509 | source_ref_type="rev", source_ref='tip', |
|
510 | 510 | target_ref_type="rev", target_ref='tip', |
|
511 | 511 | params=dict(merge='1', target_repo=badrepo), |
|
512 | 512 | ), |
|
513 | 513 | status=302) |
|
514 | 514 | redirected = response.follow() |
|
515 | 515 | redirected.mustcontain( |
|
516 | 516 | 'Could not find the target repo: `{}`'.format(badrepo)) |
|
517 | 517 | |
|
518 | 518 | def test_compare_not_in_preview_mode(self, backend_stub): |
|
519 | 519 | commit0 = backend_stub.repo.get_commit(commit_idx=0) |
|
520 | 520 | commit1 = backend_stub.repo.get_commit(commit_idx=1) |
|
521 | 521 | |
|
522 | 522 | response = self.app.get( |
|
523 | 523 | route_path('repo_compare', |
|
524 | 524 | repo_name=backend_stub.repo_name, |
|
525 | 525 | source_ref_type="rev", source_ref=commit0.raw_id, |
|
526 | 526 | target_ref_type="rev", target_ref=commit1.raw_id, |
|
527 | 527 | )) |
|
528 | 528 | |
|
529 | 529 | # outgoing commits between those commits |
|
530 | 530 | compare_page = ComparePage(response) |
|
531 | 531 | compare_page.swap_is_visible() |
|
532 | 532 | compare_page.target_source_are_enabled() |
|
533 | 533 | |
|
534 | 534 | def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util): |
|
535 | 535 | orig = backend_hg.create_repo(number_of_commits=1) |
|
536 | 536 | fork = backend_hg.create_fork() |
|
537 | 537 | |
|
538 | 538 | settings_util.create_repo_rhodecode_ui( |
|
539 | 539 | orig, 'extensions', value='', key='largefiles', active=False) |
|
540 | 540 | settings_util.create_repo_rhodecode_ui( |
|
541 | 541 | fork, 'extensions', value='', key='largefiles', active=True) |
|
542 | 542 | |
|
543 | 543 | compare_module = ('rhodecode.lib.vcs.backends.hg.repository.' |
|
544 | 544 | 'MercurialRepository.compare') |
|
545 | 545 | with mock.patch(compare_module) as compare_mock: |
|
546 | 546 | compare_mock.side_effect = RepositoryRequirementError() |
|
547 | 547 | |
|
548 | 548 | response = self.app.get( |
|
549 | 549 | route_path('repo_compare', |
|
550 | 550 | repo_name=orig.repo_name, |
|
551 | 551 | source_ref_type="rev", source_ref="tip", |
|
552 | 552 | target_ref_type="rev", target_ref="tip", |
|
553 | 553 | params=dict(merge='1', target_repo=fork.repo_name), |
|
554 | 554 | ), |
|
555 | 555 | status=302) |
|
556 | 556 | |
|
557 | 557 | assert_session_flash( |
|
558 | 558 | response, |
|
559 | 559 | 'Could not compare repos with different large file settings') |
|
560 | 560 | |
|
561 | 561 | |
|
562 | 562 | @pytest.mark.usefixtures("autologin_user") |
|
563 | 563 | class TestCompareControllerSvn(object): |
|
564 | 564 | |
|
565 | 565 | def test_supports_references_with_path(self, app, backend_svn): |
|
566 | 566 | repo = backend_svn['svn-simple-layout'] |
|
567 | 567 | commit_id = repo.get_commit(commit_idx=-1).raw_id |
|
568 | 568 | response = app.get( |
|
569 | 569 | route_path('repo_compare', |
|
570 | 570 | repo_name=repo.repo_name, |
|
571 | 571 | source_ref_type="tag", |
|
572 | 572 | source_ref="%s@%s" % ('tags/v0.1', commit_id), |
|
573 | 573 | target_ref_type="tag", |
|
574 | 574 | target_ref="%s@%s" % ('tags/v0.2', commit_id), |
|
575 | 575 | params=dict(merge='1'), |
|
576 | 576 | ), |
|
577 | 577 | status=200) |
|
578 | 578 | |
|
579 | 579 | # Expecting no commits, since both paths are at the same revision |
|
580 | 580 | response.mustcontain('No commits in this compare') |
|
581 | 581 | |
|
582 | 582 | # Should find only one file changed when comparing those two tags |
|
583 | 583 | response.mustcontain('example.py') |
|
584 | 584 | compare_page = ComparePage(response) |
|
585 | 585 | compare_page.contains_change_summary(1, 5, 1) |
|
586 | 586 | |
|
587 | 587 | def test_shows_commits_if_different_ids(self, app, backend_svn): |
|
588 | 588 | repo = backend_svn['svn-simple-layout'] |
|
589 | 589 | source_id = repo.get_commit(commit_idx=-6).raw_id |
|
590 | 590 | target_id = repo.get_commit(commit_idx=-1).raw_id |
|
591 | 591 | response = app.get( |
|
592 | 592 | route_path('repo_compare', |
|
593 | 593 | repo_name=repo.repo_name, |
|
594 | 594 | source_ref_type="tag", |
|
595 | 595 | source_ref="%s@%s" % ('tags/v0.1', source_id), |
|
596 | 596 | target_ref_type="tag", |
|
597 | 597 | target_ref="%s@%s" % ('tags/v0.2', target_id), |
|
598 | 598 | params=dict(merge='1') |
|
599 | 599 | ), |
|
600 | 600 | status=200) |
|
601 | 601 | |
|
602 | 602 | # It should show commits |
|
603 | 603 | assert 'No commits in this compare' not in response.body |
|
604 | 604 | |
|
605 | 605 | # Should find only one file changed when comparing those two tags |
|
606 | 606 | response.mustcontain('example.py') |
|
607 | 607 | compare_page = ComparePage(response) |
|
608 | 608 | compare_page.contains_change_summary(1, 5, 1) |
|
609 | 609 | |
|
610 | 610 | |
|
611 | 611 | class ComparePage(AssertResponse): |
|
612 | 612 | """ |
|
613 | 613 | Abstracts the page template from the tests |
|
614 | 614 | """ |
|
615 | 615 | |
|
616 | 616 | def contains_file_links_and_anchors(self, files): |
|
617 | 617 | doc = lxml.html.fromstring(self.response.body) |
|
618 | 618 | for filename, file_id in files: |
|
619 | 619 | self.contains_one_anchor(file_id) |
|
620 | 620 | diffblock = doc.cssselect('[data-f-path="%s"]' % filename) |
|
621 | 621 | assert len(diffblock) == 2 |
|
622 |
|
622 | for lnk in diffblock[0].cssselect('a'): | |
|
623 | if 'permalink' in lnk.text: | |
|
624 | assert '#{}'.format(file_id) in lnk.attrib['href'] | |
|
625 | break | |
|
626 | else: | |
|
627 | pytest.fail('Unable to find permalink') | |
|
623 | 628 | |
|
624 | 629 | def contains_change_summary(self, files_changed, inserted, deleted): |
|
625 | 630 | template = ( |
|
626 | 631 | '{files_changed} file{plural} changed: ' |
|
627 | 632 | '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>') |
|
628 | 633 | self.response.mustcontain(template.format( |
|
629 | 634 | files_changed=files_changed, |
|
630 | 635 | plural="s" if files_changed > 1 else "", |
|
631 | 636 | inserted=inserted, |
|
632 | 637 | deleted=deleted)) |
|
633 | 638 | |
|
634 | 639 | def contains_commits(self, commits, ancestors=None): |
|
635 | 640 | response = self.response |
|
636 | 641 | |
|
637 | 642 | for commit in commits: |
|
638 | 643 | # Expecting to see the commit message in an element which |
|
639 | 644 | # has the ID "c-{commit.raw_id}" |
|
640 | 645 | self.element_contains('#c-' + commit.raw_id, commit.message) |
|
641 | 646 | self.contains_one_link( |
|
642 | 647 | 'r%s:%s' % (commit.idx, commit.short_id), |
|
643 | 648 | self._commit_url(commit)) |
|
644 | 649 | |
|
645 | 650 | if ancestors: |
|
646 | 651 | response.mustcontain('Ancestor') |
|
647 | 652 | for ancestor in ancestors: |
|
648 | 653 | self.contains_one_link( |
|
649 | 654 | ancestor.short_id, self._commit_url(ancestor)) |
|
650 | 655 | |
|
651 | 656 | def _commit_url(self, commit): |
|
652 | 657 | return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id) |
|
653 | 658 | |
|
654 | 659 | def swap_is_hidden(self): |
|
655 | 660 | assert '<a id="btn-swap"' not in self.response.text |
|
656 | 661 | |
|
657 | 662 | def swap_is_visible(self): |
|
658 | 663 | assert '<a id="btn-swap"' in self.response.text |
|
659 | 664 | |
|
660 | 665 | def target_source_are_disabled(self): |
|
661 | 666 | response = self.response |
|
662 | 667 | response.mustcontain("var enable_fields = false;") |
|
663 | 668 | response.mustcontain('.select2("enable", enable_fields)') |
|
664 | 669 | |
|
665 | 670 | def target_source_are_enabled(self): |
|
666 | 671 | response = self.response |
|
667 | 672 | response.mustcontain("var enable_fields = true;") |
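
The replacement body of `contains_file_links_and_anchors` above relies on Python's `for ... else`: the `else` clause runs only when the loop finishes without hitting `break`, which is what turns "no link containing 'permalink' was found" into a `pytest.fail`. A small self-contained illustration of the same idiom (the dict-based links and the helper name are hypothetical, used only for the example):

    # for/else illustration: the else branch fires only if no break happened.
    def find_permalink(links, file_id):
        for lnk in links:
            if 'permalink' in lnk['text']:
                assert '#{}'.format(file_id) in lnk['href']
                break
        else:
            raise AssertionError('Unable to find permalink')

    # hypothetical data, mirroring the anchors checked in the tests above
    find_permalink(
        [{'text': 'Show permalink', 'href': '/repo/compare#a_c-deadbeef-826e8142e6ba'}],
        'a_c-deadbeef-826e8142e6ba')
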
@@ -1,1652 +1,1658 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import mock |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | import rhodecode |
|
24 | 24 | from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason |
|
25 | 25 | from rhodecode.lib.vcs.nodes import FileNode |
|
26 | 26 | from rhodecode.lib import helpers as h |
|
27 | 27 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
28 | 28 | from rhodecode.model.db import ( |
|
29 | 29 | PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository) |
|
30 | 30 | from rhodecode.model.meta import Session |
|
31 | 31 | from rhodecode.model.pull_request import PullRequestModel |
|
32 | 32 | from rhodecode.model.user import UserModel |
|
33 | 33 | from rhodecode.model.comment import CommentsModel |
|
34 | 34 | from rhodecode.tests import ( |
|
35 | 35 | assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | def route_path(name, params=None, **kwargs): |
|
39 | 39 | import urllib |
|
40 | 40 | |
|
41 | 41 | base_url = { |
|
42 | 42 | 'repo_changelog': '/{repo_name}/changelog', |
|
43 | 43 | 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}', |
|
44 | 44 | 'repo_commits': '/{repo_name}/commits', |
|
45 | 45 | 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}', |
|
46 | 46 | 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}', |
|
47 | 47 | 'pullrequest_show_all': '/{repo_name}/pull-request', |
|
48 | 48 | 'pullrequest_show_all_data': '/{repo_name}/pull-request-data', |
|
49 | 49 | 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
50 | 50 | 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations', |
|
51 | 51 | 'pullrequest_new': '/{repo_name}/pull-request/new', |
|
52 | 52 | 'pullrequest_create': '/{repo_name}/pull-request/create', |
|
53 | 53 | 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update', |
|
54 | 54 | 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge', |
|
55 | 55 | 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete', |
|
56 | 56 | 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment', |
|
57 | 57 | 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete', |
|
58 | 58 | 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit', |
|
59 | 59 | }[name].format(**kwargs) |
|
60 | 60 | |
|
61 | 61 | if params: |
|
62 | 62 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
63 | 63 | return base_url |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
67 | 67 | @pytest.mark.backends("git", "hg") |
|
68 | 68 | class TestPullrequestsView(object): |
|
69 | 69 | |
|
70 | 70 | def test_index(self, backend): |
|
71 | 71 | self.app.get(route_path( |
|
72 | 72 | 'pullrequest_new', |
|
73 | 73 | repo_name=backend.repo_name)) |
|
74 | 74 | |
|
75 | 75 | def test_option_menu_create_pull_request_exists(self, backend): |
|
76 | 76 | repo_name = backend.repo_name |
|
77 | 77 | response = self.app.get(h.route_path('repo_summary', repo_name=repo_name)) |
|
78 | 78 | |
|
79 | 79 | create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path( |
|
80 | 80 | 'pullrequest_new', repo_name=repo_name) |
|
81 | 81 | response.mustcontain(create_pr_link) |
|
82 | 82 | |
|
83 | 83 | def test_create_pr_form_with_raw_commit_id(self, backend): |
|
84 | 84 | repo = backend.repo |
|
85 | 85 | |
|
86 | 86 | self.app.get( |
|
87 | 87 | route_path('pullrequest_new', repo_name=repo.repo_name, |
|
88 | 88 | commit=repo.get_commit().raw_id), |
|
89 | 89 | status=200) |
|
90 | 90 | |
|
91 | 91 | @pytest.mark.parametrize('pr_merge_enabled', [True, False]) |
|
92 | 92 | @pytest.mark.parametrize('range_diff', ["0", "1"]) |
|
93 | 93 | def test_show(self, pr_util, pr_merge_enabled, range_diff): |
|
94 | 94 | pull_request = pr_util.create_pull_request( |
|
95 | 95 | mergeable=pr_merge_enabled, enable_notifications=False) |
|
96 | 96 | |
|
97 | 97 | response = self.app.get(route_path( |
|
98 | 98 | 'pullrequest_show', |
|
99 | 99 | repo_name=pull_request.target_repo.scm_instance().name, |
|
100 | 100 | pull_request_id=pull_request.pull_request_id, |
|
101 | 101 | params={'range-diff': range_diff})) |
|
102 | 102 | |
|
103 | 103 | for commit_id in pull_request.revisions: |
|
104 | 104 | response.mustcontain(commit_id) |
|
105 | 105 | |
|
106 | 106 | response.mustcontain(pull_request.target_ref_parts.type) |
|
107 | 107 | response.mustcontain(pull_request.target_ref_parts.name) |
|
108 | 108 | |
|
109 | 109 | response.mustcontain('class="pull-request-merge"') |
|
110 | 110 | |
|
111 | 111 | if pr_merge_enabled: |
|
112 | 112 | response.mustcontain('Pull request reviewer approval is pending') |
|
113 | 113 | else: |
|
114 | 114 | response.mustcontain('Server-side pull request merging is disabled.') |
|
115 | 115 | |
|
116 | 116 | if range_diff == "1": |
|
117 | 117 | response.mustcontain('Turn off: Show the diff as commit range') |
|
118 | 118 | |
|
119 | 119 | def test_show_versions_of_pr(self, backend, csrf_token): |
|
120 | 120 | commits = [ |
|
121 | 121 | {'message': 'initial-commit', |
|
122 | 122 | 'added': [FileNode('test-file.txt', 'LINE1\n')]}, |
|
123 | 123 | |
|
124 | 124 | {'message': 'commit-1', |
|
125 | 125 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]}, |
|
126 | 126 | # Above is the initial version of PR that changes a single line |
|
127 | 127 | |
|
128 | 128 | # from now on we'll add 3x commit adding a nother line on each step |
|
129 | 129 | {'message': 'commit-2', |
|
130 | 130 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]}, |
|
131 | 131 | |
|
132 | 132 | {'message': 'commit-3', |
|
133 | 133 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]}, |
|
134 | 134 | |
|
135 | 135 | {'message': 'commit-4', |
|
136 | 136 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]}, |
|
137 | 137 | ] |
|
138 | 138 | |
|
139 | 139 | commit_ids = backend.create_master_repo(commits) |
|
140 | 140 | target = backend.create_repo(heads=['initial-commit']) |
|
141 | 141 | source = backend.create_repo(heads=['commit-1']) |
|
142 | 142 | source_repo_name = source.repo_name |
|
143 | 143 | target_repo_name = target.repo_name |
|
144 | 144 | |
|
145 | 145 | target_ref = 'branch:{branch}:{commit_id}'.format( |
|
146 | 146 | branch=backend.default_branch_name, commit_id=commit_ids['initial-commit']) |
|
147 | 147 | source_ref = 'branch:{branch}:{commit_id}'.format( |
|
148 | 148 | branch=backend.default_branch_name, commit_id=commit_ids['commit-1']) |
|
149 | 149 | |
|
150 | 150 | response = self.app.post( |
|
151 | 151 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
152 | 152 | [ |
|
153 |
('source_repo', source.repo_name),
|
|
153 | ('source_repo', source_repo_name), | |
|
154 | 154 | ('source_ref', source_ref), |
|
155 |
('target_repo', target.repo_name),
|
|
155 | ('target_repo', target_repo_name), | |
|
156 | 156 | ('target_ref', target_ref), |
|
157 | 157 | ('common_ancestor', commit_ids['initial-commit']), |
|
158 | 158 | ('pullrequest_title', 'Title'), |
|
159 | 159 | ('pullrequest_desc', 'Description'), |
|
160 | 160 | ('description_renderer', 'markdown'), |
|
161 | 161 | ('__start__', 'review_members:sequence'), |
|
162 | 162 | ('__start__', 'reviewer:mapping'), |
|
163 | 163 | ('user_id', '1'), |
|
164 | 164 | ('__start__', 'reasons:sequence'), |
|
165 | 165 | ('reason', 'Some reason'), |
|
166 | 166 | ('__end__', 'reasons:sequence'), |
|
167 | 167 | ('__start__', 'rules:sequence'), |
|
168 | 168 | ('__end__', 'rules:sequence'), |
|
169 | 169 | ('mandatory', 'False'), |
|
170 | 170 | ('__end__', 'reviewer:mapping'), |
|
171 | 171 | ('__end__', 'review_members:sequence'), |
|
172 | 172 | ('__start__', 'revisions:sequence'), |
|
173 | 173 | ('revisions', commit_ids['commit-1']), |
|
174 | 174 | ('__end__', 'revisions:sequence'), |
|
175 | 175 | ('user', ''), |
|
176 | 176 | ('csrf_token', csrf_token), |
|
177 | 177 | ], |
|
178 | 178 | status=302) |
|
179 | 179 | |
|
180 | 180 | location = response.headers['Location'] |
|
181 | 181 | |
|
182 | 182 | pull_request_id = location.rsplit('/', 1)[1] |
|
183 | 183 | assert pull_request_id != 'new' |
|
184 | 184 | pull_request = PullRequest.get(int(pull_request_id)) |
|
185 | 185 | |
|
186 | 186 | pull_request_id = pull_request.pull_request_id |
|
187 | 187 | |
|
188 | 188 | # Show initial version of PR |
|
189 | 189 | response = self.app.get( |
|
190 | 190 | route_path('pullrequest_show', |
|
191 | 191 | repo_name=target_repo_name, |
|
192 | 192 | pull_request_id=pull_request_id)) |
|
193 | 193 | |
|
194 | 194 | response.mustcontain('commit-1') |
|
195 | 195 | response.mustcontain(no=['commit-2']) |
|
196 | 196 | response.mustcontain(no=['commit-3']) |
|
197 | 197 | response.mustcontain(no=['commit-4']) |
|
198 | 198 | |
|
199 | 199 | response.mustcontain('cb-addition"></span><span>LINE2</span>') |
|
200 | 200 | response.mustcontain(no=['LINE3']) |
|
201 | 201 | response.mustcontain(no=['LINE4']) |
|
202 | 202 | response.mustcontain(no=['LINE5']) |
|
203 | 203 | |
|
204 | 204 | # update PR #1 |
|
205 | 205 | source_repo = Repository.get_by_repo_name(source_repo_name) |
|
206 | 206 | backend.pull_heads(source_repo, heads=['commit-2']) |
|
207 | 207 | response = self.app.post( |
|
208 | 208 | route_path('pullrequest_update', |
|
209 | 209 | repo_name=target_repo_name, pull_request_id=pull_request_id), |
|
210 | 210 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
211 | 211 | |
|
212 | 212 | # update PR #2 |
|
213 | 213 | source_repo = Repository.get_by_repo_name(source_repo_name) |
|
214 | 214 | backend.pull_heads(source_repo, heads=['commit-3']) |
|
215 | 215 | response = self.app.post( |
|
216 | 216 | route_path('pullrequest_update', |
|
217 | 217 | repo_name=target_repo_name, pull_request_id=pull_request_id), |
|
218 | 218 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
219 | 219 | |
|
220 | 220 | # update PR #3 |
|
221 | 221 | source_repo = Repository.get_by_repo_name(source_repo_name) |
|
222 | 222 | backend.pull_heads(source_repo, heads=['commit-4']) |
|
223 | 223 | response = self.app.post( |
|
224 | 224 | route_path('pullrequest_update', |
|
225 | 225 | repo_name=target_repo_name, pull_request_id=pull_request_id), |
|
226 | 226 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
227 | 227 | |
|
228 | 228 | # Show final version ! |
|
229 | 229 | response = self.app.get( |
|
230 | 230 | route_path('pullrequest_show', |
|
231 | 231 | repo_name=target_repo_name, |
|
232 | 232 | pull_request_id=pull_request_id)) |
|
233 | 233 | |
|
234 | 234 | # 3 updates, and the latest == 4 |
|
235 | 235 | response.mustcontain('4 versions available for this pull request') |
|
236 | 236 | response.mustcontain(no=['rhodecode diff rendering error']) |
|
237 | 237 | |
|
238 | 238 | # initial show must have 3 commits, and 3 adds |
|
239 | 239 | response.mustcontain('commit-1') |
|
240 | 240 | response.mustcontain('commit-2') |
|
241 | 241 | response.mustcontain('commit-3') |
|
242 | 242 | response.mustcontain('commit-4') |
|
243 | 243 | |
|
244 | 244 | response.mustcontain('cb-addition"></span><span>LINE2</span>') |
|
245 | 245 | response.mustcontain('cb-addition"></span><span>LINE3</span>') |
|
246 | 246 | response.mustcontain('cb-addition"></span><span>LINE4</span>') |
|
247 | 247 | response.mustcontain('cb-addition"></span><span>LINE5</span>') |
|
248 | 248 | |
|
249 | 249 | # fetch versions |
|
250 | 250 | pr = PullRequest.get(pull_request_id) |
|
251 | 251 | versions = [x.pull_request_version_id for x in pr.versions.all()] |
|
252 | 252 | assert len(versions) == 3 |
|
253 | 253 | |
|
254 | 254 | # show v1,v2,v3,v4 |
|
255 | 255 | def cb_line(text): |
|
256 | 256 | return 'cb-addition"></span><span>{}</span>'.format(text) |
|
257 | 257 | |
|
258 | 258 | def cb_context(text): |
|
259 | 259 | return '<span class="cb-code"><span class="cb-action cb-context">' \ |
|
260 | 260 | '</span><span>{}</span></span>'.format(text) |
|
261 | 261 | |
|
262 | 262 | commit_tests = { |
|
263 | 263 | # in response, not in response |
|
264 | 264 | 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']), |
|
265 | 265 | 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']), |
|
266 | 266 | 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']), |
|
267 | 267 | 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []), |
|
268 | 268 | } |
|
269 | 269 | diff_tests = { |
|
270 | 270 | 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']), |
|
271 | 271 | 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']), |
|
272 | 272 | 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']), |
|
273 | 273 | 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []), |
|
274 | 274 | } |
|
275 | 275 | for idx, ver in enumerate(versions, 1): |
|
276 | 276 | |
|
277 | 277 | response = self.app.get( |
|
278 | 278 | route_path('pullrequest_show', |
|
279 | 279 | repo_name=target_repo_name, |
|
280 | 280 | pull_request_id=pull_request_id, |
|
281 | 281 | params={'version': ver})) |
|
282 | 282 | |
|
283 | 283 | response.mustcontain(no=['rhodecode diff rendering error']) |
|
284 | 284 | response.mustcontain('Showing changes at v{}'.format(idx)) |
|
285 | 285 | |
|
286 | 286 | yes, no = commit_tests[idx] |
|
287 | 287 | for y in yes: |
|
288 | 288 | response.mustcontain(y) |
|
289 | 289 | for n in no: |
|
290 | 290 | response.mustcontain(no=n) |
|
291 | 291 | |
|
292 | 292 | yes, no = diff_tests[idx] |
|
293 | 293 | for y in yes: |
|
294 | 294 | response.mustcontain(cb_line(y)) |
|
295 | 295 | for n in no: |
|
296 | 296 | response.mustcontain(no=n) |
|
297 | 297 | |
|
298 | 298 | # show diff between versions |
|
299 | 299 | diff_compare_tests = { |
|
300 | 300 | 1: (['LINE3'], ['LINE1', 'LINE2']), |
|
301 | 301 | 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']), |
|
302 | 302 | 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']), |
|
303 | 303 | } |
|
304 | 304 | for idx, ver in enumerate(versions, 1): |
|
305 | 305 | adds, context = diff_compare_tests[idx] |
|
306 | 306 | |
|
307 | 307 | to_ver = ver+1 |
|
308 | 308 | if idx == 3: |
|
309 | 309 | to_ver = 'latest' |
|
310 | 310 | |
|
311 | 311 | response = self.app.get( |
|
312 | 312 | route_path('pullrequest_show', |
|
313 | 313 | repo_name=target_repo_name, |
|
314 | 314 | pull_request_id=pull_request_id, |
|
315 | 315 | params={'from_version': versions[0], 'version': to_ver})) |
|
316 | 316 | |
|
317 | 317 | response.mustcontain(no=['rhodecode diff rendering error']) |
|
318 | 318 | |
|
319 | 319 | for a in adds: |
|
320 | 320 | response.mustcontain(cb_line(a)) |
|
321 | 321 | for c in context: |
|
322 | 322 | response.mustcontain(cb_context(c)) |
|
323 | 323 | |
|
324 | 324 | # test version v2 -> v3 |
|
325 | 325 | response = self.app.get( |
|
326 | 326 | route_path('pullrequest_show', |
|
327 | 327 | repo_name=target_repo_name, |
|
328 | 328 | pull_request_id=pull_request_id, |
|
329 | 329 | params={'from_version': versions[1], 'version': versions[2]})) |
|
330 | 330 | |
|
331 | 331 | response.mustcontain(cb_context('LINE1')) |
|
332 | 332 | response.mustcontain(cb_context('LINE2')) |
|
333 | 333 | response.mustcontain(cb_context('LINE3')) |
|
334 | 334 | response.mustcontain(cb_line('LINE4')) |
|
335 | 335 | |
|
336 | 336 | def test_close_status_visibility(self, pr_util, user_util, csrf_token): |
|
337 | 337 | # Logout |
|
338 | 338 | response = self.app.post( |
|
339 | 339 | h.route_path('logout'), |
|
340 | 340 | params={'csrf_token': csrf_token}) |
|
341 | 341 | # Login as regular user |
|
342 | 342 | response = self.app.post(h.route_path('login'), |
|
343 | 343 | {'username': TEST_USER_REGULAR_LOGIN, |
|
344 | 344 | 'password': 'test12'}) |
|
345 | 345 | |
|
346 | 346 | pull_request = pr_util.create_pull_request( |
|
347 | 347 | author=TEST_USER_REGULAR_LOGIN) |
|
348 | 348 | |
|
349 | 349 | response = self.app.get(route_path( |
|
350 | 350 | 'pullrequest_show', |
|
351 | 351 | repo_name=pull_request.target_repo.scm_instance().name, |
|
352 | 352 | pull_request_id=pull_request.pull_request_id)) |
|
353 | 353 | |
|
354 | 354 | response.mustcontain('Server-side pull request merging is disabled.') |
|
355 | 355 | |
|
356 | 356 | assert_response = response.assert_response() |
|
357 | 357 | # for regular user without a merge permissions, we don't see it |
|
358 | 358 | assert_response.no_element_exists('#close-pull-request-action') |
|
359 | 359 | |
|
360 | 360 | user_util.grant_user_permission_to_repo( |
|
361 | 361 | pull_request.target_repo, |
|
362 | 362 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), |
|
363 | 363 | 'repository.write') |
|
364 | 364 | response = self.app.get(route_path( |
|
365 | 365 | 'pullrequest_show', |
|
366 | 366 | repo_name=pull_request.target_repo.scm_instance().name, |
|
367 | 367 | pull_request_id=pull_request.pull_request_id)) |
|
368 | 368 | |
|
369 | 369 | response.mustcontain('Server-side pull request merging is disabled.') |
|
370 | 370 | |
|
371 | 371 | assert_response = response.assert_response() |
|
372 | 372 | # now regular user has a merge permissions, we have CLOSE button |
|
373 | 373 | assert_response.one_element_exists('#close-pull-request-action') |
|
374 | 374 | |
|
375 | 375 | def test_show_invalid_commit_id(self, pr_util): |
|
376 | 376 | # Simulating invalid revisions which will cause a lookup error |
|
377 | 377 | pull_request = pr_util.create_pull_request() |
|
378 | 378 | pull_request.revisions = ['invalid'] |
|
379 | 379 | Session().add(pull_request) |
|
380 | 380 | Session().commit() |
|
381 | 381 | |
|
382 | 382 | response = self.app.get(route_path( |
|
383 | 383 | 'pullrequest_show', |
|
384 | 384 | repo_name=pull_request.target_repo.scm_instance().name, |
|
385 | 385 | pull_request_id=pull_request.pull_request_id)) |
|
386 | 386 | |
|
387 | 387 | for commit_id in pull_request.revisions: |
|
388 | 388 | response.mustcontain(commit_id) |
|
389 | 389 | |
|
390 | 390 | def test_show_invalid_source_reference(self, pr_util): |
|
391 | 391 | pull_request = pr_util.create_pull_request() |
|
392 | 392 | pull_request.source_ref = 'branch:b:invalid' |
|
393 | 393 | Session().add(pull_request) |
|
394 | 394 | Session().commit() |
|
395 | 395 | |
|
396 | 396 | self.app.get(route_path( |
|
397 | 397 | 'pullrequest_show', |
|
398 | 398 | repo_name=pull_request.target_repo.scm_instance().name, |
|
399 | 399 | pull_request_id=pull_request.pull_request_id)) |
|
400 | 400 | |
|
401 | 401 | def test_edit_title_description(self, pr_util, csrf_token): |
|
402 | 402 | pull_request = pr_util.create_pull_request() |
|
403 | 403 | pull_request_id = pull_request.pull_request_id |
|
404 | 404 | |
|
405 | 405 | response = self.app.post( |
|
406 | 406 | route_path('pullrequest_update', |
|
407 | 407 | repo_name=pull_request.target_repo.repo_name, |
|
408 | 408 | pull_request_id=pull_request_id), |
|
409 | 409 | params={ |
|
410 | 410 | 'edit_pull_request': 'true', |
|
411 | 411 | 'title': 'New title', |
|
412 | 412 | 'description': 'New description', |
|
413 | 413 | 'csrf_token': csrf_token}) |
|
414 | 414 | |
|
415 | 415 | assert_session_flash( |
|
416 | 416 | response, u'Pull request title & description updated.', |
|
417 | 417 | category='success') |
|
418 | 418 | |
|
419 | 419 | pull_request = PullRequest.get(pull_request_id) |
|
420 | 420 | assert pull_request.title == 'New title' |
|
421 | 421 | assert pull_request.description == 'New description' |
|
422 | 422 | |
|
423 | 423 | def test_edit_title_description_closed(self, pr_util, csrf_token): |
|
424 | 424 | pull_request = pr_util.create_pull_request() |
|
425 | 425 | pull_request_id = pull_request.pull_request_id |
|
426 | 426 | repo_name = pull_request.target_repo.repo_name |
|
427 | 427 | pr_util.close() |
|
428 | 428 | |
|
429 | 429 | response = self.app.post( |
|
430 | 430 | route_path('pullrequest_update', |
|
431 | 431 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
432 | 432 | params={ |
|
433 | 433 | 'edit_pull_request': 'true', |
|
434 | 434 | 'title': 'New title', |
|
435 | 435 | 'description': 'New description', |
|
436 | 436 | 'csrf_token': csrf_token}, status=200) |
|
437 | 437 | assert_session_flash( |
|
438 | 438 | response, u'Cannot update closed pull requests.', |
|
439 | 439 | category='error') |
|
440 | 440 | |
|
441 | 441 | def test_update_invalid_source_reference(self, pr_util, csrf_token): |
|
442 | 442 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason |
|
443 | 443 | |
|
444 | 444 | pull_request = pr_util.create_pull_request() |
|
445 | 445 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' |
|
446 | 446 | Session().add(pull_request) |
|
447 | 447 | Session().commit() |
|
448 | 448 | |
|
449 | 449 | pull_request_id = pull_request.pull_request_id |
|
450 | 450 | |
|
451 | 451 | response = self.app.post( |
|
452 | 452 | route_path('pullrequest_update', |
|
453 | 453 | repo_name=pull_request.target_repo.repo_name, |
|
454 | 454 | pull_request_id=pull_request_id), |
|
455 | 455 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
456 | 456 | |
|
457 | 457 | expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[ |
|
458 | 458 | UpdateFailureReason.MISSING_SOURCE_REF]) |
|
459 | 459 | assert_session_flash(response, expected_msg, category='error') |
|
460 | 460 | |
|
461 | 461 | def test_missing_target_reference(self, pr_util, csrf_token): |
|
462 | 462 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
463 | 463 | pull_request = pr_util.create_pull_request( |
|
464 | 464 | approved=True, mergeable=True) |
|
465 | 465 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' |
|
466 | 466 | pull_request.target_ref = unicode_reference |
|
467 | 467 | Session().add(pull_request) |
|
468 | 468 | Session().commit() |
|
469 | 469 | |
|
470 | 470 | pull_request_id = pull_request.pull_request_id |
|
471 | 471 | pull_request_url = route_path( |
|
472 | 472 | 'pullrequest_show', |
|
473 | 473 | repo_name=pull_request.target_repo.repo_name, |
|
474 | 474 | pull_request_id=pull_request_id) |
|
475 | 475 | |
|
476 | 476 | response = self.app.get(pull_request_url) |
|
477 | 477 | target_ref_id = 'invalid-branch' |
|
478 | 478 | merge_resp = MergeResponse( |
|
479 | 479 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, |
|
480 | 480 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) |
|
481 | 481 | response.assert_response().element_contains( |
|
482 | 482 | 'div[data-role="merge-message"]', merge_resp.merge_status_message) |
|
483 | 483 | |
|
484 | 484 | def test_comment_and_close_pull_request_custom_message_approved( |
|
485 | 485 | self, pr_util, csrf_token, xhr_header): |
|
486 | 486 | |
|
487 | 487 | pull_request = pr_util.create_pull_request(approved=True) |
|
488 | 488 | pull_request_id = pull_request.pull_request_id |
|
489 | 489 | author = pull_request.user_id |
|
490 | 490 | repo = pull_request.target_repo.repo_id |
|
491 | 491 | |
|
492 | 492 | self.app.post( |
|
493 | 493 | route_path('pullrequest_comment_create', |
|
494 | 494 | repo_name=pull_request.target_repo.scm_instance().name, |
|
495 | 495 | pull_request_id=pull_request_id), |
|
496 | 496 | params={ |
|
497 | 497 | 'close_pull_request': '1', |
|
498 | 498 | 'text': 'Closing a PR', |
|
499 | 499 | 'csrf_token': csrf_token}, |
|
500 | 500 | extra_environ=xhr_header,) |
|
501 | 501 | |
|
502 | 502 | journal = UserLog.query()\ |
|
503 | 503 | .filter(UserLog.user_id == author)\ |
|
504 | 504 | .filter(UserLog.repository_id == repo) \ |
|
505 | 505 | .order_by(UserLog.user_log_id.asc()) \ |
|
506 | 506 | .all() |
|
507 | 507 | assert journal[-1].action == 'repo.pull_request.close' |
|
508 | 508 | |
|
509 | 509 | pull_request = PullRequest.get(pull_request_id) |
|
510 | 510 | assert pull_request.is_closed() |
|
511 | 511 | |
|
512 | 512 | status = ChangesetStatusModel().get_status( |
|
513 | 513 | pull_request.source_repo, pull_request=pull_request) |
|
514 | 514 | assert status == ChangesetStatus.STATUS_APPROVED |
|
515 | 515 | comments = ChangesetComment().query() \ |
|
516 | 516 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
517 | 517 | .order_by(ChangesetComment.comment_id.asc())\ |
|
518 | 518 | .all() |
|
519 | 519 | assert comments[-1].text == 'Closing a PR' |
|
520 | 520 | |
|
521 | 521 | def test_comment_force_close_pull_request_rejected( |
|
522 | 522 | self, pr_util, csrf_token, xhr_header): |
|
523 | 523 | pull_request = pr_util.create_pull_request() |
|
524 | 524 | pull_request_id = pull_request.pull_request_id |
|
525 | 525 | PullRequestModel().update_reviewers( |
|
526 | 526 | pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])], |
|
527 | 527 | pull_request.author) |
|
528 | 528 | author = pull_request.user_id |
|
529 | 529 | repo = pull_request.target_repo.repo_id |
|
530 | 530 | |
|
531 | 531 | self.app.post( |
|
532 | 532 | route_path('pullrequest_comment_create', |
|
533 | 533 | repo_name=pull_request.target_repo.scm_instance().name, |
|
534 | 534 | pull_request_id=pull_request_id), |
|
535 | 535 | params={ |
|
536 | 536 | 'close_pull_request': '1', |
|
537 | 537 | 'csrf_token': csrf_token}, |
|
538 | 538 | extra_environ=xhr_header) |
|
539 | 539 | |
|
540 | 540 | pull_request = PullRequest.get(pull_request_id) |
|
541 | 541 | |
|
542 | 542 | journal = UserLog.query()\ |
|
543 | 543 | .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ |
|
544 | 544 | .order_by(UserLog.user_log_id.asc()) \ |
|
545 | 545 | .all() |
|
546 | 546 | assert journal[-1].action == 'repo.pull_request.close' |
|
547 | 547 | |
|
548 | 548 | # check only the latest status, not the review status |
|
549 | 549 | status = ChangesetStatusModel().get_status( |
|
550 | 550 | pull_request.source_repo, pull_request=pull_request) |
|
551 | 551 | assert status == ChangesetStatus.STATUS_REJECTED |
|
552 | 552 | |
|
553 | 553 | def test_comment_and_close_pull_request( |
|
554 | 554 | self, pr_util, csrf_token, xhr_header): |
|
555 | 555 | pull_request = pr_util.create_pull_request() |
|
556 | 556 | pull_request_id = pull_request.pull_request_id |
|
557 | 557 | |
|
558 | 558 | response = self.app.post( |
|
559 | 559 | route_path('pullrequest_comment_create', |
|
560 | 560 | repo_name=pull_request.target_repo.scm_instance().name, |
|
561 | 561 | pull_request_id=pull_request.pull_request_id), |
|
562 | 562 | params={ |
|
563 | 563 | 'close_pull_request': 'true', |
|
564 | 564 | 'csrf_token': csrf_token}, |
|
565 | 565 | extra_environ=xhr_header) |
|
566 | 566 | |
|
567 | 567 | assert response.json |
|
568 | 568 | |
|
569 | 569 | pull_request = PullRequest.get(pull_request_id) |
|
570 | 570 | assert pull_request.is_closed() |
|
571 | 571 | |
|
572 | 572 | # check only the latest status, not the review status |
|
573 | 573 | status = ChangesetStatusModel().get_status( |
|
574 | 574 | pull_request.source_repo, pull_request=pull_request) |
|
575 | 575 | assert status == ChangesetStatus.STATUS_REJECTED |
|
576 | 576 | |
|
577 | 577 | def test_comment_and_close_pull_request_try_edit_comment( |
|
578 | 578 | self, pr_util, csrf_token, xhr_header |
|
579 | 579 | ): |
|
580 | 580 | pull_request = pr_util.create_pull_request() |
|
581 | 581 | pull_request_id = pull_request.pull_request_id |
|
582 | 582 | target_scm = pull_request.target_repo.scm_instance() |
|
583 | 583 | target_scm_name = target_scm.name |
|
584 | 584 | |
|
585 | 585 | response = self.app.post( |
|
586 | 586 | route_path( |
|
587 | 587 | 'pullrequest_comment_create', |
|
588 | 588 | repo_name=target_scm_name, |
|
589 | 589 | pull_request_id=pull_request_id, |
|
590 | 590 | ), |
|
591 | 591 | params={ |
|
592 | 592 | 'close_pull_request': 'true', |
|
593 | 593 | 'csrf_token': csrf_token, |
|
594 | 594 | }, |
|
595 | 595 | extra_environ=xhr_header) |
|
596 | 596 | |
|
597 | 597 | assert response.json |
|
598 | 598 | |
|
599 | 599 | pull_request = PullRequest.get(pull_request_id) |
|
600 | 600 | target_scm = pull_request.target_repo.scm_instance() |
|
601 | 601 | target_scm_name = target_scm.name |
|
602 | 602 | assert pull_request.is_closed() |
|
603 | 603 | |
|
604 | 604 | # check only the latest status, not the review status |
|
605 | 605 | status = ChangesetStatusModel().get_status( |
|
606 | 606 | pull_request.source_repo, pull_request=pull_request) |
|
607 | 607 | assert status == ChangesetStatus.STATUS_REJECTED |
|
608 | 608 | |
|
609 | 609 | comment_id = response.json.get('comment_id', None) |
|
610 | 610 | test_text = 'test' |
|
611 | 611 | response = self.app.post( |
|
612 | 612 | route_path( |
|
613 | 613 | 'pullrequest_comment_edit', |
|
614 | 614 | repo_name=target_scm_name, |
|
615 | 615 | pull_request_id=pull_request_id, |
|
616 | 616 | comment_id=comment_id, |
|
617 | 617 | ), |
|
618 | 618 | extra_environ=xhr_header, |
|
619 | 619 | params={ |
|
620 | 620 | 'csrf_token': csrf_token, |
|
621 | 621 | 'text': test_text, |
|
622 | 622 | }, |
|
623 | 623 | status=403, |
|
624 | 624 | ) |
|
625 | 625 | assert response.status_int == 403 |
|
626 | 626 | |
|
627 | 627 | def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header): |
|
628 | 628 | pull_request = pr_util.create_pull_request() |
|
629 | 629 | target_scm = pull_request.target_repo.scm_instance() |
|
630 | 630 | target_scm_name = target_scm.name |
|
631 | 631 | |
|
632 | 632 | response = self.app.post( |
|
633 | 633 | route_path( |
|
634 | 634 | 'pullrequest_comment_create', |
|
635 | 635 | repo_name=target_scm_name, |
|
636 | 636 | pull_request_id=pull_request.pull_request_id), |
|
637 | 637 | params={ |
|
638 | 638 | 'csrf_token': csrf_token, |
|
639 | 639 | 'text': 'init', |
|
640 | 640 | }, |
|
641 | 641 | extra_environ=xhr_header, |
|
642 | 642 | ) |
|
643 | 643 | assert response.json |
|
644 | 644 | |
|
645 | 645 | comment_id = response.json.get('comment_id', None) |
|
646 | 646 | assert comment_id |
|
647 | 647 | test_text = 'test' |
|
648 | 648 | self.app.post( |
|
649 | 649 | route_path( |
|
650 | 650 | 'pullrequest_comment_edit', |
|
651 | 651 | repo_name=target_scm_name, |
|
652 | 652 | pull_request_id=pull_request.pull_request_id, |
|
653 | 653 | comment_id=comment_id, |
|
654 | 654 | ), |
|
655 | 655 | extra_environ=xhr_header, |
|
656 | 656 | params={ |
|
657 | 657 | 'csrf_token': csrf_token, |
|
658 | 658 | 'text': test_text, |
|
659 | 659 | 'version': '0', |
|
660 | 660 | }, |
|
661 | 661 | |
|
662 | 662 | ) |
|
663 | 663 | text_form_db = ChangesetComment.query().filter( |
|
664 | 664 | ChangesetComment.comment_id == comment_id).first().text |
|
665 | 665 | assert test_text == text_form_db |
|
666 | 666 | |
|
667 | 667 | def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header): |
|
668 | 668 | pull_request = pr_util.create_pull_request() |
|
669 | 669 | target_scm = pull_request.target_repo.scm_instance() |
|
670 | 670 | target_scm_name = target_scm.name |
|
671 | 671 | |
|
672 | 672 | response = self.app.post( |
|
673 | 673 | route_path( |
|
674 | 674 | 'pullrequest_comment_create', |
|
675 | 675 | repo_name=target_scm_name, |
|
676 | 676 | pull_request_id=pull_request.pull_request_id), |
|
677 | 677 | params={ |
|
678 | 678 | 'csrf_token': csrf_token, |
|
679 | 679 | 'text': 'init', |
|
680 | 680 | }, |
|
681 | 681 | extra_environ=xhr_header, |
|
682 | 682 | ) |
|
683 | 683 | assert response.json |
|
684 | 684 | |
|
685 | 685 | comment_id = response.json.get('comment_id', None) |
|
686 | 686 | assert comment_id |
|
687 | 687 | test_text = 'init' |
|
688 | 688 | response = self.app.post( |
|
689 | 689 | route_path( |
|
690 | 690 | 'pullrequest_comment_edit', |
|
691 | 691 | repo_name=target_scm_name, |
|
692 | 692 | pull_request_id=pull_request.pull_request_id, |
|
693 | 693 | comment_id=comment_id, |
|
694 | 694 | ), |
|
695 | 695 | extra_environ=xhr_header, |
|
696 | 696 | params={ |
|
697 | 697 | 'csrf_token': csrf_token, |
|
698 | 698 | 'text': test_text, |
|
699 | 699 | 'version': '0', |
|
700 | 700 | }, |
|
701 | 701 | status=404, |
|
702 | 702 | |
|
703 | 703 | ) |
|
704 | 704 | assert response.status_int == 404 |
|
705 | 705 | |
|
706 | 706 | def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header): |
|
707 | 707 | pull_request = pr_util.create_pull_request() |
|
708 | 708 | target_scm = pull_request.target_repo.scm_instance() |
|
709 | 709 | target_scm_name = target_scm.name |
|
710 | 710 | |
|
711 | 711 | response = self.app.post( |
|
712 | 712 | route_path( |
|
713 | 713 | 'pullrequest_comment_create', |
|
714 | 714 | repo_name=target_scm_name, |
|
715 | 715 | pull_request_id=pull_request.pull_request_id), |
|
716 | 716 | params={ |
|
717 | 717 | 'csrf_token': csrf_token, |
|
718 | 718 | 'text': 'init', |
|
719 | 719 | }, |
|
720 | 720 | extra_environ=xhr_header, |
|
721 | 721 | ) |
|
722 | 722 | assert response.json |
|
723 | 723 | comment_id = response.json.get('comment_id', None) |
|
724 | 724 | assert comment_id |
|
725 | 725 | |
|
726 | 726 | test_text = 'test' |
|
727 | 727 | self.app.post( |
|
728 | 728 | route_path( |
|
729 | 729 | 'pullrequest_comment_edit', |
|
730 | 730 | repo_name=target_scm_name, |
|
731 | 731 | pull_request_id=pull_request.pull_request_id, |
|
732 | 732 | comment_id=comment_id, |
|
733 | 733 | ), |
|
734 | 734 | extra_environ=xhr_header, |
|
735 | 735 | params={ |
|
736 | 736 | 'csrf_token': csrf_token, |
|
737 | 737 | 'text': test_text, |
|
738 | 738 | 'version': '0', |
|
739 | 739 | }, |
|
740 | 740 | |
|
741 | 741 | ) |
|
742 | 742 | test_text_v2 = 'test_v2' |
|
743 | 743 | response = self.app.post( |
|
744 | 744 | route_path( |
|
745 | 745 | 'pullrequest_comment_edit', |
|
746 | 746 | repo_name=target_scm_name, |
|
747 | 747 | pull_request_id=pull_request.pull_request_id, |
|
748 | 748 | comment_id=comment_id, |
|
749 | 749 | ), |
|
750 | 750 | extra_environ=xhr_header, |
|
751 | 751 | params={ |
|
752 | 752 | 'csrf_token': csrf_token, |
|
753 | 753 | 'text': test_text_v2, |
|
754 | 754 | 'version': '0', |
|
755 | 755 | }, |
|
756 | 756 | status=409, |
|
757 | 757 | ) |
|
758 | 758 | assert response.status_int == 409 |
|
759 | 759 | |
|
760 | 760 | text_form_db = ChangesetComment.query().filter( |
|
761 | 761 | ChangesetComment.comment_id == comment_id).first().text |
|
762 | 762 | |
|
763 | 763 | assert test_text == text_form_db |
|
764 | 764 | assert test_text_v2 != text_form_db |
|
765 | 765 | |
|
766 | 766 | def test_comment_and_comment_edit_permissions_forbidden( |
|
767 | 767 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
768 | 768 | csrf_token, xhr_header): |
|
769 | 769 | pull_request = pr_util.create_pull_request( |
|
770 | 770 | author=user_admin.username, enable_notifications=False) |
|
771 | 771 | comment = CommentsModel().create( |
|
772 | 772 | text='test', |
|
773 | 773 | repo=pull_request.target_repo.scm_instance().name, |
|
774 | 774 | user=user_admin, |
|
775 | 775 | pull_request=pull_request, |
|
776 | 776 | ) |
|
777 | 777 | response = self.app.post( |
|
778 | 778 | route_path( |
|
779 | 779 | 'pullrequest_comment_edit', |
|
780 | 780 | repo_name=pull_request.target_repo.scm_instance().name, |
|
781 | 781 | pull_request_id=pull_request.pull_request_id, |
|
782 | 782 | comment_id=comment.comment_id, |
|
783 | 783 | ), |
|
784 | 784 | extra_environ=xhr_header, |
|
785 | 785 | params={ |
|
786 | 786 | 'csrf_token': csrf_token, |
|
787 | 787 | 'text': 'test_text', |
|
788 | 788 | }, |
|
789 | 789 | status=403, |
|
790 | 790 | ) |
|
791 | 791 | assert response.status_int == 403 |
|
792 | 792 | |
|
793 | 793 | def test_create_pull_request(self, backend, csrf_token): |
|
794 | 794 | commits = [ |
|
795 | 795 | {'message': 'ancestor'}, |
|
796 | 796 | {'message': 'change'}, |
|
797 | 797 | {'message': 'change2'}, |
|
798 | 798 | ] |
|
799 | 799 | commit_ids = backend.create_master_repo(commits) |
|
800 | 800 | target = backend.create_repo(heads=['ancestor']) |
|
801 | 801 | source = backend.create_repo(heads=['change2']) |
|
802 | 802 | |
|
803 | 803 | response = self.app.post( |
|
804 | 804 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
805 | 805 | [ |
|
806 | 806 | ('source_repo', source.repo_name), |
|
807 | 807 | ('source_ref', 'branch:default:' + commit_ids['change2']), |
|
808 | 808 | ('target_repo', target.repo_name), |
|
809 | 809 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), |
|
810 | 810 | ('common_ancestor', commit_ids['ancestor']), |
|
811 | 811 | ('pullrequest_title', 'Title'), |
|
812 | 812 | ('pullrequest_desc', 'Description'), |
|
813 | 813 | ('description_renderer', 'markdown'), |
|
814 | 814 | ('__start__', 'review_members:sequence'), |
|
815 | 815 | ('__start__', 'reviewer:mapping'), |
|
816 | 816 | ('user_id', '1'), |
|
817 | 817 | ('__start__', 'reasons:sequence'), |
|
818 | 818 | ('reason', 'Some reason'), |
|
819 | 819 | ('__end__', 'reasons:sequence'), |
|
820 | 820 | ('__start__', 'rules:sequence'), |
|
821 | 821 | ('__end__', 'rules:sequence'), |
|
822 | 822 | ('mandatory', 'False'), |
|
823 | 823 | ('__end__', 'reviewer:mapping'), |
|
824 | 824 | ('__end__', 'review_members:sequence'), |
|
825 | 825 | ('__start__', 'revisions:sequence'), |
|
826 | 826 | ('revisions', commit_ids['change']), |
|
827 | 827 | ('revisions', commit_ids['change2']), |
|
828 | 828 | ('__end__', 'revisions:sequence'), |
|
829 | 829 | ('user', ''), |
|
830 | 830 | ('csrf_token', csrf_token), |
|
831 | 831 | ], |
|
832 | 832 | status=302) |
|
833 | 833 | |
|
834 | 834 | location = response.headers['Location'] |
|
835 | 835 | pull_request_id = location.rsplit('/', 1)[1] |
|
836 | 836 | assert pull_request_id != 'new' |
|
837 | 837 | pull_request = PullRequest.get(int(pull_request_id)) |
|
838 | 838 | |
|
839 | 839 | # check that we have now both revisions |
|
840 | 840 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] |
|
841 | 841 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] |
|
842 | 842 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
843 | 843 | assert pull_request.target_ref == expected_target_ref |
|
844 | 844 | |
|
845 | 845 | def test_reviewer_notifications(self, backend, csrf_token): |
|
846 | 846 | # We have to use the app.post for this test so it will create the |
|
847 | 847 | # notifications properly with the new PR |
|
848 | 848 | commits = [ |
|
849 | 849 | {'message': 'ancestor', |
|
850 | 850 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
851 | 851 | {'message': 'change', |
|
852 | 852 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
853 | 853 | {'message': 'change-child'}, |
|
854 | 854 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
855 | 855 | 'added': [ |
|
856 | 856 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
857 | 857 | {'message': 'ancestor-child-2'}, |
|
858 | 858 | ] |
|
859 | 859 | commit_ids = backend.create_master_repo(commits) |
|
860 | 860 | target = backend.create_repo(heads=['ancestor-child']) |
|
861 | 861 | source = backend.create_repo(heads=['change']) |
|
862 | 862 | |
|
863 | 863 | response = self.app.post( |
|
864 | 864 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
865 | 865 | [ |
|
866 | 866 | ('source_repo', source.repo_name), |
|
867 | 867 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
868 | 868 | ('target_repo', target.repo_name), |
|
869 | 869 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
870 | 870 | ('common_ancestor', commit_ids['ancestor']), |
|
871 | 871 | ('pullrequest_title', 'Title'), |
|
872 | 872 | ('pullrequest_desc', 'Description'), |
|
873 | 873 | ('description_renderer', 'markdown'), |
|
874 | 874 | ('__start__', 'review_members:sequence'), |
|
875 | 875 | ('__start__', 'reviewer:mapping'), |
|
876 | 876 | ('user_id', '2'), |
|
877 | 877 | ('__start__', 'reasons:sequence'), |
|
878 | 878 | ('reason', 'Some reason'), |
|
879 | 879 | ('__end__', 'reasons:sequence'), |
|
880 | 880 | ('__start__', 'rules:sequence'), |
|
881 | 881 | ('__end__', 'rules:sequence'), |
|
882 | 882 | ('mandatory', 'False'), |
|
883 | 883 | ('__end__', 'reviewer:mapping'), |
|
884 | 884 | ('__end__', 'review_members:sequence'), |
|
885 | 885 | ('__start__', 'revisions:sequence'), |
|
886 | 886 | ('revisions', commit_ids['change']), |
|
887 | 887 | ('__end__', 'revisions:sequence'), |
|
888 | 888 | ('user', ''), |
|
889 | 889 | ('csrf_token', csrf_token), |
|
890 | 890 | ], |
|
891 | 891 | status=302) |
|
892 | 892 | |
|
893 | 893 | location = response.headers['Location'] |
|
894 | 894 | |
|
895 | 895 | pull_request_id = location.rsplit('/', 1)[1] |
|
896 | 896 | assert pull_request_id != 'new' |
|
897 | 897 | pull_request = PullRequest.get(int(pull_request_id)) |
|
898 | 898 | |
|
899 | 899 | # Check that a notification was made |
|
900 | 900 | notifications = Notification.query()\ |
|
901 | 901 | .filter(Notification.created_by == pull_request.author.user_id, |
|
902 | 902 | Notification.type_ == Notification.TYPE_PULL_REQUEST, |
|
903 | 903 | Notification.subject.contains( |
|
904 | 904 | "requested a pull request review. !%s" % pull_request_id)) |
|
905 | 905 | assert len(notifications.all()) == 1 |
|
906 | 906 | |
|
907 | 907 | # Change reviewers and check that a notification was made |
|
908 | 908 | PullRequestModel().update_reviewers( |
|
909 | 909 | pull_request.pull_request_id, [(1, [], False, [])], |
|
910 | 910 | pull_request.author) |
|
911 | 911 | assert len(notifications.all()) == 2 |
|
912 | 912 | |
|
913 | 913 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, |
|
914 | 914 | csrf_token): |
|
915 | 915 | commits = [ |
|
916 | 916 | {'message': 'ancestor', |
|
917 | 917 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
918 | 918 | {'message': 'change', |
|
919 | 919 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
920 | 920 | {'message': 'change-child'}, |
|
921 | 921 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
922 | 922 | 'added': [ |
|
923 | 923 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
924 | 924 | {'message': 'ancestor-child-2'}, |
|
925 | 925 | ] |
|
926 | 926 | commit_ids = backend.create_master_repo(commits) |
|
927 | 927 | target = backend.create_repo(heads=['ancestor-child']) |
|
928 | 928 | source = backend.create_repo(heads=['change']) |
|
929 | 929 | |
|
930 | 930 | response = self.app.post( |
|
931 | 931 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
932 | 932 | [ |
|
933 | 933 | ('source_repo', source.repo_name), |
|
934 | 934 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
935 | 935 | ('target_repo', target.repo_name), |
|
936 | 936 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
937 | 937 | ('common_ancestor', commit_ids['ancestor']), |
|
938 | 938 | ('pullrequest_title', 'Title'), |
|
939 | 939 | ('pullrequest_desc', 'Description'), |
|
940 | 940 | ('description_renderer', 'markdown'), |
|
941 | 941 | ('__start__', 'review_members:sequence'), |
|
942 | 942 | ('__start__', 'reviewer:mapping'), |
|
943 | 943 | ('user_id', '1'), |
|
944 | 944 | ('__start__', 'reasons:sequence'), |
|
945 | 945 | ('reason', 'Some reason'), |
|
946 | 946 | ('__end__', 'reasons:sequence'), |
|
947 | 947 | ('__start__', 'rules:sequence'), |
|
948 | 948 | ('__end__', 'rules:sequence'), |
|
949 | 949 | ('mandatory', 'False'), |
|
950 | 950 | ('__end__', 'reviewer:mapping'), |
|
951 | 951 | ('__end__', 'review_members:sequence'), |
|
952 | 952 | ('__start__', 'revisions:sequence'), |
|
953 | 953 | ('revisions', commit_ids['change']), |
|
954 | 954 | ('__end__', 'revisions:sequence'), |
|
955 | 955 | ('user', ''), |
|
956 | 956 | ('csrf_token', csrf_token), |
|
957 | 957 | ], |
|
958 | 958 | status=302) |
|
959 | 959 | |
|
960 | 960 | location = response.headers['Location'] |
|
961 | 961 | |
|
962 | 962 | pull_request_id = location.rsplit('/', 1)[1] |
|
963 | 963 | assert pull_request_id != 'new' |
|
964 | 964 | pull_request = PullRequest.get(int(pull_request_id)) |
|
965 | 965 | |
|
966 | 966 | # target_ref has to point to the ancestor's commit_id in order to |
|
967 | 967 | # show the correct diff |
|
968 | 968 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
969 | 969 | assert pull_request.target_ref == expected_target_ref |
|
970 | 970 | |
|
971 | 971 | # Check generated diff contents |
|
972 | 972 | response = response.follow() |
|
973 | 973 | response.mustcontain(no=['content_of_ancestor']) |
|
974 | 974 | response.mustcontain(no=['content_of_ancestor-child']) |
|
975 | 975 | response.mustcontain('content_of_change') |
|
976 | 976 | |
|
977 | 977 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): |
|
978 | 978 | # Clear any previous calls to rcextensions |
|
979 | 979 | rhodecode.EXTENSIONS.calls.clear() |
|
980 | 980 | |
|
981 | 981 | pull_request = pr_util.create_pull_request( |
|
982 | 982 | approved=True, mergeable=True) |
|
983 | 983 | pull_request_id = pull_request.pull_request_id |
|
984 | 984 | repo_name = pull_request.target_repo.scm_instance().name, |
|
985 | 985 | |
|
986 | 986 | url = route_path('pullrequest_merge', |
|
987 | 987 | repo_name=str(repo_name[0]), |
|
988 | 988 | pull_request_id=pull_request_id) |
|
989 | 989 | response = self.app.post(url, params={'csrf_token': csrf_token}).follow() |
|
990 | 990 | |
|
991 | 991 | pull_request = PullRequest.get(pull_request_id) |
|
992 | 992 | |
|
993 | 993 | assert response.status_int == 200 |
|
994 | 994 | assert pull_request.is_closed() |
|
995 | 995 | assert_pull_request_status( |
|
996 | 996 | pull_request, ChangesetStatus.STATUS_APPROVED) |
|
997 | 997 | |
|
998 | 998 | # Check the relevant log entries were added |
|
999 | 999 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3) |
|
1000 | 1000 | actions = [log.action for log in user_logs] |
|
1001 | 1001 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
1002 | 1002 | expected_actions = [ |
|
1003 | 1003 | u'repo.pull_request.close', |
|
1004 | 1004 | u'repo.pull_request.merge', |
|
1005 | 1005 | u'repo.pull_request.comment.create' |
|
1006 | 1006 | ] |
|
1007 | 1007 | assert actions == expected_actions |
|
1008 | 1008 | |
|
1009 | 1009 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4) |
|
1010 | 1010 | actions = [log for log in user_logs] |
|
1011 | 1011 | assert actions[-1].action == 'user.push' |
|
1012 | 1012 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids |
|
1013 | 1013 | |
|
1014 | 1014 | # Check post_push rcextension was really executed |
|
1015 | 1015 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] |
|
1016 | 1016 | assert len(push_calls) == 1 |
|
1017 | 1017 | unused_last_call_args, last_call_kwargs = push_calls[0] |
|
1018 | 1018 | assert last_call_kwargs['action'] == 'push' |
|
1019 | 1019 | assert last_call_kwargs['commit_ids'] == pr_commit_ids |
|
1020 | 1020 | |
|
1021 | 1021 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): |
|
1022 | 1022 | pull_request = pr_util.create_pull_request(mergeable=False) |
|
1023 | 1023 | pull_request_id = pull_request.pull_request_id |
|
1024 | 1024 | pull_request = PullRequest.get(pull_request_id) |
|
1025 | 1025 | |
|
1026 | 1026 | response = self.app.post( |
|
1027 | 1027 | route_path('pullrequest_merge', |
|
1028 | 1028 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1029 | 1029 | pull_request_id=pull_request.pull_request_id), |
|
1030 | 1030 | params={'csrf_token': csrf_token}).follow() |
|
1031 | 1031 | |
|
1032 | 1032 | assert response.status_int == 200 |
|
1033 | 1033 | response.mustcontain( |
|
1034 | 1034 | 'Merge is not currently possible because of below failed checks.') |
|
1035 | 1035 | response.mustcontain('Server-side pull request merging is disabled.') |
|
1036 | 1036 | |
|
1037 | 1037 | @pytest.mark.skip_backends('svn') |
|
1038 | 1038 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): |
|
1039 | 1039 | pull_request = pr_util.create_pull_request(mergeable=True) |
|
1040 | 1040 | pull_request_id = pull_request.pull_request_id |
|
1041 | 1041 | repo_name = pull_request.target_repo.scm_instance().name |
|
1042 | 1042 | |
|
1043 | 1043 | response = self.app.post( |
|
1044 | 1044 | route_path('pullrequest_merge', |
|
1045 | 1045 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
1046 | 1046 | params={'csrf_token': csrf_token}).follow() |
|
1047 | 1047 | |
|
1048 | 1048 | assert response.status_int == 200 |
|
1049 | 1049 | |
|
1050 | 1050 | response.mustcontain( |
|
1051 | 1051 | 'Merge is not currently possible because of below failed checks.') |
|
1052 | 1052 | response.mustcontain('Pull request reviewer approval is pending.') |
|
1053 | 1053 | |
|
1054 | 1054 | def test_merge_pull_request_renders_failure_reason( |
|
1055 | 1055 | self, user_regular, csrf_token, pr_util): |
|
1056 | 1056 | pull_request = pr_util.create_pull_request(mergeable=True, approved=True) |
|
1057 | 1057 | pull_request_id = pull_request.pull_request_id |
|
1058 | 1058 | repo_name = pull_request.target_repo.scm_instance().name |
|
1059 | 1059 | |
|
1060 | 1060 | merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID', |
|
1061 | 1061 | MergeFailureReason.PUSH_FAILED, |
|
1062 | 1062 | metadata={'target': 'shadow repo', |
|
1063 | 1063 | 'merge_commit': 'xxx'}) |
|
1064 | 1064 | model_patcher = mock.patch.multiple( |
|
1065 | 1065 | PullRequestModel, |
|
1066 | 1066 | merge_repo=mock.Mock(return_value=merge_resp), |
|
1067 | 1067 | merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE'))) |
|
1068 | 1068 | |
|
1069 | 1069 | with model_patcher: |
|
1070 | 1070 | response = self.app.post( |
|
1071 | 1071 | route_path('pullrequest_merge', |
|
1072 | 1072 | repo_name=repo_name, |
|
1073 | 1073 | pull_request_id=pull_request_id), |
|
1074 | 1074 | params={'csrf_token': csrf_token}, status=302) |
|
1075 | 1075 | |
|
1076 | 1076 | merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED, |
|
1077 | 1077 | metadata={'target': 'shadow repo', |
|
1078 | 1078 | 'merge_commit': 'xxx'}) |
|
1079 | 1079 | assert_session_flash(response, merge_resp.merge_status_message) |
|
1080 | 1080 | |
|
1081 | 1081 | def test_update_source_revision(self, backend, csrf_token): |
|
1082 | 1082 | commits = [ |
|
1083 | 1083 | {'message': 'ancestor'}, |
|
1084 | 1084 | {'message': 'change'}, |
|
1085 | 1085 | {'message': 'change-2'}, |
|
1086 | 1086 | ] |
|
1087 | 1087 | commit_ids = backend.create_master_repo(commits) |
|
1088 | 1088 | target = backend.create_repo(heads=['ancestor']) |
|
1089 | 1089 | source = backend.create_repo(heads=['change']) |
|
1090 | 1090 | |
|
1091 | 1091 | # create pr from a in source to A in target |
|
1092 | 1092 | pull_request = PullRequest() |
|
1093 | 1093 | |
|
1094 | 1094 | pull_request.source_repo = source |
|
1095 | 1095 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1096 | 1096 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
1097 | 1097 | |
|
1098 | 1098 | pull_request.target_repo = target |
|
1099 | 1099 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1100 | 1100 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1101 | 1101 | |
|
1102 | 1102 | pull_request.revisions = [commit_ids['change']] |
|
1103 | 1103 | pull_request.title = u"Test" |
|
1104 | 1104 | pull_request.description = u"Description" |
|
1105 | 1105 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1106 | 1106 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1107 | 1107 | Session().add(pull_request) |
|
1108 | 1108 | Session().commit() |
|
1109 | 1109 | pull_request_id = pull_request.pull_request_id |
|
1110 | 1110 | |
|
1111 | 1111 | # source has ancestor - change - change-2 |
|
1112 | 1112 | backend.pull_heads(source, heads=['change-2']) |
|
1113 | target_repo_name = target.repo_name | |
|
1113 | 1114 | |
|
1114 | 1115 | # update PR |
|
1115 | 1116 | self.app.post( |
|
1116 | 1117 | route_path('pullrequest_update', |
|
1117 |
repo_name=target |
|
|
1118 | repo_name=target_repo_name, pull_request_id=pull_request_id), | |
|
1118 | 1119 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
1119 | 1120 | |
|
1120 | 1121 | response = self.app.get( |
|
1121 | 1122 | route_path('pullrequest_show', |
|
1122 |
repo_name=target.repo_name,
|
|
1123 | repo_name=target_repo_name, | |
|
1123 | 1124 | pull_request_id=pull_request.pull_request_id)) |
|
1124 | 1125 | |
|
1125 | 1126 | assert response.status_int == 200 |
|
1126 | 1127 | response.mustcontain('Pull request updated to') |
|
1127 | 1128 | response.mustcontain('with 1 added, 0 removed commits.') |
|
1128 | 1129 | |
|
1129 | 1130 | # check that we have now both revisions |
|
1130 | 1131 | pull_request = PullRequest.get(pull_request_id) |
|
1131 | 1132 | assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']] |
|
1132 | 1133 | |
|
1133 | 1134 | def test_update_target_revision(self, backend, csrf_token): |
|
1134 | 1135 | commits = [ |
|
1135 | 1136 | {'message': 'ancestor'}, |
|
1136 | 1137 | {'message': 'change'}, |
|
1137 | 1138 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
1138 | 1139 | {'message': 'change-rebased'}, |
|
1139 | 1140 | ] |
|
1140 | 1141 | commit_ids = backend.create_master_repo(commits) |
|
1141 | 1142 | target = backend.create_repo(heads=['ancestor']) |
|
1142 | 1143 | source = backend.create_repo(heads=['change']) |
|
1143 | 1144 | |
|
1144 | 1145 | # create pr from a in source to A in target |
|
1145 | 1146 | pull_request = PullRequest() |
|
1146 | 1147 | |
|
1147 | 1148 | pull_request.source_repo = source |
|
1148 | 1149 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1149 | 1150 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
1150 | 1151 | |
|
1151 | 1152 | pull_request.target_repo = target |
|
1152 | 1153 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1153 | 1154 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1154 | 1155 | |
|
1155 | 1156 | pull_request.revisions = [commit_ids['change']] |
|
1156 | 1157 | pull_request.title = u"Test" |
|
1157 | 1158 | pull_request.description = u"Description" |
|
1158 | 1159 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1159 | 1160 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1160 | 1161 | |
|
1161 | 1162 | Session().add(pull_request) |
|
1162 | 1163 | Session().commit() |
|
1163 | 1164 | pull_request_id = pull_request.pull_request_id |
|
1164 | 1165 | |
|
1165 | 1166 | # target has ancestor - ancestor-new |
|
1166 | 1167 | # source has ancestor - ancestor-new - change-rebased |
|
1167 | 1168 | backend.pull_heads(target, heads=['ancestor-new']) |
|
1168 | 1169 | backend.pull_heads(source, heads=['change-rebased']) |
|
1170 | target_repo_name = target.repo_name | |
|
1169 | 1171 | |
|
1170 | 1172 | # update PR |
|
1171 | 1173 | url = route_path('pullrequest_update', |
|
1172 |
repo_name=target |
|
|
1174 | repo_name=target_repo_name, | |
|
1173 | 1175 | pull_request_id=pull_request_id) |
|
1174 | 1176 | self.app.post(url, |
|
1175 | 1177 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
1176 | 1178 | status=200) |
|
1177 | 1179 | |
|
1178 | 1180 | # check that we have now both revisions |
|
1179 | 1181 | pull_request = PullRequest.get(pull_request_id) |
|
1180 | 1182 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
1181 | 1183 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( |
|
1182 | 1184 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new']) |
|
1183 | 1185 | |
|
1184 | 1186 | response = self.app.get( |
|
1185 | 1187 | route_path('pullrequest_show', |
|
1186 | repo_name=target.repo_name, | |

1188 | repo_name=target_repo_name, | |
|
1187 | 1189 | pull_request_id=pull_request.pull_request_id)) |
|
1188 | 1190 | assert response.status_int == 200 |
|
1189 | 1191 | response.mustcontain('Pull request updated to') |
|
1190 | 1192 | response.mustcontain('with 1 added, 1 removed commits.') |
|
1191 | 1193 | |
|
1192 | 1194 | def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): |
|
1193 | 1195 | backend = backend_git |
|
1194 | 1196 | commits = [ |
|
1195 | 1197 | {'message': 'master-commit-1'}, |
|
1196 | 1198 | {'message': 'master-commit-2-change-1'}, |
|
1197 | 1199 | {'message': 'master-commit-3-change-2'}, |
|
1198 | 1200 | |
|
1199 | 1201 | {'message': 'feat-commit-1', 'parents': ['master-commit-1']}, |
|
1200 | 1202 | {'message': 'feat-commit-2'}, |
|
1201 | 1203 | ] |
|
1202 | 1204 | commit_ids = backend.create_master_repo(commits) |
|
1203 | 1205 | target = backend.create_repo(heads=['master-commit-3-change-2']) |
|
1204 | 1206 | source = backend.create_repo(heads=['feat-commit-2']) |
|
1205 | 1207 | |
|
1206 | 1208 | # create pr from a in source to A in target |
|
1207 | 1209 | pull_request = PullRequest() |
|
1208 | 1210 | pull_request.source_repo = source |
|
1209 | 1211 | |
|
1210 | 1212 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1211 | 1213 | branch=backend.default_branch_name, |
|
1212 | 1214 | commit_id=commit_ids['master-commit-3-change-2']) |
|
1213 | 1215 | |
|
1214 | 1216 | pull_request.target_repo = target |
|
1215 | 1217 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1216 | 1218 | branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2']) |
|
1217 | 1219 | |
|
1218 | 1220 | pull_request.revisions = [ |
|
1219 | 1221 | commit_ids['feat-commit-1'], |
|
1220 | 1222 | commit_ids['feat-commit-2'] |
|
1221 | 1223 | ] |
|
1222 | 1224 | pull_request.title = u"Test" |
|
1223 | 1225 | pull_request.description = u"Description" |
|
1224 | 1226 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1225 | 1227 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1226 | 1228 | Session().add(pull_request) |
|
1227 | 1229 | Session().commit() |
|
1228 | 1230 | pull_request_id = pull_request.pull_request_id |
|
1229 | 1231 | |
|
1230 | 1232 | # PR is created, now we simulate a force-push into target, |
|
1231 | 1233 | # that drops a 2 last commits |
|
1232 | 1234 | vcsrepo = target.scm_instance() |
|
1233 | 1235 | vcsrepo.config.clear_section('hooks') |
|
1234 | 1236 | vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) |
|
1237 | target_repo_name = target.repo_name | |
|
1235 | 1238 | |
|
1236 | 1239 | # update PR |
|
1237 | 1240 | url = route_path('pullrequest_update', |
|
1238 | repo_name=target.repo_name, | |

1241 | repo_name=target_repo_name, | |
|
1239 | 1242 | pull_request_id=pull_request_id) |
|
1240 | 1243 | self.app.post(url, |
|
1241 | 1244 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
1242 | 1245 | status=200) |
|
1243 | 1246 | |
|
1244 | response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) | |

1247 | response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name)) | |
|
1245 | 1248 | assert response.status_int == 200 |
|
1246 | 1249 | response.mustcontain('Pull request updated to') |
|
1247 | 1250 | response.mustcontain('with 0 added, 0 removed commits.') |
|
1248 | 1251 | |
|
1249 | 1252 | def test_update_of_ancestor_reference(self, backend, csrf_token): |
|
1250 | 1253 | commits = [ |
|
1251 | 1254 | {'message': 'ancestor'}, |
|
1252 | 1255 | {'message': 'change'}, |
|
1253 | 1256 | {'message': 'change-2'}, |
|
1254 | 1257 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
1255 | 1258 | {'message': 'change-rebased'}, |
|
1256 | 1259 | ] |
|
1257 | 1260 | commit_ids = backend.create_master_repo(commits) |
|
1258 | 1261 | target = backend.create_repo(heads=['ancestor']) |
|
1259 | 1262 | source = backend.create_repo(heads=['change']) |
|
1260 | 1263 | |
|
1261 | 1264 | # create pr from a in source to A in target |
|
1262 | 1265 | pull_request = PullRequest() |
|
1263 | 1266 | pull_request.source_repo = source |
|
1264 | 1267 | |
|
1265 | 1268 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1266 | 1269 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
1267 | 1270 | pull_request.target_repo = target |
|
1268 | 1271 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1269 | 1272 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1270 | 1273 | pull_request.revisions = [commit_ids['change']] |
|
1271 | 1274 | pull_request.title = u"Test" |
|
1272 | 1275 | pull_request.description = u"Description" |
|
1273 | 1276 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1274 | 1277 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1275 | 1278 | Session().add(pull_request) |
|
1276 | 1279 | Session().commit() |
|
1277 | 1280 | pull_request_id = pull_request.pull_request_id |
|
1278 | 1281 | |
|
1279 | 1282 | # target has ancestor - ancestor-new |
|
1280 | 1283 | # source has ancestor - ancestor-new - change-rebased |
|
1281 | 1284 | backend.pull_heads(target, heads=['ancestor-new']) |
|
1282 | 1285 | backend.pull_heads(source, heads=['change-rebased']) |
|
1286 | target_repo_name = target.repo_name | |
|
1283 | 1287 | |
|
1284 | 1288 | # update PR |
|
1285 | 1289 | self.app.post( |
|
1286 | 1290 | route_path('pullrequest_update', |
|
1287 | repo_name=target.repo_name, pull_request_id=pull_request_id), | |

1291 | repo_name=target_repo_name, pull_request_id=pull_request_id), | |
|
1288 | 1292 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
1289 | 1293 | status=200) |
|
1290 | 1294 | |
|
1291 | 1295 | # Expect the target reference to be updated correctly |
|
1292 | 1296 | pull_request = PullRequest.get(pull_request_id) |
|
1293 | 1297 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
1294 | 1298 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1295 | 1299 | branch=backend.default_branch_name, |
|
1296 | 1300 | commit_id=commit_ids['ancestor-new']) |
|
1297 | 1301 | assert pull_request.target_ref == expected_target_ref |
|
1298 | 1302 | |
|
1299 | 1303 | def test_remove_pull_request_branch(self, backend_git, csrf_token): |
|
1300 | 1304 | branch_name = 'development' |
|
1301 | 1305 | commits = [ |
|
1302 | 1306 | {'message': 'initial-commit'}, |
|
1303 | 1307 | {'message': 'old-feature'}, |
|
1304 | 1308 | {'message': 'new-feature', 'branch': branch_name}, |
|
1305 | 1309 | ] |
|
1306 | 1310 | repo = backend_git.create_repo(commits) |
|
1307 | 1311 | repo_name = repo.repo_name |
|
1308 | 1312 | commit_ids = backend_git.commit_ids |
|
1309 | 1313 | |
|
1310 | 1314 | pull_request = PullRequest() |
|
1311 | 1315 | pull_request.source_repo = repo |
|
1312 | 1316 | pull_request.target_repo = repo |
|
1313 | 1317 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1314 | 1318 | branch=branch_name, commit_id=commit_ids['new-feature']) |
|
1315 | 1319 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1316 | 1320 | branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature']) |
|
1317 | 1321 | pull_request.revisions = [commit_ids['new-feature']] |
|
1318 | 1322 | pull_request.title = u"Test" |
|
1319 | 1323 | pull_request.description = u"Description" |
|
1320 | 1324 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1321 | 1325 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1322 | 1326 | Session().add(pull_request) |
|
1323 | 1327 | Session().commit() |
|
1324 | 1328 | |
|
1325 | 1329 | pull_request_id = pull_request.pull_request_id |
|
1326 | 1330 | |
|
1327 | 1331 | vcs = repo.scm_instance() |
|
1328 | 1332 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) |
|
1329 | 1333 | # NOTE(marcink): run GC to ensure the commits are gone |
|
1330 | 1334 | vcs.run_gc() |
|
1331 | 1335 | |
|
1332 | 1336 | response = self.app.get(route_path( |
|
1333 | 1337 | 'pullrequest_show', |
|
1334 | 1338 | repo_name=repo_name, |
|
1335 | 1339 | pull_request_id=pull_request_id)) |
|
1336 | 1340 | |
|
1337 | 1341 | assert response.status_int == 200 |
|
1338 | 1342 | |
|
1339 | 1343 | response.assert_response().element_contains( |
|
1340 | 1344 | '#changeset_compare_view_content .alert strong', |
|
1341 | 1345 | 'Missing commits') |
|
1342 | 1346 | response.assert_response().element_contains( |
|
1343 | 1347 | '#changeset_compare_view_content .alert', |
|
1344 | 1348 | 'This pull request cannot be displayed, because one or more' |
|
1345 | 1349 | ' commits no longer exist in the source repository.') |
|
1346 | 1350 | |
|
1347 | 1351 | def test_strip_commits_from_pull_request( |
|
1348 | 1352 | self, backend, pr_util, csrf_token): |
|
1349 | 1353 | commits = [ |
|
1350 | 1354 | {'message': 'initial-commit'}, |
|
1351 | 1355 | {'message': 'old-feature'}, |
|
1352 | 1356 | {'message': 'new-feature', 'parents': ['initial-commit']}, |
|
1353 | 1357 | ] |
|
1354 | 1358 | pull_request = pr_util.create_pull_request( |
|
1355 | 1359 | commits, target_head='initial-commit', source_head='new-feature', |
|
1356 | 1360 | revisions=['new-feature']) |
|
1357 | 1361 | |
|
1358 | 1362 | vcs = pr_util.source_repository.scm_instance() |
|
1359 | 1363 | if backend.alias == 'git': |
|
1360 | 1364 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
1361 | 1365 | else: |
|
1362 | 1366 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
1363 | 1367 | |
|
1364 | 1368 | response = self.app.get(route_path( |
|
1365 | 1369 | 'pullrequest_show', |
|
1366 | 1370 | repo_name=pr_util.target_repository.repo_name, |
|
1367 | 1371 | pull_request_id=pull_request.pull_request_id)) |
|
1368 | 1372 | |
|
1369 | 1373 | assert response.status_int == 200 |
|
1370 | 1374 | |
|
1371 | 1375 | response.assert_response().element_contains( |
|
1372 | 1376 | '#changeset_compare_view_content .alert strong', |
|
1373 | 1377 | 'Missing commits') |
|
1374 | 1378 | response.assert_response().element_contains( |
|
1375 | 1379 | '#changeset_compare_view_content .alert', |
|
1376 | 1380 | 'This pull request cannot be displayed, because one or more' |
|
1377 | 1381 | ' commits no longer exist in the source repository.') |
|
1378 | 1382 | response.assert_response().element_contains( |
|
1379 | 1383 | '#update_commits', |
|
1380 | 1384 | 'Update commits') |
|
1381 | 1385 | |
|
1382 | 1386 | def test_strip_commits_and_update( |
|
1383 | 1387 | self, backend, pr_util, csrf_token): |
|
1384 | 1388 | commits = [ |
|
1385 | 1389 | {'message': 'initial-commit'}, |
|
1386 | 1390 | {'message': 'old-feature'}, |
|
1387 | 1391 | {'message': 'new-feature', 'parents': ['old-feature']}, |
|
1388 | 1392 | ] |
|
1389 | 1393 | pull_request = pr_util.create_pull_request( |
|
1390 | 1394 | commits, target_head='old-feature', source_head='new-feature', |
|
1391 | 1395 | revisions=['new-feature'], mergeable=True) |
|
1396 | pr_id = pull_request.pull_request_id | |
|
1397 | target_repo_name = pull_request.target_repo.repo_name | |
|
1392 | 1398 | |
|
1393 | 1399 | vcs = pr_util.source_repository.scm_instance() |
|
1394 | 1400 | if backend.alias == 'git': |
|
1395 | 1401 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
1396 | 1402 | else: |
|
1397 | 1403 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
1398 | 1404 | |
|
1399 | 1405 | url = route_path('pullrequest_update', |
|
1400 | repo_name=pull_request.target_repo.repo_name, | |

1401 | pull_request_id=pull_request.pull_request_id) | |

1406 | repo_name=target_repo_name, | |
|
1407 | pull_request_id=pr_id) | |
|
1402 | 1408 | response = self.app.post(url, |
|
1403 | 1409 | params={'update_commits': 'true', |
|
1404 | 1410 | 'csrf_token': csrf_token}) |
|
1405 | 1411 | |
|
1406 | 1412 | assert response.status_int == 200 |
|
1407 | 1413 | assert response.body == '{"response": true, "redirect_url": null}' |
|
1408 | 1414 | |
|
1409 | 1415 | # Make sure that after update, it won't raise 500 errors |
|
1410 | 1416 | response = self.app.get(route_path( |
|
1411 | 1417 | 'pullrequest_show', |
|
1412 | repo_name=pull_request.target_repo.repo_name, | |

1413 | pull_request_id=pull_request.pull_request_id)) | |

1418 | repo_name=target_repo_name, | |
|
1419 | pull_request_id=pr_id)) | |
|
1414 | 1420 | |
|
1415 | 1421 | assert response.status_int == 200 |
|
1416 | 1422 | response.assert_response().element_contains( |
|
1417 | 1423 | '#changeset_compare_view_content .alert strong', |
|
1418 | 1424 | 'Missing commits') |
|
1419 | 1425 | |
|
1420 | 1426 | def test_branch_is_a_link(self, pr_util): |
|
1421 | 1427 | pull_request = pr_util.create_pull_request() |
|
1422 | 1428 | pull_request.source_ref = 'branch:origin:1234567890abcdef' |
|
1423 | 1429 | pull_request.target_ref = 'branch:target:abcdef1234567890' |
|
1424 | 1430 | Session().add(pull_request) |
|
1425 | 1431 | Session().commit() |
|
1426 | 1432 | |
|
1427 | 1433 | response = self.app.get(route_path( |
|
1428 | 1434 | 'pullrequest_show', |
|
1429 | 1435 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1430 | 1436 | pull_request_id=pull_request.pull_request_id)) |
|
1431 | 1437 | assert response.status_int == 200 |
|
1432 | 1438 | |
|
1433 | 1439 | source = response.assert_response().get_element('.pr-source-info') |
|
1434 | 1440 | source_parent = source.getparent() |
|
1435 | 1441 | assert len(source_parent) == 1 |
|
1436 | 1442 | |
|
1437 | 1443 | target = response.assert_response().get_element('.pr-target-info') |
|
1438 | 1444 | target_parent = target.getparent() |
|
1439 | 1445 | assert len(target_parent) == 1 |
|
1440 | 1446 | |
|
1441 | 1447 | expected_origin_link = route_path( |
|
1442 | 1448 | 'repo_commits', |
|
1443 | 1449 | repo_name=pull_request.source_repo.scm_instance().name, |
|
1444 | 1450 | params=dict(branch='origin')) |
|
1445 | 1451 | expected_target_link = route_path( |
|
1446 | 1452 | 'repo_commits', |
|
1447 | 1453 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1448 | 1454 | params=dict(branch='target')) |
|
1449 | 1455 | assert source_parent.attrib['href'] == expected_origin_link |
|
1450 | 1456 | assert target_parent.attrib['href'] == expected_target_link |
|
1451 | 1457 | |
|
1452 | 1458 | def test_bookmark_is_not_a_link(self, pr_util): |
|
1453 | 1459 | pull_request = pr_util.create_pull_request() |
|
1454 | 1460 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' |
|
1455 | 1461 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' |
|
1456 | 1462 | Session().add(pull_request) |
|
1457 | 1463 | Session().commit() |
|
1458 | 1464 | |
|
1459 | 1465 | response = self.app.get(route_path( |
|
1460 | 1466 | 'pullrequest_show', |
|
1461 | 1467 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1462 | 1468 | pull_request_id=pull_request.pull_request_id)) |
|
1463 | 1469 | assert response.status_int == 200 |
|
1464 | 1470 | |
|
1465 | 1471 | source = response.assert_response().get_element('.pr-source-info') |
|
1466 | 1472 | assert source.text.strip() == 'bookmark:origin' |
|
1467 | 1473 | assert source.getparent().attrib.get('href') is None |
|
1468 | 1474 | |
|
1469 | 1475 | target = response.assert_response().get_element('.pr-target-info') |
|
1470 | 1476 | assert target.text.strip() == 'bookmark:target' |
|
1471 | 1477 | assert target.getparent().attrib.get('href') is None |
|
1472 | 1478 | |
|
1473 | 1479 | def test_tag_is_not_a_link(self, pr_util): |
|
1474 | 1480 | pull_request = pr_util.create_pull_request() |
|
1475 | 1481 | pull_request.source_ref = 'tag:origin:1234567890abcdef' |
|
1476 | 1482 | pull_request.target_ref = 'tag:target:abcdef1234567890' |
|
1477 | 1483 | Session().add(pull_request) |
|
1478 | 1484 | Session().commit() |
|
1479 | 1485 | |
|
1480 | 1486 | response = self.app.get(route_path( |
|
1481 | 1487 | 'pullrequest_show', |
|
1482 | 1488 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1483 | 1489 | pull_request_id=pull_request.pull_request_id)) |
|
1484 | 1490 | assert response.status_int == 200 |
|
1485 | 1491 | |
|
1486 | 1492 | source = response.assert_response().get_element('.pr-source-info') |
|
1487 | 1493 | assert source.text.strip() == 'tag:origin' |
|
1488 | 1494 | assert source.getparent().attrib.get('href') is None |
|
1489 | 1495 | |
|
1490 | 1496 | target = response.assert_response().get_element('.pr-target-info') |
|
1491 | 1497 | assert target.text.strip() == 'tag:target' |
|
1492 | 1498 | assert target.getparent().attrib.get('href') is None |
|
1493 | 1499 | |
|
1494 | 1500 | @pytest.mark.parametrize('mergeable', [True, False]) |
|
1495 | 1501 | def test_shadow_repository_link( |
|
1496 | 1502 | self, mergeable, pr_util, http_host_only_stub): |
|
1497 | 1503 | """ |
|
1498 | 1504 | Check that the pull request summary page displays a link to the shadow |
|
1499 | 1505 | repository if the pull request is mergeable. If it is not mergeable |
|
1500 | 1506 | the link should not be displayed. |
|
1501 | 1507 | """ |
|
1502 | 1508 | pull_request = pr_util.create_pull_request( |
|
1503 | 1509 | mergeable=mergeable, enable_notifications=False) |
|
1504 | 1510 | target_repo = pull_request.target_repo.scm_instance() |
|
1505 | 1511 | pr_id = pull_request.pull_request_id |
|
1506 | 1512 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( |
|
1507 | 1513 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) |
|
1508 | 1514 | |
|
1509 | 1515 | response = self.app.get(route_path( |
|
1510 | 1516 | 'pullrequest_show', |
|
1511 | 1517 | repo_name=target_repo.name, |
|
1512 | 1518 | pull_request_id=pr_id)) |
|
1513 | 1519 | |
|
1514 | 1520 | if mergeable: |
|
1515 | 1521 | response.assert_response().element_value_contains( |
|
1516 | 1522 | 'input.pr-mergeinfo', shadow_url) |
|
1517 | 1523 | response.assert_response().element_value_contains( |
|
1518 | 1524 | 'input.pr-mergeinfo ', 'pr-merge') |
|
1519 | 1525 | else: |
|
1520 | 1526 | response.assert_response().no_element_exists('.pr-mergeinfo') |
|
1521 | 1527 | |
|
1522 | 1528 | |
|
1523 | 1529 | @pytest.mark.usefixtures('app') |
|
1524 | 1530 | @pytest.mark.backends("git", "hg") |
|
1525 | 1531 | class TestPullrequestsControllerDelete(object): |
|
1526 | 1532 | def test_pull_request_delete_button_permissions_admin( |
|
1527 | 1533 | self, autologin_user, user_admin, pr_util): |
|
1528 | 1534 | pull_request = pr_util.create_pull_request( |
|
1529 | 1535 | author=user_admin.username, enable_notifications=False) |
|
1530 | 1536 | |
|
1531 | 1537 | response = self.app.get(route_path( |
|
1532 | 1538 | 'pullrequest_show', |
|
1533 | 1539 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1534 | 1540 | pull_request_id=pull_request.pull_request_id)) |
|
1535 | 1541 | |
|
1536 | 1542 | response.mustcontain('id="delete_pullrequest"') |
|
1537 | 1543 | response.mustcontain('Confirm to delete this pull request') |
|
1538 | 1544 | |
|
1539 | 1545 | def test_pull_request_delete_button_permissions_owner( |
|
1540 | 1546 | self, autologin_regular_user, user_regular, pr_util): |
|
1541 | 1547 | pull_request = pr_util.create_pull_request( |
|
1542 | 1548 | author=user_regular.username, enable_notifications=False) |
|
1543 | 1549 | |
|
1544 | 1550 | response = self.app.get(route_path( |
|
1545 | 1551 | 'pullrequest_show', |
|
1546 | 1552 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1547 | 1553 | pull_request_id=pull_request.pull_request_id)) |
|
1548 | 1554 | |
|
1549 | 1555 | response.mustcontain('id="delete_pullrequest"') |
|
1550 | 1556 | response.mustcontain('Confirm to delete this pull request') |
|
1551 | 1557 | |
|
1552 | 1558 | def test_pull_request_delete_button_permissions_forbidden( |
|
1553 | 1559 | self, autologin_regular_user, user_regular, user_admin, pr_util): |
|
1554 | 1560 | pull_request = pr_util.create_pull_request( |
|
1555 | 1561 | author=user_admin.username, enable_notifications=False) |
|
1556 | 1562 | |
|
1557 | 1563 | response = self.app.get(route_path( |
|
1558 | 1564 | 'pullrequest_show', |
|
1559 | 1565 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1560 | 1566 | pull_request_id=pull_request.pull_request_id)) |
|
1561 | 1567 | response.mustcontain(no=['id="delete_pullrequest"']) |
|
1562 | 1568 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1563 | 1569 | |
|
1564 | 1570 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( |
|
1565 | 1571 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
1566 | 1572 | user_util): |
|
1567 | 1573 | |
|
1568 | 1574 | pull_request = pr_util.create_pull_request( |
|
1569 | 1575 | author=user_admin.username, enable_notifications=False) |
|
1570 | 1576 | |
|
1571 | 1577 | user_util.grant_user_permission_to_repo( |
|
1572 | 1578 | pull_request.target_repo, user_regular, |
|
1573 | 1579 | 'repository.write') |
|
1574 | 1580 | |
|
1575 | 1581 | response = self.app.get(route_path( |
|
1576 | 1582 | 'pullrequest_show', |
|
1577 | 1583 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1578 | 1584 | pull_request_id=pull_request.pull_request_id)) |
|
1579 | 1585 | |
|
1580 | 1586 | response.mustcontain('id="open_edit_pullrequest"') |
|
1581 | 1587 | response.mustcontain('id="delete_pullrequest"') |
|
1582 | 1588 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1583 | 1589 | |
|
1584 | 1590 | def test_delete_comment_returns_404_if_comment_does_not_exist( |
|
1585 | 1591 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1586 | 1592 | |
|
1587 | 1593 | pull_request = pr_util.create_pull_request( |
|
1588 | 1594 | author=user_admin.username, enable_notifications=False) |
|
1589 | 1595 | |
|
1590 | 1596 | self.app.post( |
|
1591 | 1597 | route_path( |
|
1592 | 1598 | 'pullrequest_comment_delete', |
|
1593 | 1599 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1594 | 1600 | pull_request_id=pull_request.pull_request_id, |
|
1595 | 1601 | comment_id=1024404), |
|
1596 | 1602 | extra_environ=xhr_header, |
|
1597 | 1603 | params={'csrf_token': csrf_token}, |
|
1598 | 1604 | status=404 |
|
1599 | 1605 | ) |
|
1600 | 1606 | |
|
1601 | 1607 | def test_delete_comment( |
|
1602 | 1608 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1603 | 1609 | |
|
1604 | 1610 | pull_request = pr_util.create_pull_request( |
|
1605 | 1611 | author=user_admin.username, enable_notifications=False) |
|
1606 | 1612 | comment = pr_util.create_comment() |
|
1607 | 1613 | comment_id = comment.comment_id |
|
1608 | 1614 | |
|
1609 | 1615 | response = self.app.post( |
|
1610 | 1616 | route_path( |
|
1611 | 1617 | 'pullrequest_comment_delete', |
|
1612 | 1618 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1613 | 1619 | pull_request_id=pull_request.pull_request_id, |
|
1614 | 1620 | comment_id=comment_id), |
|
1615 | 1621 | extra_environ=xhr_header, |
|
1616 | 1622 | params={'csrf_token': csrf_token}, |
|
1617 | 1623 | status=200 |
|
1618 | 1624 | ) |
|
1619 | 1625 | assert response.body == 'true' |
|
1620 | 1626 | |
|
1621 | 1627 | @pytest.mark.parametrize('url_type', [ |
|
1622 | 1628 | 'pullrequest_new', |
|
1623 | 1629 | 'pullrequest_create', |
|
1624 | 1630 | 'pullrequest_update', |
|
1625 | 1631 | 'pullrequest_merge', |
|
1626 | 1632 | ]) |
|
1627 | 1633 | def test_pull_request_is_forbidden_on_archived_repo( |
|
1628 | 1634 | self, autologin_user, backend, xhr_header, user_util, url_type): |
|
1629 | 1635 | |
|
1630 | 1636 | # create a temporary repo |
|
1631 | 1637 | source = user_util.create_repo(repo_type=backend.alias) |
|
1632 | 1638 | repo_name = source.repo_name |
|
1633 | 1639 | repo = Repository.get_by_repo_name(repo_name) |
|
1634 | 1640 | repo.archived = True |
|
1635 | 1641 | Session().commit() |
|
1636 | 1642 | |
|
1637 | 1643 | response = self.app.get( |
|
1638 | 1644 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) |
|
1639 | 1645 | |
|
1640 | 1646 | msg = 'Action not supported for archived repository.' |
|
1641 | 1647 | assert_session_flash(response, msg) |
|
1642 | 1648 | |
|
1643 | 1649 | |
|
1644 | 1650 | def assert_pull_request_status(pull_request, expected_status): |
|
1645 | 1651 | status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request) |
|
1646 | 1652 | assert status == expected_status |
|
1647 | 1653 | |
|
1648 | 1654 | |
|
1649 | 1655 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) |
|
1650 | 1656 | @pytest.mark.usefixtures("autologin_user") |
|
1651 | 1657 | def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route): |
|
1652 | 1658 | app.get(route_path(route, repo_name=backend_svn.repo_name), status=404) |
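
All of the test edits above follow one pattern: plain values (the target repository name, the pull request id) are read into local variables before the update request is made, and only those locals are used afterwards. Below is a minimal sketch of that pattern, for illustration only; the motivation is an assumption on my part (the diff itself does not state it), namely that the ORM objects bound to the session may be expired or detached once the update view commits, so later requests and assertions should not depend on them.

    # Illustration of the pattern used in the tests above (not part of the diff).
    # Capture plain values while the ORM objects are still usable ...
    target_repo_name = pull_request.target_repo.repo_name   # plain string
    pr_id = pull_request.pull_request_id                     # plain integer

    # ... then drive the update and the follow-up page load with the locals only.
    self.app.post(
        route_path('pullrequest_update',
                   repo_name=target_repo_name, pull_request_id=pr_id),
        params={'update_commits': 'true', 'csrf_token': csrf_token})
    response = self.app.get(
        route_path('pullrequest_show',
                   repo_name=target_repo_name, pull_request_id=pr_id))
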
@@ -1,723 +1,781 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | ||
|
22 | 21 | import logging |
|
22 | import collections | |
|
23 | 23 | |
|
24 | 24 | from pyramid.httpexceptions import ( |
|
25 | 25 | HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict) |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from pyramid.renderers import render |
|
28 | 28 | from pyramid.response import Response |
|
29 | 29 | |
|
30 | 30 | from rhodecode.apps._base import RepoAppView |
|
31 | 31 | from rhodecode.apps.file_store import utils as store_utils |
|
32 | 32 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import diffs, codeblocks |
|
35 | 35 | from rhodecode.lib.auth import ( |
|
36 | 36 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) |
|
37 | ||
|
37 | from rhodecode.lib.ext_json import json | |
|
38 | 38 | from rhodecode.lib.compat import OrderedDict |
|
39 | 39 | from rhodecode.lib.diffs import ( |
|
40 | 40 | cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, |
|
41 | 41 | get_diff_whitespace_flag) |
|
42 | 42 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch |
|
43 | 43 | import rhodecode.lib.helpers as h |
|
44 | from rhodecode.lib.utils2 import safe_unicode, str2bool | |
|
44 | from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict | |
|
45 | 45 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
46 | 46 | from rhodecode.lib.vcs.exceptions import ( |
|
47 | 47 | RepositoryError, CommitDoesNotExistError) |
|
48 | 48 | from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \ |
|
49 | 49 | ChangesetCommentHistory |
|
50 | 50 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
51 | 51 | from rhodecode.model.comment import CommentsModel |
|
52 | 52 | from rhodecode.model.meta import Session |
|
53 | 53 | from rhodecode.model.settings import VcsSettingsModel |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | def _update_with_GET(params, request): |
|
59 | 59 | for k in ['diff1', 'diff2', 'diff']: |
|
60 | 60 | params[k] += request.GET.getall(k) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class RepoCommitsView(RepoAppView): |
|
64 | 64 | def load_default_context(self): |
|
65 | 65 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
66 | 66 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
67 | 67 | |
|
68 | 68 | return c |
|
69 | 69 | |
|
70 | 70 | def _is_diff_cache_enabled(self, target_repo): |
|
71 | 71 | caching_enabled = self._get_general_setting( |
|
72 | 72 | target_repo, 'rhodecode_diff_cache') |
|
73 | 73 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
74 | 74 | return caching_enabled |
|
75 | 75 | |
|
76 | 76 | def _commit(self, commit_id_range, method): |
|
77 | 77 | _ = self.request.translate |
|
78 | 78 | c = self.load_default_context() |
|
79 | 79 | c.fulldiff = self.request.GET.get('fulldiff') |
|
80 | 80 | |
|
81 | 81 | # fetch global flags of ignore ws or context lines |
|
82 | 82 | diff_context = get_diff_context(self.request) |
|
83 | 83 | hide_whitespace_changes = get_diff_whitespace_flag(self.request) |
|
84 | 84 | |
|
85 | 85 | # diff_limit will cut off the whole diff if the limit is applied |
|
86 | 86 | # otherwise it will just hide the big files from the front-end |
|
87 | 87 | diff_limit = c.visual.cut_off_limit_diff |
|
88 | 88 | file_limit = c.visual.cut_off_limit_file |
|
89 | 89 | |
|
90 | 90 | # get ranges of commit ids if preset |
|
91 | 91 | commit_range = commit_id_range.split('...')[:2] |
|
92 | 92 | |
|
93 | 93 | try: |
|
94 | 94 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
95 | 95 | 'message', 'parents'] |
|
96 | 96 | if self.rhodecode_vcs_repo.alias == 'hg': |
|
97 | 97 | pre_load += ['hidden', 'obsolete', 'phase'] |
|
98 | 98 | |
|
99 | 99 | if len(commit_range) == 2: |
|
100 | 100 | commits = self.rhodecode_vcs_repo.get_commits( |
|
101 | 101 | start_id=commit_range[0], end_id=commit_range[1], |
|
102 | 102 | pre_load=pre_load, translate_tags=False) |
|
103 | 103 | commits = list(commits) |
|
104 | 104 | else: |
|
105 | 105 | commits = [self.rhodecode_vcs_repo.get_commit( |
|
106 | 106 | commit_id=commit_id_range, pre_load=pre_load)] |
|
107 | 107 | |
|
108 | 108 | c.commit_ranges = commits |
|
109 | 109 | if not c.commit_ranges: |
|
110 | 110 | raise RepositoryError('The commit range returned an empty result') |
|
111 | 111 | except CommitDoesNotExistError as e: |
|
112 | 112 | msg = _('No such commit exists. Org exception: `{}`').format(e) |
|
113 | 113 | h.flash(msg, category='error') |
|
114 | 114 | raise HTTPNotFound() |
|
115 | 115 | except Exception: |
|
116 | 116 | log.exception("General failure") |
|
117 | 117 | raise HTTPNotFound() |
|
118 | single_commit = len(c.commit_ranges) == 1 | |
|
118 | 119 | |
|
119 | 120 | c.changes = OrderedDict() |
|
120 | 121 | c.lines_added = 0 |
|
121 | 122 | c.lines_deleted = 0 |
|
122 | 123 | |
|
123 | 124 | # auto collapse if we have more than limit |
|
124 | 125 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
125 | 126 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
126 | 127 | |
|
127 | 128 | c.commit_statuses = ChangesetStatus.STATUSES |
|
128 | 129 | c.inline_comments = [] |
|
129 | 130 | c.files = [] |
|
130 | 131 | |
|
131 | c.statuses = [] | |
|
132 | 132 | c.comments = [] |
|
133 | 133 | c.unresolved_comments = [] |
|
134 | 134 | c.resolved_comments = [] |
|
135 | if len(c.commit_ranges) == 1: | |
|
135 | ||
|
136 | # Single commit | |
|
137 | if single_commit: | |
|
136 | 138 | commit = c.commit_ranges[0] |
|
137 | 139 | c.comments = CommentsModel().get_comments( |
|
138 | 140 | self.db_repo.repo_id, |
|
139 | 141 | revision=commit.raw_id) |
|
140 | c.statuses.append(ChangesetStatusModel().get_status( | |
|
141 | self.db_repo.repo_id, commit.raw_id)) | |
|
142 | ||
|
142 | 143 | # comments from PR |
|
143 | 144 | statuses = ChangesetStatusModel().get_statuses( |
|
144 | 145 | self.db_repo.repo_id, commit.raw_id, |
|
145 | 146 | with_revisions=True) |
|
146 | prs = set(st.pull_request for st in statuses | |
|
147 | if st.pull_request is not None) | |
|
147 | ||
|
148 | prs = set() | |
|
149 | reviewers = list() | |
|
150 | reviewers_duplicates = set() # to not have duplicates from multiple votes | |
|
151 | for c_status in statuses: | |
|
152 | ||
|
153 | # extract associated pull-requests from votes | |
|
154 | if c_status.pull_request: | |
|
155 | prs.add(c_status.pull_request) | |
|
156 | ||
|
157 | # extract reviewers | |
|
158 | _user_id = c_status.author.user_id | |
|
159 | if _user_id not in reviewers_duplicates: | |
|
160 | reviewers.append( | |
|
161 | StrictAttributeDict({ | |
|
162 | 'user': c_status.author, | |
|
163 | ||
|
164 | # fake attributed for commit, page that we don't have | |
|
165 | # but we share the display with PR page | |
|
166 | 'mandatory': False, | |
|
167 | 'reasons': [], | |
|
168 | 'rule_user_group_data': lambda: None | |
|
169 | }) | |
|
170 | ) | |
|
171 | reviewers_duplicates.add(_user_id) | |
|
172 | ||
|
173 | c.allowed_reviewers = reviewers | |
|
148 | 174 | # from associated statuses, check the pull requests, and |
|
149 | 175 | # show comments from them |
|
150 | 176 | for pr in prs: |
|
151 | 177 | c.comments.extend(pr.comments) |
|
152 | 178 | |
|
153 | 179 | c.unresolved_comments = CommentsModel()\ |
|
154 | 180 | .get_commit_unresolved_todos(commit.raw_id) |
|
155 | 181 | c.resolved_comments = CommentsModel()\ |
|
156 | 182 | .get_commit_resolved_todos(commit.raw_id) |
|
157 | 183 | |
|
184 | c.inline_comments_flat = CommentsModel()\ | |
|
185 | .get_commit_inline_comments(commit.raw_id) | |
|
186 | ||
|
187 | review_statuses = ChangesetStatusModel().aggregate_votes_by_user( | |
|
188 | statuses, reviewers) | |
|
189 | ||
|
190 | c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED | |
|
191 | ||
|
192 | c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []}) | |
|
193 | ||
|
194 | for review_obj, member, reasons, mandatory, status in review_statuses: | |
|
195 | member_reviewer = h.reviewer_as_json( | |
|
196 | member, reasons=reasons, mandatory=mandatory, | |
|
197 | user_group=None | |
|
198 | ) | |
|
199 | ||
|
200 | current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED | |
|
201 | member_reviewer['review_status'] = current_review_status | |
|
202 | member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status) | |
|
203 | member_reviewer['allowed_to_update'] = False | |
|
204 | c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer) | |
|
205 | ||
|
206 | c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json) | |
|
207 | ||
|
208 | # NOTE(marcink): this uses the same voting logic as in pull-requests | |
|
209 | c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses) | |
|
210 | c.commit_broadcast_channel = u'/repo${}$/commit/{}'.format( | |
|
211 | c.repo_name, | |
|
212 | commit.raw_id | |
|
213 | ) | |
|
214 | ||
|
158 | 215 | diff = None |
|
159 | 216 | # Iterate over ranges (default commit view is always one commit) |
|
160 | 217 | for commit in c.commit_ranges: |
|
161 | 218 | c.changes[commit.raw_id] = [] |
|
162 | 219 | |
|
163 | 220 | commit2 = commit |
|
164 | 221 | commit1 = commit.first_parent |
|
165 | 222 | |
|
166 | 223 | if method == 'show': |
|
167 | 224 | inline_comments = CommentsModel().get_inline_comments( |
|
168 | 225 | self.db_repo.repo_id, revision=commit.raw_id) |
|
169 | c.inline_cnt = CommentsModel().get_inline_comments_count( | |

170 | inline_comments) | |
|
226 | c.inline_cnt = len(CommentsModel().get_inline_comments_as_list( | |
|
227 | inline_comments)) | |
|
171 | 228 | c.inline_comments = inline_comments |
|
172 | 229 | |
|
173 | 230 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( |
|
174 | 231 | self.db_repo) |
|
175 | 232 | cache_file_path = diff_cache_exist( |
|
176 | 233 | cache_path, 'diff', commit.raw_id, |
|
177 | 234 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
178 | 235 | |
|
179 | 236 | caching_enabled = self._is_diff_cache_enabled(self.db_repo) |
|
180 | 237 | force_recache = str2bool(self.request.GET.get('force_recache')) |
|
181 | 238 | |
|
182 | 239 | cached_diff = None |
|
183 | 240 | if caching_enabled: |
|
184 | 241 | cached_diff = load_cached_diff(cache_file_path) |
|
185 | 242 | |
|
186 | 243 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
187 | 244 | if not force_recache and has_proper_diff_cache: |
|
188 | 245 | diffset = cached_diff['diff'] |
|
189 | 246 | else: |
|
190 | 247 | vcs_diff = self.rhodecode_vcs_repo.get_diff( |
|
191 | 248 | commit1, commit2, |
|
192 | 249 | ignore_whitespace=hide_whitespace_changes, |
|
193 | 250 | context=diff_context) |
|
194 | 251 | |
|
195 | 252 | diff_processor = diffs.DiffProcessor( |
|
196 | 253 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
197 | 254 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
198 | 255 | |
|
199 | 256 | _parsed = diff_processor.prepare() |
|
200 | 257 | |
|
201 | 258 | diffset = codeblocks.DiffSet( |
|
202 | 259 | repo_name=self.db_repo_name, |
|
203 | 260 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
204 | 261 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
205 | 262 | |
|
206 | 263 | diffset = self.path_filter.render_patchset_filtered( |
|
207 | 264 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
208 | 265 | |
|
209 | 266 | # save cached diff |
|
210 | 267 | if caching_enabled: |
|
211 | 268 | cache_diff(cache_file_path, diffset, None) |
|
212 | 269 | |
|
213 | 270 | c.limited_diff = diffset.limited_diff |
|
214 | 271 | c.changes[commit.raw_id] = diffset |
|
215 | 272 | else: |
|
216 | 273 | # TODO(marcink): no cache usage here... |
|
217 | 274 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
218 | 275 | commit1, commit2, |
|
219 | 276 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
220 | 277 | diff_processor = diffs.DiffProcessor( |
|
221 | 278 | _diff, format='newdiff', diff_limit=diff_limit, |
|
222 | 279 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
223 | 280 | # downloads/raw we only need RAW diff nothing else |
|
224 | 281 | diff = self.path_filter.get_raw_patch(diff_processor) |
|
225 | 282 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
226 | 283 | |
|
227 | 284 | # sort comments by how they were generated |
|
228 | 285 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
286 | c.at_version_num = None | |
|
229 | 287 | |
|
230 | 288 | if len(c.commit_ranges) == 1: |
|
231 | 289 | c.commit = c.commit_ranges[0] |
|
232 | 290 | c.parent_tmpl = ''.join( |
|
233 | 291 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
234 | 292 | |
|
235 | 293 | if method == 'download': |
|
236 | 294 | response = Response(diff) |
|
237 | 295 | response.content_type = 'text/plain' |
|
238 | 296 | response.content_disposition = ( |
|
239 | 297 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
240 | 298 | return response |
|
241 | 299 | elif method == 'patch': |
|
242 | 300 | c.diff = safe_unicode(diff) |
|
243 | 301 | patch = render( |
|
244 | 302 | 'rhodecode:templates/changeset/patch_changeset.mako', |
|
245 | 303 | self._get_template_context(c), self.request) |
|
246 | 304 | response = Response(patch) |
|
247 | 305 | response.content_type = 'text/plain' |
|
248 | 306 | return response |
|
249 | 307 | elif method == 'raw': |
|
250 | 308 | response = Response(diff) |
|
251 | 309 | response.content_type = 'text/plain' |
|
252 | 310 | return response |
|
253 | 311 | elif method == 'show': |
|
254 | 312 | if len(c.commit_ranges) == 1: |
|
255 | 313 | html = render( |
|
256 | 314 | 'rhodecode:templates/changeset/changeset.mako', |
|
257 | 315 | self._get_template_context(c), self.request) |
|
258 | 316 | return Response(html) |
|
259 | 317 | else: |
|
260 | 318 | c.ancestor = None |
|
261 | 319 | c.target_repo = self.db_repo |
|
262 | 320 | html = render( |
|
263 | 321 | 'rhodecode:templates/changeset/changeset_range.mako', |
|
264 | 322 | self._get_template_context(c), self.request) |
|
265 | 323 | return Response(html) |
|
266 | 324 | |
|
267 | 325 | raise HTTPBadRequest() |
|
268 | 326 | |
|
269 | 327 | @LoginRequired() |
|
270 | 328 | @HasRepoPermissionAnyDecorator( |
|
271 | 329 | 'repository.read', 'repository.write', 'repository.admin') |
|
272 | 330 | @view_config( |
|
273 | 331 | route_name='repo_commit', request_method='GET', |
|
274 | 332 | renderer=None) |
|
275 | 333 | def repo_commit_show(self): |
|
276 | 334 | commit_id = self.request.matchdict['commit_id'] |
|
277 | 335 | return self._commit(commit_id, method='show') |
|
278 | 336 | |
|
279 | 337 | @LoginRequired() |
|
280 | 338 | @HasRepoPermissionAnyDecorator( |
|
281 | 339 | 'repository.read', 'repository.write', 'repository.admin') |
|
282 | 340 | @view_config( |
|
283 | 341 | route_name='repo_commit_raw', request_method='GET', |
|
284 | 342 | renderer=None) |
|
285 | 343 | @view_config( |
|
286 | 344 | route_name='repo_commit_raw_deprecated', request_method='GET', |
|
287 | 345 | renderer=None) |
|
288 | 346 | def repo_commit_raw(self): |
|
289 | 347 | commit_id = self.request.matchdict['commit_id'] |
|
290 | 348 | return self._commit(commit_id, method='raw') |
|
291 | 349 | |
|
292 | 350 | @LoginRequired() |
|
293 | 351 | @HasRepoPermissionAnyDecorator( |
|
294 | 352 | 'repository.read', 'repository.write', 'repository.admin') |
|
295 | 353 | @view_config( |
|
296 | 354 | route_name='repo_commit_patch', request_method='GET', |
|
297 | 355 | renderer=None) |
|
298 | 356 | def repo_commit_patch(self): |
|
299 | 357 | commit_id = self.request.matchdict['commit_id'] |
|
300 | 358 | return self._commit(commit_id, method='patch') |
|
301 | 359 | |
|
302 | 360 | @LoginRequired() |
|
303 | 361 | @HasRepoPermissionAnyDecorator( |
|
304 | 362 | 'repository.read', 'repository.write', 'repository.admin') |
|
305 | 363 | @view_config( |
|
306 | 364 | route_name='repo_commit_download', request_method='GET', |
|
307 | 365 | renderer=None) |
|
308 | 366 | def repo_commit_download(self): |
|
309 | 367 | commit_id = self.request.matchdict['commit_id'] |
|
310 | 368 | return self._commit(commit_id, method='download') |
|
311 | 369 | |
|
312 | 370 | @LoginRequired() |
|
313 | 371 | @NotAnonymous() |
|
314 | 372 | @HasRepoPermissionAnyDecorator( |
|
315 | 373 | 'repository.read', 'repository.write', 'repository.admin') |
|
316 | 374 | @CSRFRequired() |
|
317 | 375 | @view_config( |
|
318 | 376 | route_name='repo_commit_comment_create', request_method='POST', |
|
319 | 377 | renderer='json_ext') |
|
320 | 378 | def repo_commit_comment_create(self): |
|
321 | 379 | _ = self.request.translate |
|
322 | 380 | commit_id = self.request.matchdict['commit_id'] |
|
323 | 381 | |
|
324 | 382 | c = self.load_default_context() |
|
325 | 383 | status = self.request.POST.get('changeset_status', None) |
|
326 | 384 | text = self.request.POST.get('text') |
|
327 | 385 | comment_type = self.request.POST.get('comment_type') |
|
328 | 386 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
329 | 387 | |
|
330 | 388 | if status: |
|
331 | 389 | text = text or (_('Status change %(transition_icon)s %(status)s') |
|
332 | 390 | % {'transition_icon': '>', |
|
333 | 391 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
334 | 392 | |
|
335 | 393 | multi_commit_ids = [] |
|
336 | 394 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): |
|
337 | 395 | if _commit_id not in ['', None, EmptyCommit.raw_id]: |
|
338 | 396 | if _commit_id not in multi_commit_ids: |
|
339 | 397 | multi_commit_ids.append(_commit_id) |
|
340 | 398 | |
|
341 | 399 | commit_ids = multi_commit_ids or [commit_id] |
|
342 | 400 | |
|
343 | 401 | comment = None |
|
344 | 402 | for current_id in filter(None, commit_ids): |
|
345 | 403 | comment = CommentsModel().create( |
|
346 | 404 | text=text, |
|
347 | 405 | repo=self.db_repo.repo_id, |
|
348 | 406 | user=self._rhodecode_db_user.user_id, |
|
349 | 407 | commit_id=current_id, |
|
350 | 408 | f_path=self.request.POST.get('f_path'), |
|
351 | 409 | line_no=self.request.POST.get('line'), |
|
352 | 410 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
353 | 411 | if status else None), |
|
354 | 412 | status_change_type=status, |
|
355 | 413 | comment_type=comment_type, |
|
356 | 414 | resolves_comment_id=resolves_comment_id, |
|
357 | 415 | auth_user=self._rhodecode_user |
|
358 | 416 | ) |
|
359 | 417 | |
|
360 | 418 | # get status if set ! |
|
361 | 419 | if status: |
|
362 | 420 | # if latest status was from pull request and it's closed |
|
363 | 421 | # disallow changing status ! |
|
364 | 422 | # dont_allow_on_closed_pull_request = True ! |
|
365 | 423 | |
|
366 | 424 | try: |
|
367 | 425 | ChangesetStatusModel().set_status( |
|
368 | 426 | self.db_repo.repo_id, |
|
369 | 427 | status, |
|
370 | 428 | self._rhodecode_db_user.user_id, |
|
371 | 429 | comment, |
|
372 | 430 | revision=current_id, |
|
373 | 431 | dont_allow_on_closed_pull_request=True |
|
374 | 432 | ) |
|
375 | 433 | except StatusChangeOnClosedPullRequestError: |
|
376 | 434 | msg = _('Changing the status of a commit associated with ' |
|
377 | 435 | 'a closed pull request is not allowed') |
|
378 | 436 | log.exception(msg) |
|
379 | 437 | h.flash(msg, category='warning') |
|
380 | 438 | raise HTTPFound(h.route_path( |
|
381 | 439 | 'repo_commit', repo_name=self.db_repo_name, |
|
382 | 440 | commit_id=current_id)) |
|
383 | 441 | |
|
384 | 442 | commit = self.db_repo.get_commit(current_id) |
|
385 | 443 | CommentsModel().trigger_commit_comment_hook( |
|
386 | 444 | self.db_repo, self._rhodecode_user, 'create', |
|
387 | 445 | data={'comment': comment, 'commit': commit}) |
|
388 | 446 | |
|
389 | 447 | # finalize, commit and redirect |
|
390 | 448 | Session().commit() |
|
391 | 449 | |
|
392 | 450 | data = { |
|
393 | 451 | 'target_id': h.safeid(h.safe_unicode( |
|
394 | 452 | self.request.POST.get('f_path'))), |
|
395 | 453 | } |
|
396 | 454 | if comment: |
|
397 | 455 | c.co = comment |
|
456 | c.at_version_num = 0 | |
|
398 | 457 | rendered_comment = render( |
|
399 | 458 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
400 | 459 | self._get_template_context(c), self.request) |
|
401 | 460 | |
|
402 | 461 | data.update(comment.get_dict()) |
|
403 | 462 | data.update({'rendered_text': rendered_comment}) |
|
404 | 463 | |
|
405 | 464 | return data |
|
406 | 465 | |
|
407 | 466 | @LoginRequired() |
|
408 | 467 | @NotAnonymous() |
|
409 | 468 | @HasRepoPermissionAnyDecorator( |
|
410 | 469 | 'repository.read', 'repository.write', 'repository.admin') |
|
411 | 470 | @CSRFRequired() |
|
412 | 471 | @view_config( |
|
413 | 472 | route_name='repo_commit_comment_preview', request_method='POST', |
|
414 | 473 | renderer='string', xhr=True) |
|
415 | 474 | def repo_commit_comment_preview(self): |
|
416 | 475 | # Technically a CSRF token is not needed as no state changes with this |
|
417 | 476 | # call. However, as this is a POST is better to have it, so automated |
|
418 | 477 | # tools don't flag it as potential CSRF. |
|
419 | 478 | # Post is required because the payload could be bigger than the maximum |
|
420 | 479 | # allowed by GET. |
|
421 | 480 | |
|
422 | 481 | text = self.request.POST.get('text') |
|
423 | 482 | renderer = self.request.POST.get('renderer') or 'rst' |
|
424 | 483 | if text: |
|
425 | 484 | return h.render(text, renderer=renderer, mentions=True, |
|
426 | 485 | repo_name=self.db_repo_name) |
|
427 | 486 | return '' |
|
428 | 487 | |
|
429 | 488 | @LoginRequired() |
|
430 | @NotAnonymous() | |
|
431 | 489 | @HasRepoPermissionAnyDecorator( |
|
432 | 490 | 'repository.read', 'repository.write', 'repository.admin') |
|
433 | 491 | @CSRFRequired() |
|
434 | 492 | @view_config( |
|
435 | 493 | route_name='repo_commit_comment_history_view', request_method='POST', |
|
436 | 494 | renderer='string', xhr=True) |
|
437 | 495 | def repo_commit_comment_history_view(self): |
|
438 | 496 | c = self.load_default_context() |
|
439 | 497 | |
|
440 | 498 | comment_history_id = self.request.matchdict['comment_history_id'] |
|
441 | 499 | comment_history = ChangesetCommentHistory.get_or_404(comment_history_id) |
|
442 | 500 | is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id |
|
443 | 501 | |
|
444 | 502 | if is_repo_comment: |
|
445 | 503 | c.comment_history = comment_history |
|
446 | 504 | |
|
447 | 505 | rendered_comment = render( |
|
448 | 506 | 'rhodecode:templates/changeset/comment_history.mako', |
|
449 | 507 | self._get_template_context(c) |
|
450 | 508 | , self.request) |
|
451 | 509 | return rendered_comment |
|
452 | 510 | else: |
|
453 | 511 | log.warning('No permissions for user %s to show comment_history_id: %s', |
|
454 | 512 | self._rhodecode_db_user, comment_history_id) |
|
455 | 513 | raise HTTPNotFound() |
|
456 | 514 | |
|
457 | 515 | @LoginRequired() |
|
458 | 516 | @NotAnonymous() |
|
459 | 517 | @HasRepoPermissionAnyDecorator( |
|
460 | 518 | 'repository.read', 'repository.write', 'repository.admin') |
|
461 | 519 | @CSRFRequired() |
|
462 | 520 | @view_config( |
|
463 | 521 | route_name='repo_commit_comment_attachment_upload', request_method='POST', |
|
464 | 522 | renderer='json_ext', xhr=True) |
|
465 | 523 | def repo_commit_comment_attachment_upload(self): |
|
466 | 524 | c = self.load_default_context() |
|
467 | 525 | upload_key = 'attachment' |
|
468 | 526 | |
|
469 | 527 | file_obj = self.request.POST.get(upload_key) |
|
470 | 528 | |
|
471 | 529 | if file_obj is None: |
|
472 | 530 | self.request.response.status = 400 |
|
473 | 531 | return {'store_fid': None, |
|
474 | 532 | 'access_path': None, |
|
475 | 533 | 'error': '{} data field is missing'.format(upload_key)} |
|
476 | 534 | |
|
477 | 535 | if not hasattr(file_obj, 'filename'): |
|
478 | 536 | self.request.response.status = 400 |
|
479 | 537 | return {'store_fid': None, |
|
480 | 538 | 'access_path': None, |
|
481 | 539 | 'error': 'filename cannot be read from the data field'} |
|
482 | 540 | |
|
483 | 541 | filename = file_obj.filename |
|
484 | 542 | file_display_name = filename |
|
485 | 543 | |
|
486 | 544 | metadata = { |
|
487 | 545 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
488 | 546 | 'user_id': self._rhodecode_user.user_id, |
|
489 | 547 | 'ip': self._rhodecode_user.ip_addr}} |
|
490 | 548 | |
|
491 | 549 | # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size |
|
492 | 550 | allowed_extensions = [ |
|
493 | 551 | 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', |
|
494 | 552 | '.pptx', '.txt', '.xlsx', '.zip'] |
|
495 | 553 | max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js |
|
496 | 554 | |
|
497 | 555 | try: |
|
498 | 556 | storage = store_utils.get_file_storage(self.request.registry.settings) |
|
499 | 557 | store_uid, metadata = storage.save_file( |
|
500 | 558 | file_obj.file, filename, extra_metadata=metadata, |
|
501 | 559 | extensions=allowed_extensions, max_filesize=max_file_size) |
|
502 | 560 | except FileNotAllowedException: |
|
503 | 561 | self.request.response.status = 400 |
|
504 | 562 | permitted_extensions = ', '.join(allowed_extensions) |
|
505 | 563 | error_msg = 'File `{}` is not allowed. ' \ |
|
506 | 564 | 'Only following extensions are permitted: {}'.format( |
|
507 | 565 | filename, permitted_extensions) |
|
508 | 566 | return {'store_fid': None, |
|
509 | 567 | 'access_path': None, |
|
510 | 568 | 'error': error_msg} |
|
511 | 569 | except FileOverSizeException: |
|
512 | 570 | self.request.response.status = 400 |
|
513 | 571 | limit_mb = h.format_byte_size_binary(max_file_size) |
|
514 | 572 | return {'store_fid': None, |
|
515 | 573 | 'access_path': None, |
|
516 | 574 | 'error': 'File {} is exceeding allowed limit of {}.'.format( |
|
517 | 575 | filename, limit_mb)} |
|
518 | 576 | |
|
519 | 577 | try: |
|
520 | 578 | entry = FileStore.create( |
|
521 | 579 | file_uid=store_uid, filename=metadata["filename"], |
|
522 | 580 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
523 | 581 | file_display_name=file_display_name, |
|
524 | 582 | file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), |
|
525 | 583 | hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, |
|
526 | 584 | scope_repo_id=self.db_repo.repo_id |
|
527 | 585 | ) |
|
528 | 586 | Session().add(entry) |
|
529 | 587 | Session().commit() |
|
530 | 588 | log.debug('Stored upload in DB as %s', entry) |
|
531 | 589 | except Exception: |
|
532 | 590 | log.exception('Failed to store file %s', filename) |
|
533 | 591 | self.request.response.status = 400 |
|
534 | 592 | return {'store_fid': None, |
|
535 | 593 | 'access_path': None, |
|
536 | 594 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
537 | 595 | |
|
538 | 596 | Session().commit() |
|
539 | 597 | |
|
540 | 598 | return { |
|
541 | 599 | 'store_fid': store_uid, |
|
542 | 600 | 'access_path': h.route_path( |
|
543 | 601 | 'download_file', fid=store_uid), |
|
544 | 602 | 'fqn_access_path': h.route_url( |
|
545 | 603 | 'download_file', fid=store_uid), |
|
546 | 604 | 'repo_access_path': h.route_path( |
|
547 | 605 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
548 | 606 | 'repo_fqn_access_path': h.route_url( |
|
549 | 607 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
550 | 608 | } |
|
551 | 609 | |
|
552 | 610 | @LoginRequired() |
|
553 | 611 | @NotAnonymous() |
|
554 | 612 | @HasRepoPermissionAnyDecorator( |
|
555 | 613 | 'repository.read', 'repository.write', 'repository.admin') |
|
556 | 614 | @CSRFRequired() |
|
557 | 615 | @view_config( |
|
558 | 616 | route_name='repo_commit_comment_delete', request_method='POST', |
|
559 | 617 | renderer='json_ext') |
|
560 | 618 | def repo_commit_comment_delete(self): |
|
561 | 619 | commit_id = self.request.matchdict['commit_id'] |
|
562 | 620 | comment_id = self.request.matchdict['comment_id'] |
|
563 | 621 | |
|
564 | 622 | comment = ChangesetComment.get_or_404(comment_id) |
|
565 | 623 | if not comment: |
|
566 | 624 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
567 | 625 | # comment already deleted in another call probably |
|
568 | 626 | return True |
|
569 | 627 | |
|
570 | 628 | if comment.immutable: |
|
571 | 629 | # don't allow deleting comments that are immutable |
|
572 | 630 | raise HTTPForbidden() |
|
573 | 631 | |
|
574 | 632 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
575 | 633 | super_admin = h.HasPermissionAny('hg.admin')() |
|
576 | 634 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
577 | 635 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id |
|
578 | 636 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
579 | 637 | |
|
580 | 638 | if super_admin or comment_owner or comment_repo_admin: |
|
581 | 639 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
582 | 640 | Session().commit() |
|
583 | 641 | return True |
|
584 | 642 | else: |
|
585 | 643 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
586 | 644 | self._rhodecode_db_user, comment_id) |
|
587 | 645 | raise HTTPNotFound() |
|
588 | 646 | |
|
589 | 647 | @LoginRequired() |
|
590 | 648 | @NotAnonymous() |
|
591 | 649 | @HasRepoPermissionAnyDecorator( |
|
592 | 650 | 'repository.read', 'repository.write', 'repository.admin') |
|
593 | 651 | @CSRFRequired() |
|
594 | 652 | @view_config( |
|
595 | 653 | route_name='repo_commit_comment_edit', request_method='POST', |
|
596 | 654 | renderer='json_ext') |
|
597 | 655 | def repo_commit_comment_edit(self): |
|
598 | 656 | self.load_default_context() |
|
599 | 657 | |
|
600 | 658 | comment_id = self.request.matchdict['comment_id'] |
|
601 | 659 | comment = ChangesetComment.get_or_404(comment_id) |
|
602 | 660 | |
|
603 | 661 | if comment.immutable: |
|
604 | 662 | # don't allow deleting comments that are immutable |
|
605 | 663 | raise HTTPForbidden() |
|
606 | 664 | |
|
607 | 665 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
608 | 666 | super_admin = h.HasPermissionAny('hg.admin')() |
|
609 | 667 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
610 | 668 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id |
|
611 | 669 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
612 | 670 | |
|
613 | 671 | if super_admin or comment_owner or comment_repo_admin: |
|
614 | 672 | text = self.request.POST.get('text') |
|
615 | 673 | version = self.request.POST.get('version') |
|
616 | 674 | if text == comment.text: |
|
617 | 675 | log.warning( |
|
618 | 676 | 'Comment(repo): ' |
|
619 | 677 | 'Trying to create new version ' |
|
620 | 678 | 'with the same comment body {}'.format( |
|
621 | 679 | comment_id, |
|
622 | 680 | ) |
|
623 | 681 | ) |
|
624 | 682 | raise HTTPNotFound() |
|
625 | 683 | |
|
626 | 684 | if version.isdigit(): |
|
627 | 685 | version = int(version) |
|
628 | 686 | else: |
|
629 | 687 | log.warning( |
|
630 | 688 | 'Comment(repo): Wrong version type {} {} ' |
|
631 | 689 | 'for comment {}'.format( |
|
632 | 690 | version, |
|
633 | 691 | type(version), |
|
634 | 692 | comment_id, |
|
635 | 693 | ) |
|
636 | 694 | ) |
|
637 | 695 | raise HTTPNotFound() |
|
638 | 696 | |
|
639 | 697 | try: |
|
640 | 698 | comment_history = CommentsModel().edit( |
|
641 | 699 | comment_id=comment_id, |
|
642 | 700 | text=text, |
|
643 | 701 | auth_user=self._rhodecode_user, |
|
644 | 702 | version=version, |
|
645 | 703 | ) |
|
646 | 704 | except CommentVersionMismatch: |
|
647 | 705 | raise HTTPConflict() |
|
648 | 706 | |
|
649 | 707 | if not comment_history: |
|
650 | 708 | raise HTTPNotFound() |
|
651 | 709 | |
|
652 | 710 | commit_id = self.request.matchdict['commit_id'] |
|
653 | 711 | commit = self.db_repo.get_commit(commit_id) |
|
654 | 712 | CommentsModel().trigger_commit_comment_hook( |
|
655 | 713 | self.db_repo, self._rhodecode_user, 'edit', |
|
656 | 714 | data={'comment': comment, 'commit': commit}) |
|
657 | 715 | |
|
658 | 716 | Session().commit() |
|
659 | 717 | return { |
|
660 | 718 | 'comment_history_id': comment_history.comment_history_id, |
|
661 | 719 | 'comment_id': comment.comment_id, |
|
662 | 720 | 'comment_version': comment_history.version, |
|
663 | 721 | 'comment_author_username': comment_history.author.username, |
|
664 | 722 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), |
|
665 | 723 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
666 | 724 | time_is_local=True), |
|
667 | 725 | } |
|
668 | 726 | else: |
|
669 | 727 | log.warning('No permissions for user %s to edit comment_id: %s', |
|
670 | 728 | self._rhodecode_db_user, comment_id) |
|
671 | 729 | raise HTTPNotFound() |
|
672 | 730 | |
|
673 | 731 | @LoginRequired() |
|
674 | 732 | @HasRepoPermissionAnyDecorator( |
|
675 | 733 | 'repository.read', 'repository.write', 'repository.admin') |
|
676 | 734 | @view_config( |
|
677 | 735 | route_name='repo_commit_data', request_method='GET', |
|
678 | 736 | renderer='json_ext', xhr=True) |
|
679 | 737 | def repo_commit_data(self): |
|
680 | 738 | commit_id = self.request.matchdict['commit_id'] |
|
681 | 739 | self.load_default_context() |
|
682 | 740 | |
|
683 | 741 | try: |
|
684 | 742 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
685 | 743 | except CommitDoesNotExistError as e: |
|
686 | 744 | return EmptyCommit(message=str(e)) |
|
687 | 745 | |
|
688 | 746 | @LoginRequired() |
|
689 | 747 | @HasRepoPermissionAnyDecorator( |
|
690 | 748 | 'repository.read', 'repository.write', 'repository.admin') |
|
691 | 749 | @view_config( |
|
692 | 750 | route_name='repo_commit_children', request_method='GET', |
|
693 | 751 | renderer='json_ext', xhr=True) |
|
694 | 752 | def repo_commit_children(self): |
|
695 | 753 | commit_id = self.request.matchdict['commit_id'] |
|
696 | 754 | self.load_default_context() |
|
697 | 755 | |
|
698 | 756 | try: |
|
699 | 757 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
700 | 758 | children = commit.children |
|
701 | 759 | except CommitDoesNotExistError: |
|
702 | 760 | children = [] |
|
703 | 761 | |
|
704 | 762 | result = {"results": children} |
|
705 | 763 | return result |
|
706 | 764 | |
|
707 | 765 | @LoginRequired() |
|
708 | 766 | @HasRepoPermissionAnyDecorator( |
|
709 | 767 | 'repository.read', 'repository.write', 'repository.admin') |
|
710 | 768 | @view_config( |
|
711 | 769 | route_name='repo_commit_parents', request_method='GET', |
|
712 | 770 | renderer='json_ext') |
|
713 | 771 | def repo_commit_parents(self): |
|
714 | 772 | commit_id = self.request.matchdict['commit_id'] |
|
715 | 773 | self.load_default_context() |
|
716 | 774 | |
|
717 | 775 | try: |
|
718 | 776 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
719 | 777 | parents = commit.parents |
|
720 | 778 | except CommitDoesNotExistError: |
|
721 | 779 | parents = [] |
|
722 | 780 | result = {"results": parents} |
|
723 | 781 | return result |
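
Before the next file: the repo_commit_comment_edit view above treats the POSTed version field as an optimistic-concurrency token. It rejects no-op edits (same text), rejects non-numeric versions, and expects CommentsModel().edit() to raise CommentVersionMismatch when someone else edited the comment in the meantime, which the view maps to HTTP 409 Conflict. A hedged sketch of that check, using hypothetical names rather than the actual model API:

    class CommentVersionMismatch(Exception):
        pass

    def edit_comment(comment, new_text, expected_version):
        # The client sends back the version it last rendered; if the stored
        # version has moved on, refuse the edit instead of silently overwriting.
        if comment.version != expected_version:
            raise CommentVersionMismatch()
        comment.version += 1
        comment.text = new_text
        return comment
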
@@ -1,1637 +1,1757 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import collections |
|
23 | 23 | |
|
24 | 24 | import formencode |
|
25 | 25 | import formencode.htmlfill |
|
26 | 26 | import peppercorn |
|
27 | 27 | from pyramid.httpexceptions import ( |
|
28 | 28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict) |
|
29 | 29 | from pyramid.view import view_config |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | |
|
32 | 32 | from rhodecode.apps._base import RepoAppView, DataGridAppView |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream |
|
35 | 35 | from rhodecode.lib.base import vcs_operation_context |
|
36 | 36 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist |
|
37 | 37 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
38 | 38 | from rhodecode.lib.ext_json import json |
|
39 | 39 | from rhodecode.lib.auth import ( |
|
40 | 40 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, |
|
41 | 41 | NotAnonymous, CSRFRequired) |
|
42 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode | |
|
42 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int | |
|
43 | 43 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason |
|
44 | 44 | from rhodecode.lib.vcs.exceptions import ( |
|
45 | 45 | CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) |
|
46 | 46 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
47 | 47 | from rhodecode.model.comment import CommentsModel |
|
48 | 48 | from rhodecode.model.db import ( |
|
49 | 49 | func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository) |
|
50 | 50 | from rhodecode.model.forms import PullRequestForm |
|
51 | 51 | from rhodecode.model.meta import Session |
|
52 | 52 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
53 | 53 | from rhodecode.model.scm import ScmModel |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class RepoPullRequestsView(RepoAppView, DataGridAppView): |
|
59 | 59 | |
|
60 | 60 | def load_default_context(self): |
|
61 | 61 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
62 | 62 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
63 | 63 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
64 | 64 | # backward compat., we use for OLD PRs a plain renderer |
|
65 | 65 | c.renderer = 'plain' |
|
66 | 66 | return c |
|
67 | 67 | |
|
68 | 68 | def _get_pull_requests_list( |
|
69 | 69 | self, repo_name, source, filter_type, opened_by, statuses): |
|
70 | 70 | |
|
71 | 71 | draw, start, limit = self._extract_chunk(self.request) |
|
72 | 72 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
73 | 73 | _render = self.request.get_partial_renderer( |
|
74 | 74 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
75 | 75 | |
|
76 | 76 | # pagination |
|
77 | 77 | |
|
78 | 78 | if filter_type == 'awaiting_review': |
|
79 | 79 | pull_requests = PullRequestModel().get_awaiting_review( |
|
80 | 80 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
81 | 81 | statuses=statuses, offset=start, length=limit, |
|
82 | 82 | order_by=order_by, order_dir=order_dir) |
|
83 | 83 | pull_requests_total_count = PullRequestModel().count_awaiting_review( |
|
84 | 84 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
85 | 85 | opened_by=opened_by) |
|
86 | 86 | elif filter_type == 'awaiting_my_review': |
|
87 | 87 | pull_requests = PullRequestModel().get_awaiting_my_review( |
|
88 | 88 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
89 | 89 | user_id=self._rhodecode_user.user_id, statuses=statuses, |
|
90 | 90 | offset=start, length=limit, order_by=order_by, |
|
91 | 91 | order_dir=order_dir) |
|
92 | 92 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( |
|
93 | 93 | repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, |
|
94 | 94 | statuses=statuses, opened_by=opened_by) |
|
95 | 95 | else: |
|
96 | 96 | pull_requests = PullRequestModel().get_all( |
|
97 | 97 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
98 | 98 | statuses=statuses, offset=start, length=limit, |
|
99 | 99 | order_by=order_by, order_dir=order_dir) |
|
100 | 100 | pull_requests_total_count = PullRequestModel().count_all( |
|
101 | 101 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
102 | 102 | opened_by=opened_by) |
|
103 | 103 | |
|
104 | 104 | data = [] |
|
105 | 105 | comments_model = CommentsModel() |
|
106 | 106 | for pr in pull_requests: |
|
107 | 107 | comments = comments_model.get_all_comments( |
|
108 | 108 | self.db_repo.repo_id, pull_request=pr) |
|
109 | 109 | |
|
110 | 110 | data.append({ |
|
111 | 111 | 'name': _render('pullrequest_name', |
|
112 | 112 | pr.pull_request_id, pr.pull_request_state, |
|
113 | 113 | pr.work_in_progress, pr.target_repo.repo_name), |
|
114 | 114 | 'name_raw': pr.pull_request_id, |
|
115 | 115 | 'status': _render('pullrequest_status', |
|
116 | 116 | pr.calculated_review_status()), |
|
117 | 117 | 'title': _render('pullrequest_title', pr.title, pr.description), |
|
118 | 118 | 'description': h.escape(pr.description), |
|
119 | 119 | 'updated_on': _render('pullrequest_updated_on', |
|
120 | 120 | h.datetime_to_time(pr.updated_on)), |
|
121 | 121 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), |
|
122 | 122 | 'created_on': _render('pullrequest_updated_on', |
|
123 | 123 | h.datetime_to_time(pr.created_on)), |
|
124 | 124 | 'created_on_raw': h.datetime_to_time(pr.created_on), |
|
125 | 125 | 'state': pr.pull_request_state, |
|
126 | 126 | 'author': _render('pullrequest_author', |
|
127 | 127 | pr.author.full_contact, ), |
|
128 | 128 | 'author_raw': pr.author.full_name, |
|
129 | 129 | 'comments': _render('pullrequest_comments', len(comments)), |
|
130 | 130 | 'comments_raw': len(comments), |
|
131 | 131 | 'closed': pr.is_closed(), |
|
132 | 132 | }) |
|
133 | 133 | |
|
134 | 134 | data = ({ |
|
135 | 135 | 'draw': draw, |
|
136 | 136 | 'data': data, |
|
137 | 137 | 'recordsTotal': pull_requests_total_count, |
|
138 | 138 | 'recordsFiltered': pull_requests_total_count, |
|
139 | 139 | }) |
|
140 | 140 | return data |
|
141 | 141 | |
|
142 | 142 | @LoginRequired() |
|
143 | 143 | @HasRepoPermissionAnyDecorator( |
|
144 | 144 | 'repository.read', 'repository.write', 'repository.admin') |
|
145 | 145 | @view_config( |
|
146 | 146 | route_name='pullrequest_show_all', request_method='GET', |
|
147 | 147 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') |
|
148 | 148 | def pull_request_list(self): |
|
149 | 149 | c = self.load_default_context() |
|
150 | 150 | |
|
151 | 151 | req_get = self.request.GET |
|
152 | 152 | c.source = str2bool(req_get.get('source')) |
|
153 | 153 | c.closed = str2bool(req_get.get('closed')) |
|
154 | 154 | c.my = str2bool(req_get.get('my')) |
|
155 | 155 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
156 | 156 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
157 | 157 | |
|
158 | 158 | c.active = 'open' |
|
159 | 159 | if c.my: |
|
160 | 160 | c.active = 'my' |
|
161 | 161 | if c.closed: |
|
162 | 162 | c.active = 'closed' |
|
163 | 163 | if c.awaiting_review and not c.source: |
|
164 | 164 | c.active = 'awaiting' |
|
165 | 165 | if c.source and not c.awaiting_review: |
|
166 | 166 | c.active = 'source' |
|
167 | 167 | if c.awaiting_my_review: |
|
168 | 168 | c.active = 'awaiting_my' |
|
169 | 169 | |
|
170 | 170 | return self._get_template_context(c) |
|
171 | 171 | |
|
172 | 172 | @LoginRequired() |
|
173 | 173 | @HasRepoPermissionAnyDecorator( |
|
174 | 174 | 'repository.read', 'repository.write', 'repository.admin') |
|
175 | 175 | @view_config( |
|
176 | 176 | route_name='pullrequest_show_all_data', request_method='GET', |
|
177 | 177 | renderer='json_ext', xhr=True) |
|
178 | 178 | def pull_request_list_data(self): |
|
179 | 179 | self.load_default_context() |
|
180 | 180 | |
|
181 | 181 | # additional filters |
|
182 | 182 | req_get = self.request.GET |
|
183 | 183 | source = str2bool(req_get.get('source')) |
|
184 | 184 | closed = str2bool(req_get.get('closed')) |
|
185 | 185 | my = str2bool(req_get.get('my')) |
|
186 | 186 | awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
187 | 187 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
188 | 188 | |
|
189 | 189 | filter_type = 'awaiting_review' if awaiting_review \ |
|
190 | 190 | else 'awaiting_my_review' if awaiting_my_review \ |
|
191 | 191 | else None |
|
192 | 192 | |
|
193 | 193 | opened_by = None |
|
194 | 194 | if my: |
|
195 | 195 | opened_by = [self._rhodecode_user.user_id] |
|
196 | 196 | |
|
197 | 197 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] |
|
198 | 198 | if closed: |
|
199 | 199 | statuses = [PullRequest.STATUS_CLOSED] |
|
200 | 200 | |
|
201 | 201 | data = self._get_pull_requests_list( |
|
202 | 202 | repo_name=self.db_repo_name, source=source, |
|
203 | 203 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) |
|
204 | 204 | |
|
205 | 205 | return data |
|
206 | 206 | |
|
207 | 207 | def _is_diff_cache_enabled(self, target_repo): |
|
208 | 208 | caching_enabled = self._get_general_setting( |
|
209 | 209 | target_repo, 'rhodecode_diff_cache') |
|
210 | 210 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
211 | 211 | return caching_enabled |
|
212 | 212 | |
|
213 | 213 | def _get_diffset(self, source_repo_name, source_repo, |
|
214 | 214 | ancestor_commit, |
|
215 | 215 | source_ref_id, target_ref_id, |
|
216 | 216 | target_commit, source_commit, diff_limit, file_limit, |
|
217 | 217 | fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True): |
|
218 | 218 | |
|
219 | 219 | if use_ancestor: |
|
220 | 220 | # we might want to not use it for versions |
|
221 | 221 | target_ref_id = ancestor_commit.raw_id |
|
222 | 222 | |
|
223 | 223 | vcs_diff = PullRequestModel().get_diff( |
|
224 | 224 | source_repo, source_ref_id, target_ref_id, |
|
225 | 225 | hide_whitespace_changes, diff_context) |
|
226 | 226 | |
|
227 | 227 | diff_processor = diffs.DiffProcessor( |
|
228 | 228 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
229 | 229 | file_limit=file_limit, show_full_diff=fulldiff) |
|
230 | 230 | |
|
231 | 231 | _parsed = diff_processor.prepare() |
|
232 | 232 | |
|
233 | 233 | diffset = codeblocks.DiffSet( |
|
234 | 234 | repo_name=self.db_repo_name, |
|
235 | 235 | source_repo_name=source_repo_name, |
|
236 | 236 | source_node_getter=codeblocks.diffset_node_getter(target_commit), |
|
237 | 237 | target_node_getter=codeblocks.diffset_node_getter(source_commit), |
|
238 | 238 | ) |
|
239 | 239 | diffset = self.path_filter.render_patchset_filtered( |
|
240 | 240 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) |
|
241 | 241 | |
|
242 | 242 | return diffset |
|
243 | 243 | |
|
244 | 244 | def _get_range_diffset(self, source_scm, source_repo, |
|
245 | 245 | commit1, commit2, diff_limit, file_limit, |
|
246 | 246 | fulldiff, hide_whitespace_changes, diff_context): |
|
247 | 247 | vcs_diff = source_scm.get_diff( |
|
248 | 248 | commit1, commit2, |
|
249 | 249 | ignore_whitespace=hide_whitespace_changes, |
|
250 | 250 | context=diff_context) |
|
251 | 251 | |
|
252 | 252 | diff_processor = diffs.DiffProcessor( |
|
253 | 253 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
254 | 254 | file_limit=file_limit, show_full_diff=fulldiff) |
|
255 | 255 | |
|
256 | 256 | _parsed = diff_processor.prepare() |
|
257 | 257 | |
|
258 | 258 | diffset = codeblocks.DiffSet( |
|
259 | 259 | repo_name=source_repo.repo_name, |
|
260 | 260 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
261 | 261 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
262 | 262 | |
|
263 | 263 | diffset = self.path_filter.render_patchset_filtered( |
|
264 | 264 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
265 | 265 | |
|
266 | 266 | return diffset |
|
267 | 267 | |
|
268 | def register_comments_vars(self, c, pull_request, versions): | |
|
269 | comments_model = CommentsModel() | |
|
270 | ||
|
271 | # GENERAL COMMENTS with versions # | |
|
272 | q = comments_model._all_general_comments_of_pull_request(pull_request) | |
|
273 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
|
274 | general_comments = q | |
|
275 | ||
|
276 | # pick comments we want to render at current version | |
|
277 | c.comment_versions = comments_model.aggregate_comments( | |
|
278 | general_comments, versions, c.at_version_num) | |
|
279 | ||
|
280 | # INLINE COMMENTS with versions # | |
|
281 | q = comments_model._all_inline_comments_of_pull_request(pull_request) | |
|
282 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
|
283 | inline_comments = q | |
|
284 | ||
|
285 | c.inline_versions = comments_model.aggregate_comments( | |
|
286 | inline_comments, versions, c.at_version_num, inline=True) | |
|
287 | ||
|
288 | # Comments inline+general | |
|
289 | if c.at_version: | |
|
290 | c.inline_comments_flat = c.inline_versions[c.at_version_num]['display'] | |
|
291 | c.comments = c.comment_versions[c.at_version_num]['display'] | |
|
292 | else: | |
|
293 | c.inline_comments_flat = c.inline_versions[c.at_version_num]['until'] | |
|
294 | c.comments = c.comment_versions[c.at_version_num]['until'] | |
|
295 | ||
|
296 | return general_comments, inline_comments | |
|
297 | ||
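
The register_comments_vars() helper added above feeds the per-version comment display: CommentsModel.aggregate_comments() buckets general and inline comments per pull-request version, and the view then picks either the 'display' bucket (comments made exactly at the pinned version) or the 'until' bucket (everything up to and including that version, used for the latest view). A rough, simplified sketch of that bucketing idea; the real implementation lives in CommentsModel, and the comment objects are assumed to expose pull_request_version_id:

    import collections

    def aggregate_comments_sketch(comments, version_ids):
        # Group comments by the PR version they were made against; None means
        # the comment targets the latest, unversioned state.
        by_version = collections.OrderedDict((v, []) for v in version_ids)
        by_version[None] = []
        for comment in comments:
            by_version.setdefault(comment.pull_request_version_id, []).append(comment)

        result, seen = {}, []
        for ver, items in by_version.items():
            seen.extend(items)
            result[ver] = {'display': list(items), 'until': list(seen)}
        return result
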
|
268 | 298 | @LoginRequired() |
|
269 | 299 | @HasRepoPermissionAnyDecorator( |
|
270 | 300 | 'repository.read', 'repository.write', 'repository.admin') |
|
271 | 301 | @view_config( |
|
272 | 302 | route_name='pullrequest_show', request_method='GET', |
|
273 | 303 | renderer='rhodecode:templates/pullrequests/pullrequest_show.mako') |
|
274 | 304 | def pull_request_show(self): |
|
275 | 305 | _ = self.request.translate |
|
276 | 306 | c = self.load_default_context() |
|
277 | 307 | |
|
278 | 308 | pull_request = PullRequest.get_or_404( |
|
279 | 309 | self.request.matchdict['pull_request_id']) |
|
280 | 310 | pull_request_id = pull_request.pull_request_id |
|
281 | 311 | |
|
282 | 312 | c.state_progressing = pull_request.is_state_changing() |
|
313 | c.pr_broadcast_channel = '/repo${}$/pr/{}'.format( | |
|
314 | pull_request.target_repo.repo_name, pull_request.pull_request_id) | |
|
283 | 315 | |
|
284 | 316 | _new_state = { |
|
285 | 317 | 'created': PullRequest.STATE_CREATED, |
|
286 | 318 | }.get(self.request.GET.get('force_state')) |
|
287 | 319 | |
|
288 | 320 | if c.is_super_admin and _new_state: |
|
289 | 321 | with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state): |
|
290 | 322 | h.flash( |
|
291 | 323 | _('Pull Request state was force changed to `{}`').format(_new_state), |
|
292 | 324 | category='success') |
|
293 | 325 | Session().commit() |
|
294 | 326 | |
|
295 | 327 | raise HTTPFound(h.route_path( |
|
296 | 328 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
297 | 329 | pull_request_id=pull_request_id)) |
|
298 | 330 | |
|
299 | 331 | version = self.request.GET.get('version') |
|
300 | 332 | from_version = self.request.GET.get('from_version') or version |
|
301 | 333 | merge_checks = self.request.GET.get('merge_checks') |
|
302 | 334 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) |
|
335 | force_refresh = str2bool(self.request.GET.get('force_refresh')) | |
|
336 | c.range_diff_on = self.request.GET.get('range-diff') == "1" | |
|
303 | 337 | |
|
304 | 338 | # fetch global flags of ignore ws or context lines |
|
305 | 339 | diff_context = diffs.get_diff_context(self.request) |
|
306 | 340 | hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) |
|
307 | 341 | |
|
308 | force_refresh = str2bool(self.request.GET.get('force_refresh')) | |
|
309 | ||
|
310 | 342 | (pull_request_latest, |
|
311 | 343 | pull_request_at_ver, |
|
312 | 344 | pull_request_display_obj, |
|
313 | 345 | at_version) = PullRequestModel().get_pr_version( |
|
314 | 346 | pull_request_id, version=version) |
|
347 | ||
|
315 | 348 | pr_closed = pull_request_latest.is_closed() |
|
316 | 349 | |
|
317 | 350 | if pr_closed and (version or from_version): |
|
318 | # not allow to browse versions | |
|
351 | # not allow to browse versions for closed PR | |
|
319 | 352 | raise HTTPFound(h.route_path( |
|
320 | 353 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
321 | 354 | pull_request_id=pull_request_id)) |
|
322 | 355 | |
|
323 | 356 | versions = pull_request_display_obj.versions() |
|
324 | 357 | # used to store per-commit range diffs |
|
325 | 358 | c.changes = collections.OrderedDict() |
|
326 | c.range_diff_on = self.request.GET.get('range-diff') == "1" | |
|
327 | 359 | |
|
328 | 360 | c.at_version = at_version |
|
329 | 361 | c.at_version_num = (at_version |
|
330 | if at_version and at_version != 'latest'

362 | if at_version and at_version != PullRequest.LATEST_VER | |
|
331 | 363 | else None) |
|
332 | c.at_version_pos = ChangesetComment.get_index_from_version( | |
|
364 | ||
|
365 | c.at_version_index = ChangesetComment.get_index_from_version( | |
|
333 | 366 | c.at_version_num, versions) |
|
334 | 367 | |
|
335 | 368 | (prev_pull_request_latest, |
|
336 | 369 | prev_pull_request_at_ver, |
|
337 | 370 | prev_pull_request_display_obj, |
|
338 | 371 | prev_at_version) = PullRequestModel().get_pr_version( |
|
339 | 372 | pull_request_id, version=from_version) |
|
340 | 373 | |
|
341 | 374 | c.from_version = prev_at_version |
|
342 | 375 | c.from_version_num = (prev_at_version |
|
343 | if prev_at_version and prev_at_version != 'latest'

376 | if prev_at_version and prev_at_version != PullRequest.LATEST_VER | |
|
344 | 377 | else None) |
|
345 | c.from_version_pos = ChangesetComment.get_index_from_version(

378 | c.from_version_index = ChangesetComment.get_index_from_version( | |
|
346 | 379 | c.from_version_num, versions) |
|
347 | 380 | |
|
348 | 381 | # define if we're in COMPARE mode or VIEW at version mode |
|
349 | 382 | compare = at_version != prev_at_version |
|
350 | 383 | |
|
351 | 384 | # pull_requests repo_name we opened it against |
|
352 | 385 | # ie. target_repo must match |
|
353 | 386 | if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: |
|
387 | log.warning('Mismatch between the current repo: %s, and target %s', | |
|
388 | self.db_repo_name, pull_request_at_ver.target_repo.repo_name) | |
|
354 | 389 | raise HTTPNotFound() |
|
355 | 390 | |
|
356 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( | |
|
357 | pull_request_at_ver) | |
|
391 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver) | |
|
358 | 392 | |
|
359 | 393 | c.pull_request = pull_request_display_obj |
|
360 | 394 | c.renderer = pull_request_at_ver.description_renderer or c.renderer |
|
361 | 395 | c.pull_request_latest = pull_request_latest |
|
362 | 396 | |
|
363 | if compare or (at_version and not at_version == 'latest'): | |
|
397 | # inject latest version | |
|
398 | latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest) | |
|
399 | c.versions = versions + [latest_ver] | |
|
400 | ||
|
401 | if compare or (at_version and not at_version == PullRequest.LATEST_VER): | |
|
364 | 402 | c.allowed_to_change_status = False |
|
365 | 403 | c.allowed_to_update = False |
|
366 | 404 | c.allowed_to_merge = False |
|
367 | 405 | c.allowed_to_delete = False |
|
368 | 406 | c.allowed_to_comment = False |
|
369 | 407 | c.allowed_to_close = False |
|
370 | 408 | else: |
|
371 | 409 | can_change_status = PullRequestModel().check_user_change_status( |
|
372 | 410 | pull_request_at_ver, self._rhodecode_user) |
|
373 | 411 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
374 | 412 | |
|
375 | 413 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
376 | 414 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
377 | 415 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
378 | 416 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
379 | 417 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
380 | 418 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
381 | 419 | c.allowed_to_comment = not pr_closed |
|
382 | 420 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
383 | 421 | |
|
384 | 422 | c.forbid_adding_reviewers = False |
|
385 | 423 | c.forbid_author_to_review = False |
|
386 | 424 | c.forbid_commit_author_to_review = False |
|
387 | 425 | |
|
388 | 426 | if pull_request_latest.reviewer_data and \ |
|
389 | 427 | 'rules' in pull_request_latest.reviewer_data: |
|
390 | 428 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
391 | 429 | try: |
|
392 | c.forbid_adding_reviewers = rules.get( | |
|
393 | 'forbid_adding_reviewers') | |
|
394 | c.forbid_author_to_review = rules.get( | |
|
395 | 'forbid_author_to_review') | |
|
396 | c.forbid_commit_author_to_review = rules.get( | |
|
397 | 'forbid_commit_author_to_review') | |
|
430 | c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers') | |
|
431 | c.forbid_author_to_review = rules.get('forbid_author_to_review') | |
|
432 | c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review') | |
|
398 | 433 | except Exception: |
|
399 | 434 | pass |
|
400 | 435 | |
|
401 | 436 | # check merge capabilities |
|
402 | 437 | _merge_check = MergeCheck.validate( |
|
403 | 438 | pull_request_latest, auth_user=self._rhodecode_user, |
|
404 | 439 | translator=self.request.translate, |
|
405 | 440 | force_shadow_repo_refresh=force_refresh) |
|
406 | 441 | |
|
407 | 442 | c.pr_merge_errors = _merge_check.error_details |
|
408 | 443 | c.pr_merge_possible = not _merge_check.failed |
|
409 | 444 | c.pr_merge_message = _merge_check.merge_msg |
|
410 | 445 | c.pr_merge_source_commit = _merge_check.source_commit |
|
411 | 446 | c.pr_merge_target_commit = _merge_check.target_commit |
|
412 | 447 | |
|
413 | 448 | c.pr_merge_info = MergeCheck.get_merge_conditions( |
|
414 | 449 | pull_request_latest, translator=self.request.translate) |
|
415 | 450 | |
|
416 | 451 | c.pull_request_review_status = _merge_check.review_status |
|
417 | 452 | if merge_checks: |
|
418 | 453 | self.request.override_renderer = \ |
|
419 | 454 | 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' |
|
420 | 455 | return self._get_template_context(c) |
|
421 | 456 | |
|
422 | comments_model = CommentsModel() | |
|
457 | c.allowed_reviewers = [obj.user_id for obj in pull_request.reviewers if obj.user] | |
|
423 | 458 | |
|
424 | 459 | # reviewers and statuses |
|
425 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()

426 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] | |
|
460 | c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data) | |
|
461 | c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []}) | |
|
427 | 462 | |
|
428 | # GENERAL COMMENTS with versions # | |
|
429 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) | |
|
430 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
|
431 | general_comments = q | |
|
463 | for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses(): | |
|
464 | member_reviewer = h.reviewer_as_json( | |
|
465 | member, reasons=reasons, mandatory=mandatory, | |
|
466 | user_group=review_obj.rule_user_group_data() | |
|
467 | ) | |
|
432 | 468 | |
|
433 | # pick comments we want to render at current version | |
|
434 | c.comment_versions = comments_model.aggregate_comments( | |
|
435 | general_comments, versions, c.at_version_num) | |
|
436 | c.comments = c.comment_versions[c.at_version_num]['until'] | |
|
469 | current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED | |
|
470 | member_reviewer['review_status'] = current_review_status | |
|
471 | member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status) | |
|
472 | member_reviewer['allowed_to_update'] = c.allowed_to_update | |
|
473 | c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer) | |
|
437 | 474 | |
|
438 | # INLINE COMMENTS with versions # | |
|
439 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) | |
|
440 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
|
441 | inline_comments = q | |
|
475 | c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json) | |
|
442 | 476 | |
|
443 | c.inline_versions = comments_model.aggregate_comments( | |
|
444 | inline_comments, versions, c.at_version_num, inline=True) | |
|
477 | general_comments, inline_comments = \ | |
|
478 | self.register_comments_vars(c, pull_request_latest, versions) | |
|
445 | 479 | |
|
446 | 480 | # TODOs |
|
447 | 481 | c.unresolved_comments = CommentsModel() \ |
|
448 | .get_pull_request_unresolved_todos(pull_request) | |
|
482 | .get_pull_request_unresolved_todos(pull_request_latest) | |
|
449 | 483 | c.resolved_comments = CommentsModel() \ |
|
450 | .get_pull_request_resolved_todos(pull_request) | |
|
451 | ||
|
452 | # inject latest version | |
|
453 | latest_ver = PullRequest.get_pr_display_object( | |
|
454 | pull_request_latest, pull_request_latest) | |
|
455 | ||
|
456 | c.versions = versions + [latest_ver] | |
|
484 | .get_pull_request_resolved_todos(pull_request_latest) | |
|
457 | 485 | |
|
458 | 486 | # if we use version, then do not show later comments |
|
459 | 487 | # than current version |
|
460 | 488 | display_inline_comments = collections.defaultdict( |
|
461 | 489 | lambda: collections.defaultdict(list)) |
|
462 | 490 | for co in inline_comments: |
|
463 | 491 | if c.at_version_num: |
|
464 | 492 | # pick comments that are at least UPTO given version, so we |
|
465 | 493 | # don't render comments for higher version |
|
466 | 494 | should_render = co.pull_request_version_id and \ |
|
467 | 495 | co.pull_request_version_id <= c.at_version_num |
|
468 | 496 | else: |
|
469 | 497 | # showing all, for 'latest' |
|
470 | 498 | should_render = True |
|
471 | 499 | |
|
472 | 500 | if should_render: |
|
473 | 501 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
474 | 502 | |
|
475 | 503 | # load diff data into template context, if we use compare mode then |
|
476 | 504 | # diff is calculated based on changes between versions of PR |
|
477 | 505 | |
|
478 | 506 | source_repo = pull_request_at_ver.source_repo |
|
479 | 507 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
480 | 508 | |
|
481 | 509 | target_repo = pull_request_at_ver.target_repo |
|
482 | 510 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
483 | 511 | |
|
484 | 512 | if compare: |
|
485 | 513 | # in compare switch the diff base to latest commit from prev version |
|
486 | 514 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
487 | 515 | |
|
488 | 516 | # despite opening commits for bookmarks/branches/tags, we always |
|
489 | 517 | # convert this to rev to prevent changes after bookmark or branch change |
|
490 | 518 | c.source_ref_type = 'rev' |
|
491 | 519 | c.source_ref = source_ref_id |
|
492 | 520 | |
|
493 | 521 | c.target_ref_type = 'rev' |
|
494 | 522 | c.target_ref = target_ref_id |
|
495 | 523 | |
|
496 | 524 | c.source_repo = source_repo |
|
497 | 525 | c.target_repo = target_repo |
|
498 | 526 | |
|
499 | 527 | c.commit_ranges = [] |
|
500 | 528 | source_commit = EmptyCommit() |
|
501 | 529 | target_commit = EmptyCommit() |
|
502 | 530 | c.missing_requirements = False |
|
503 | 531 | |
|
504 | 532 | source_scm = source_repo.scm_instance() |
|
505 | 533 | target_scm = target_repo.scm_instance() |
|
506 | 534 | |
|
507 | 535 | shadow_scm = None |
|
508 | 536 | try: |
|
509 | 537 | shadow_scm = pull_request_latest.get_shadow_repo() |
|
510 | 538 | except Exception: |
|
511 | 539 | log.debug('Failed to get shadow repo', exc_info=True) |
|
512 | 540 | # try first the existing source_repo, and then shadow |
|
513 | 541 | # repo if we can obtain one |
|
514 | 542 | commits_source_repo = source_scm |
|
515 | 543 | if shadow_scm: |
|
516 | 544 | commits_source_repo = shadow_scm |
|
517 | 545 | |
|
518 | 546 | c.commits_source_repo = commits_source_repo |
|
519 | 547 | c.ancestor = None # set it to None, to hide it from PR view |
|
520 | 548 | |
|
521 | 549 | # empty version means latest, so we keep this to prevent |
|
522 | 550 | # double caching |
|
523 | version_normalized = version or 'latest'

524 | from_version_normalized = from_version or 'latest'

551 | version_normalized = version or PullRequest.LATEST_VER | |
|
552 | from_version_normalized = from_version or PullRequest.LATEST_VER | |
|
525 | 553 | |
|
526 | 554 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) |
|
527 | 555 | cache_file_path = diff_cache_exist( |
|
528 | 556 | cache_path, 'pull_request', pull_request_id, version_normalized, |
|
529 | 557 | from_version_normalized, source_ref_id, target_ref_id, |
|
530 | 558 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
531 | 559 | |
|
532 | 560 | caching_enabled = self._is_diff_cache_enabled(c.target_repo) |
|
533 | 561 | force_recache = self.get_recache_flag() |
|
534 | 562 | |
|
535 | 563 | cached_diff = None |
|
536 | 564 | if caching_enabled: |
|
537 | 565 | cached_diff = load_cached_diff(cache_file_path) |
|
538 | 566 | |
|
539 | 567 | has_proper_commit_cache = ( |
|
540 | 568 | cached_diff and cached_diff.get('commits') |
|
541 | 569 | and len(cached_diff.get('commits', [])) == 5 |
|
542 | 570 | and cached_diff.get('commits')[0] |
|
543 | 571 | and cached_diff.get('commits')[3]) |
|
544 | 572 | |
|
545 | 573 | if not force_recache and not c.range_diff_on and has_proper_commit_cache: |
|
546 | 574 | diff_commit_cache = \ |
|
547 | 575 | (ancestor_commit, commit_cache, missing_requirements, |
|
548 | 576 | source_commit, target_commit) = cached_diff['commits'] |
|
549 | 577 | else: |
|
550 | 578 | # NOTE(marcink): we reach potentially unreachable errors when a PR has |
|
551 | 579 | # merge errors resulting in potentially hidden commits in the shadow repo. |
|
552 | 580 | maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \ |
|
553 | 581 | and _merge_check.merge_response |
|
554 | 582 | maybe_unreachable = maybe_unreachable \ |
|
555 | 583 | and _merge_check.merge_response.metadata.get('unresolved_files') |
|
556 | 584 | log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation") |
|
557 | 585 | diff_commit_cache = \ |
|
558 | 586 | (ancestor_commit, commit_cache, missing_requirements, |
|
559 | 587 | source_commit, target_commit) = self.get_commits( |
|
560 | 588 | commits_source_repo, |
|
561 | 589 | pull_request_at_ver, |
|
562 | 590 | source_commit, |
|
563 | 591 | source_ref_id, |
|
564 | 592 | source_scm, |
|
565 | 593 | target_commit, |
|
566 | 594 | target_ref_id, |
|
567 | 595 | target_scm, |
|
568 | 596 | maybe_unreachable=maybe_unreachable) |
|
569 | 597 | |
|
570 | 598 | # register our commit range |
|
571 | 599 | for comm in commit_cache.values(): |
|
572 | 600 | c.commit_ranges.append(comm) |
|
573 | 601 | |
|
574 | 602 | c.missing_requirements = missing_requirements |
|
575 | 603 | c.ancestor_commit = ancestor_commit |
|
576 | 604 | c.statuses = source_repo.statuses( |
|
577 | 605 | [x.raw_id for x in c.commit_ranges]) |
|
578 | 606 | |
|
579 | 607 | # auto collapse if we have more than limit |
|
580 | 608 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
581 | 609 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
582 | 610 | c.compare_mode = compare |
|
583 | 611 | |
|
584 | 612 | # diff_limit is the old behavior, will cut off the whole diff |
|
585 | 613 | # if the limit is applied otherwise will just hide the |
|
586 | 614 | # big files from the front-end |
|
587 | 615 | diff_limit = c.visual.cut_off_limit_diff |
|
588 | 616 | file_limit = c.visual.cut_off_limit_file |
|
589 | 617 | |
|
590 | 618 | c.missing_commits = False |
|
591 | 619 | if (c.missing_requirements |
|
592 | 620 | or isinstance(source_commit, EmptyCommit) |
|
593 | 621 | or source_commit == target_commit): |
|
594 | 622 | |
|
595 | 623 | c.missing_commits = True |
|
596 | 624 | else: |
|
597 | 625 | c.inline_comments = display_inline_comments |
|
598 | 626 | |
|
599 | 627 | use_ancestor = True |
|
600 | 628 | if from_version_normalized != version_normalized: |
|
601 | 629 | use_ancestor = False |
|
602 | 630 | |
|
603 | 631 | has_proper_diff_cache = cached_diff and cached_diff.get('commits') |
|
604 | 632 | if not force_recache and has_proper_diff_cache: |
|
605 | 633 | c.diffset = cached_diff['diff'] |
|
606 | 634 | else: |
|
607 | 635 | try: |
|
608 | 636 | c.diffset = self._get_diffset( |
|
609 | 637 | c.source_repo.repo_name, commits_source_repo, |
|
610 | 638 | c.ancestor_commit, |
|
611 | 639 | source_ref_id, target_ref_id, |
|
612 | 640 | target_commit, source_commit, |
|
613 | 641 | diff_limit, file_limit, c.fulldiff, |
|
614 | 642 | hide_whitespace_changes, diff_context, |
|
615 | 643 | use_ancestor=use_ancestor |
|
616 | ) | |
|
644 | ) | |
|
617 | 645 | |
|
618 | 646 | # save cached diff |
|
619 | 647 | if caching_enabled: |
|
620 | 648 | cache_diff(cache_file_path, c.diffset, diff_commit_cache) |
|
621 | 649 | except CommitDoesNotExistError: |
|
622 | 650 | log.exception('Failed to generate diffset') |
|
623 | 651 | c.missing_commits = True |
|
624 | 652 | |
|
625 | 653 | if not c.missing_commits: |
|
626 | 654 | |
|
627 | 655 | c.limited_diff = c.diffset.limited_diff |
|
628 | 656 | |
|
629 | 657 | # calculate removed files that are bound to comments |
|
630 | 658 | comment_deleted_files = [ |
|
631 | 659 | fname for fname in display_inline_comments |
|
632 | 660 | if fname not in c.diffset.file_stats] |
|
633 | 661 | |
|
634 | 662 | c.deleted_files_comments = collections.defaultdict(dict) |
|
635 | 663 | for fname, per_line_comments in display_inline_comments.items(): |
|
636 | 664 | if fname in comment_deleted_files: |
|
637 | 665 | c.deleted_files_comments[fname]['stats'] = 0 |
|
638 | 666 | c.deleted_files_comments[fname]['comments'] = list() |
|
639 | 667 | for lno, comments in per_line_comments.items(): |
|
640 | 668 | c.deleted_files_comments[fname]['comments'].extend(comments) |
|
641 | 669 | |
|
642 | 670 | # maybe calculate the range diff |
|
643 | 671 | if c.range_diff_on: |
|
644 | 672 | # TODO(marcink): set whitespace/context |
|
645 | 673 | context_lcl = 3 |
|
646 | 674 | ign_whitespace_lcl = False |
|
647 | 675 | |
|
648 | 676 | for commit in c.commit_ranges: |
|
649 | 677 | commit2 = commit |
|
650 | 678 | commit1 = commit.first_parent |
|
651 | 679 | |
|
652 | 680 | range_diff_cache_file_path = diff_cache_exist( |
|
653 | 681 | cache_path, 'diff', commit.raw_id, |
|
654 | 682 | ign_whitespace_lcl, context_lcl, c.fulldiff) |
|
655 | 683 | |
|
656 | 684 | cached_diff = None |
|
657 | 685 | if caching_enabled: |
|
658 | 686 | cached_diff = load_cached_diff(range_diff_cache_file_path) |
|
659 | 687 | |
|
660 | 688 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
661 | 689 | if not force_recache and has_proper_diff_cache: |
|
662 | 690 | diffset = cached_diff['diff'] |
|
663 | 691 | else: |
|
664 | 692 | diffset = self._get_range_diffset( |
|
665 | 693 | commits_source_repo, source_repo, |
|
666 | 694 | commit1, commit2, diff_limit, file_limit, |
|
667 | 695 | c.fulldiff, ign_whitespace_lcl, context_lcl |
|
668 | 696 | ) |
|
669 | 697 | |
|
670 | 698 | # save cached diff |
|
671 | 699 | if caching_enabled: |
|
672 | 700 | cache_diff(range_diff_cache_file_path, diffset, None) |
|
673 | 701 | |
|
674 | 702 | c.changes[commit.raw_id] = diffset |
|
675 | 703 | |
|
676 | 704 | # this is a hack to properly display links, when creating PR, the |
|
677 | 705 | # compare view and others uses different notation, and |
|
678 | 706 | # compare_commits.mako renders links based on the target_repo. |
|
679 | 707 | # We need to swap that here to generate it properly on the html side |
|
680 | 708 | c.target_repo = c.source_repo |
|
681 | 709 | |
|
682 | 710 | c.commit_statuses = ChangesetStatus.STATUSES |
|
683 | 711 | |
|
684 | 712 | c.show_version_changes = not pr_closed |
|
685 | 713 | if c.show_version_changes: |
|
686 | 714 | cur_obj = pull_request_at_ver |
|
687 | 715 | prev_obj = prev_pull_request_at_ver |
|
688 | 716 | |
|
689 | 717 | old_commit_ids = prev_obj.revisions |
|
690 | 718 | new_commit_ids = cur_obj.revisions |
|
691 | 719 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
692 | 720 | old_commit_ids, new_commit_ids) |
|
693 | 721 | c.commit_changes_summary = commit_changes |
|
694 | 722 | |
|
695 | 723 | # calculate the diff for commits between versions |
|
696 | 724 | c.commit_changes = [] |
|
697 | 725 | |
|
698 | 726 | def mark(cs, fw): |
|
699 | 727 | return list(h.itertools.izip_longest([], cs, fillvalue=fw)) |
|
700 | 728 | |
|
701 | 729 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
702 | 730 | + mark(commit_changes.removed, 'r') \ |
|
703 | 731 | + mark(commit_changes.common, 'c'): |
|
704 | 732 | |
|
705 | 733 | if raw_id in commit_cache: |
|
706 | 734 | commit = commit_cache[raw_id] |
|
707 | 735 | else: |
|
708 | 736 | try: |
|
709 | 737 | commit = commits_source_repo.get_commit(raw_id) |
|
710 | 738 | except CommitDoesNotExistError: |
|
711 | 739 | # in case we fail extracting still use "dummy" commit |
|
712 | 740 | # for display in commit diff |
|
713 | 741 | commit = h.AttributeDict( |
|
714 | 742 | {'raw_id': raw_id, |
|
715 | 743 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
716 | 744 | c.commit_changes.append([c_type, commit]) |
|
717 | 745 | |
|
718 | 746 | # current user review statuses for each version |
|
719 | 747 | c.review_versions = {} |
|
720 | if self._rhodecode_user.user_id in allowed_reviewers: | |
|
748 | if self._rhodecode_user.user_id in c.allowed_reviewers: | |
|
721 | 749 | for co in general_comments: |
|
722 | 750 | if co.author.user_id == self._rhodecode_user.user_id: |
|
723 | 751 | status = co.status_change |
|
724 | 752 | if status: |
|
725 | 753 | _ver_pr = status[0].comment.pull_request_version_id |
|
726 | 754 | c.review_versions[_ver_pr] = status[0] |
|
727 | 755 | |
|
728 | 756 | return self._get_template_context(c) |
|
729 | 757 | |
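
One detail of pull_request_show() above worth spelling out: both version and from_version are normalized to PullRequest.LATEST_VER before they enter the diff cache lookup, so "no version requested" and "latest explicitly requested" share one cache entry, and the key also folds in the source/target ref ids plus the whitespace, context and full-diff flags. The sketch below only illustrates that idea; the real key construction happens inside diff_cache_exist(), which is not shown in this diff:

    import hashlib

    LATEST_VER = 'latest'

    def diff_cache_key(pull_request_id, version, from_version,
                       source_ref_id, target_ref_id,
                       hide_whitespace, context, full_diff):
        # Normalize the missing-version case so both spellings of the same
        # request produce the same key.
        version = version or LATEST_VER
        from_version = from_version or LATEST_VER
        parts = ['pull_request', str(pull_request_id), str(version),
                 str(from_version), str(source_ref_id), str(target_ref_id),
                 str(hide_whitespace), str(context), str(full_diff)]
        return hashlib.sha1('_'.join(parts).encode('utf8')).hexdigest()
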
|
730 | 758 | def get_commits( |
|
731 | 759 | self, commits_source_repo, pull_request_at_ver, source_commit, |
|
732 | 760 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm, |
|
733 | 761 | maybe_unreachable=False): |
|
734 | 762 | |
|
735 | 763 | commit_cache = collections.OrderedDict() |
|
736 | 764 | missing_requirements = False |
|
737 | 765 | |
|
738 | 766 | try: |
|
739 | 767 | pre_load = ["author", "date", "message", "branch", "parents"] |
|
740 | 768 | |
|
741 | 769 | pull_request_commits = pull_request_at_ver.revisions |
|
742 | 770 | log.debug('Loading %s commits from %s', |
|
743 | 771 | len(pull_request_commits), commits_source_repo) |
|
744 | 772 | |
|
745 | 773 | for rev in pull_request_commits: |
|
746 | 774 | comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load, |
|
747 | 775 | maybe_unreachable=maybe_unreachable) |
|
748 | 776 | commit_cache[comm.raw_id] = comm |
|
749 | 777 | |
|
750 | 778 | # Order here matters, we first need to get target, and then |
|
751 | 779 | # the source |
|
752 | 780 | target_commit = commits_source_repo.get_commit( |
|
753 | 781 | commit_id=safe_str(target_ref_id)) |
|
754 | 782 | |
|
755 | 783 | source_commit = commits_source_repo.get_commit( |
|
756 | 784 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
757 | 785 | except CommitDoesNotExistError: |
|
758 | 786 | log.warning('Failed to get commit from `{}` repo'.format( |
|
759 | 787 | commits_source_repo), exc_info=True) |
|
760 | 788 | except RepositoryRequirementError: |
|
761 | 789 | log.warning('Failed to get all required data from repo', exc_info=True) |
|
762 | 790 | missing_requirements = True |
|
763 | 791 | |
|
764 | 792 | pr_ancestor_id = pull_request_at_ver.common_ancestor_id |
|
765 | 793 | |
|
766 | 794 | try: |
|
767 | 795 | ancestor_commit = source_scm.get_commit(pr_ancestor_id) |
|
768 | 796 | except Exception: |
|
769 | 797 | ancestor_commit = None |
|
770 | 798 | |
|
771 | 799 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit |
|
772 | 800 | |
|
773 | 801 | def assure_not_empty_repo(self): |
|
774 | 802 | _ = self.request.translate |
|
775 | 803 | |
|
776 | 804 | try: |
|
777 | 805 | self.db_repo.scm_instance().get_commit() |
|
778 | 806 | except EmptyRepositoryError: |
|
779 | 807 | h.flash(h.literal(_('There are no commits yet')), |
|
780 | 808 | category='warning') |
|
781 | 809 | raise HTTPFound( |
|
782 | 810 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) |
|
783 | 811 | |
|
784 | 812 | @LoginRequired() |
|
785 | 813 | @NotAnonymous() |
|
786 | 814 | @HasRepoPermissionAnyDecorator( |
|
787 | 815 | 'repository.read', 'repository.write', 'repository.admin') |
|
788 | 816 | @view_config( |
|
789 | 817 | route_name='pullrequest_new', request_method='GET', |
|
790 | 818 | renderer='rhodecode:templates/pullrequests/pullrequest.mako') |
|
791 | 819 | def pull_request_new(self): |
|
792 | 820 | _ = self.request.translate |
|
793 | 821 | c = self.load_default_context() |
|
794 | 822 | |
|
795 | 823 | self.assure_not_empty_repo() |
|
796 | 824 | source_repo = self.db_repo |
|
797 | 825 | |
|
798 | 826 | commit_id = self.request.GET.get('commit') |
|
799 | 827 | branch_ref = self.request.GET.get('branch') |
|
800 | 828 | bookmark_ref = self.request.GET.get('bookmark') |
|
801 | 829 | |
|
802 | 830 | try: |
|
803 | 831 | source_repo_data = PullRequestModel().generate_repo_data( |
|
804 | 832 | source_repo, commit_id=commit_id, |
|
805 | 833 | branch=branch_ref, bookmark=bookmark_ref, |
|
806 | 834 | translator=self.request.translate) |
|
807 | 835 | except CommitDoesNotExistError as e: |
|
808 | 836 | log.exception(e) |
|
809 | 837 | h.flash(_('Commit does not exist'), 'error') |
|
810 | 838 | raise HTTPFound( |
|
811 | 839 | h.route_path('pullrequest_new', repo_name=source_repo.repo_name)) |
|
812 | 840 | |
|
813 | 841 | default_target_repo = source_repo |
|
814 | 842 | |
|
815 | 843 | if source_repo.parent and c.has_origin_repo_read_perm: |
|
816 | 844 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
817 | 845 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
818 | 846 | # change default if we have a parent repo |
|
819 | 847 | default_target_repo = source_repo.parent |
|
820 | 848 | |
|
821 | 849 | target_repo_data = PullRequestModel().generate_repo_data( |
|
822 | 850 | default_target_repo, translator=self.request.translate) |
|
823 | 851 | |
|
824 | 852 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
825 | 853 | title_source_ref = '' |
|
826 | 854 | if selected_source_ref: |
|
827 | 855 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
828 | 856 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
829 | 857 | source=source_repo.repo_name, |
|
830 | 858 | source_ref=title_source_ref, |
|
831 | 859 | target=default_target_repo.repo_name |
|
832 | 860 | ) |
|
833 | 861 | |
|
834 | 862 | c.default_repo_data = { |
|
835 | 863 | 'source_repo_name': source_repo.repo_name, |
|
836 | 864 | 'source_refs_json': json.dumps(source_repo_data), |
|
837 | 865 | 'target_repo_name': default_target_repo.repo_name, |
|
838 | 866 | 'target_refs_json': json.dumps(target_repo_data), |
|
839 | 867 | } |
|
840 | 868 | c.default_source_ref = selected_source_ref |
|
841 | 869 | |
|
842 | 870 | return self._get_template_context(c) |
|
843 | 871 | |
|
844 | 872 | @LoginRequired() |
|
845 | 873 | @NotAnonymous() |
|
846 | 874 | @HasRepoPermissionAnyDecorator( |
|
847 | 875 | 'repository.read', 'repository.write', 'repository.admin') |
|
848 | 876 | @view_config( |
|
849 | 877 | route_name='pullrequest_repo_refs', request_method='GET', |
|
850 | 878 | renderer='json_ext', xhr=True) |
|
851 | 879 | def pull_request_repo_refs(self): |
|
852 | 880 | self.load_default_context() |
|
853 | 881 | target_repo_name = self.request.matchdict['target_repo_name'] |
|
854 | 882 | repo = Repository.get_by_repo_name(target_repo_name) |
|
855 | 883 | if not repo: |
|
856 | 884 | raise HTTPNotFound() |
|
857 | 885 | |
|
858 | 886 | target_perm = HasRepoPermissionAny( |
|
859 | 887 | 'repository.read', 'repository.write', 'repository.admin')( |
|
860 | 888 | target_repo_name) |
|
861 | 889 | if not target_perm: |
|
862 | 890 | raise HTTPNotFound() |
|
863 | 891 | |
|
864 | 892 | return PullRequestModel().generate_repo_data( |
|
865 | 893 | repo, translator=self.request.translate) |
|
866 | 894 | |
|
867 | 895 | @LoginRequired() |
|
868 | 896 | @NotAnonymous() |
|
869 | 897 | @HasRepoPermissionAnyDecorator( |
|
870 | 898 | 'repository.read', 'repository.write', 'repository.admin') |
|
871 | 899 | @view_config( |
|
872 | 900 | route_name='pullrequest_repo_targets', request_method='GET', |
|
873 | 901 | renderer='json_ext', xhr=True) |
|
874 | 902 | def pullrequest_repo_targets(self): |
|
875 | 903 | _ = self.request.translate |
|
876 | 904 | filter_query = self.request.GET.get('query') |
|
877 | 905 | |
|
878 | 906 | # get the parents |
|
879 | 907 | parent_target_repos = [] |
|
880 | 908 | if self.db_repo.parent: |
|
881 | 909 | parents_query = Repository.query() \ |
|
882 | 910 | .order_by(func.length(Repository.repo_name)) \ |
|
883 | 911 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) |
|
884 | 912 | |
|
885 | 913 | if filter_query: |
|
886 | 914 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
887 | 915 | parents_query = parents_query.filter( |
|
888 | 916 | Repository.repo_name.ilike(ilike_expression)) |
|
889 | 917 | parents = parents_query.limit(20).all() |
|
890 | 918 | |
|
891 | 919 | for parent in parents: |
|
892 | 920 | parent_vcs_obj = parent.scm_instance() |
|
893 | 921 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
894 | 922 | parent_target_repos.append(parent) |
|
895 | 923 | |
|
896 | 924 | # get other forks, and repo itself |
|
897 | 925 | query = Repository.query() \ |
|
898 | 926 | .order_by(func.length(Repository.repo_name)) \ |
|
899 | 927 | .filter( |
|
900 | 928 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself |
|
901 | 929 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo |
|
902 | 930 | ) \ |
|
903 | 931 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) |
|
904 | 932 | |
|
905 | 933 | if filter_query: |
|
906 | 934 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
907 | 935 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) |
|
908 | 936 | |
|
909 | 937 | limit = max(20 - len(parent_target_repos), 5) # not less then 5 |
|
910 | 938 | target_repos = query.limit(limit).all() |
|
911 | 939 | |
|
912 | 940 | all_target_repos = target_repos + parent_target_repos |
|
913 | 941 | |
|
914 | 942 | repos = [] |
|
915 | 943 | # This checks permissions to the repositories |
|
916 | 944 | for obj in ScmModel().get_repos(all_target_repos): |
|
917 | 945 | repos.append({ |
|
918 | 946 | 'id': obj['name'], |
|
919 | 947 | 'text': obj['name'], |
|
920 | 948 | 'type': 'repo', |
|
921 | 949 | 'repo_id': obj['dbrepo']['repo_id'], |
|
922 | 950 | 'repo_type': obj['dbrepo']['repo_type'], |
|
923 | 951 | 'private': obj['dbrepo']['private'], |
|
924 | 952 | |
|
925 | 953 | }) |
|
926 | 954 | |
|
927 | 955 | data = { |
|
928 | 956 | 'more': False, |
|
929 | 957 | 'results': [{ |
|
930 | 958 | 'text': _('Repositories'), |
|
931 | 959 | 'children': repos |
|
932 | 960 | }] if repos else [] |
|
933 | 961 | } |
|
934 | 962 | return data |
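
The limit arithmetic in pullrequest_repo_targets() above keeps the result page useful even when many parent candidates exist: parents are listed first, and the remaining query for forks and the repo itself still gets at least 5 slots. A tiny restatement of that rule (the function name is illustrative):

    def target_repo_limit(parent_count, page_size=20, minimum=5):
        # e.g. 3 parents -> 17 slots left for forks/self; 18 parents -> still 5
        return max(page_size - parent_count, minimum)
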
|
935 | 963 | |
|
936 | 964 | @LoginRequired() |
|
937 | 965 | @NotAnonymous() |
|
938 | 966 | @HasRepoPermissionAnyDecorator( |
|
939 | 967 | 'repository.read', 'repository.write', 'repository.admin') |
|
968 | @view_config( | |
|
969 | route_name='pullrequest_comments', request_method='POST', | |
|
970 | renderer='string', xhr=True) | |
|
971 | def pullrequest_comments(self): | |
|
972 | self.load_default_context() | |
|
973 | ||
|
974 | pull_request = PullRequest.get_or_404( | |
|
975 | self.request.matchdict['pull_request_id']) | |
|
976 | pull_request_id = pull_request.pull_request_id | |
|
977 | version = self.request.GET.get('version') | |
|
978 | ||
|
979 | _render = self.request.get_partial_renderer( | |
|
980 | 'rhodecode:templates/base/sidebar.mako') | |
|
981 | c = _render.get_call_context() | |
|
982 | ||
|
983 | (pull_request_latest, | |
|
984 | pull_request_at_ver, | |
|
985 | pull_request_display_obj, | |
|
986 | at_version) = PullRequestModel().get_pr_version( | |
|
987 | pull_request_id, version=version) | |
|
988 | versions = pull_request_display_obj.versions() | |
|
989 | latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest) | |
|
990 | c.versions = versions + [latest_ver] | |
|
991 | ||
|
992 | c.at_version = at_version | |
|
993 | c.at_version_num = (at_version | |
|
994 | if at_version and at_version != PullRequest.LATEST_VER | |
|
995 | else None) | |
|
996 | ||
|
997 | self.register_comments_vars(c, pull_request_latest, versions) | |
|
998 | all_comments = c.inline_comments_flat + c.comments | |
|
999 | ||
|
1000 | existing_ids = filter( | |
|
1001 | lambda e: e, map(safe_int, self.request.POST.getall('comments[]'))) | |
|
1002 | return _render('comments_table', all_comments, len(all_comments), | |
|
1003 | existing_ids=existing_ids) | |
|
1004 | ||
|
1005 | @LoginRequired() | |
|
1006 | @NotAnonymous() | |
|
1007 | @HasRepoPermissionAnyDecorator( | |
|
1008 | 'repository.read', 'repository.write', 'repository.admin') | |
|
1009 | @view_config( | |
|
1010 | route_name='pullrequest_todos', request_method='POST', | |
|
1011 | renderer='string', xhr=True) | |
|
1012 | def pullrequest_todos(self): | |
|
1013 | self.load_default_context() | |
|
1014 | ||
|
1015 | pull_request = PullRequest.get_or_404( | |
|
1016 | self.request.matchdict['pull_request_id']) | |
|
1017 | pull_request_id = pull_request.pull_request_id | |
|
1018 | version = self.request.GET.get('version') | |
|
1019 | ||
|
1020 | _render = self.request.get_partial_renderer( | |
|
1021 | 'rhodecode:templates/base/sidebar.mako') | |
|
1022 | c = _render.get_call_context() | |
|
1023 | (pull_request_latest, | |
|
1024 | pull_request_at_ver, | |
|
1025 | pull_request_display_obj, | |
|
1026 | at_version) = PullRequestModel().get_pr_version( | |
|
1027 | pull_request_id, version=version) | |
|
1028 | versions = pull_request_display_obj.versions() | |
|
1029 | latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest) | |
|
1030 | c.versions = versions + [latest_ver] | |
|
1031 | ||
|
1032 | c.at_version = at_version | |
|
1033 | c.at_version_num = (at_version | |
|
1034 | if at_version and at_version != PullRequest.LATEST_VER | |
|
1035 | else None) | |
|
1036 | ||
|
1037 | c.unresolved_comments = CommentsModel() \ | |
|
1038 | .get_pull_request_unresolved_todos(pull_request) | |
|
1039 | c.resolved_comments = CommentsModel() \ | |
|
1040 | .get_pull_request_resolved_todos(pull_request) | |
|
1041 | ||
|
1042 | all_comments = c.unresolved_comments + c.resolved_comments | |
|
1043 | existing_ids = filter( | |
|
1044 | lambda e: e, map(safe_int, self.request.POST.getall('comments[]'))) | |
|
1045 | return _render('comments_table', all_comments, len(c.unresolved_comments), | |
|
1046 | todo_comments=True, existing_ids=existing_ids) | |
|
1047 | ||
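A brief note on the two sidebar endpoints added above (pullrequest_comments and pullrequest_todos): both are POST/XHR views that re-render the comments_table partial from rhodecode:templates/base/sidebar.mako, and both accept the comment ids the client already displays via the comments[] parameter so the template can tell new entries from known ones. A minimal client-side sketch follows; the endpoint URL, host, and authentication handling are assumptions, while the comments[] parameter name and the XHR requirement come from the view code.

import requests

def fetch_sidebar_partial(http_session, url, known_comment_ids):
    # POST the ids we already show; the view returns the freshly rendered
    # 'comments_table' HTML partial (renderer='string', xhr=True).
    response = http_session.post(
        url,  # assumed to resolve to the pullrequest_comments/_todos route
        data=[('comments[]', comment_id) for comment_id in known_comment_ids],
        headers={'X-Requested-With': 'XMLHttpRequest'})
    response.raise_for_status()
    return response.text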
|
1048 | @LoginRequired() | |
|
1049 | @NotAnonymous() | |
|
1050 | @HasRepoPermissionAnyDecorator( | |
|
1051 | 'repository.read', 'repository.write', 'repository.admin') | |
|
940 | 1052 | @CSRFRequired() |
|
941 | 1053 | @view_config( |
|
942 | 1054 | route_name='pullrequest_create', request_method='POST', |
|
943 | 1055 | renderer=None) |
|
944 | 1056 | def pull_request_create(self): |
|
945 | 1057 | _ = self.request.translate |
|
946 | 1058 | self.assure_not_empty_repo() |
|
947 | 1059 | self.load_default_context() |
|
948 | 1060 | |
|
949 | 1061 | controls = peppercorn.parse(self.request.POST.items()) |
|
950 | 1062 | |
|
951 | 1063 | try: |
|
952 | 1064 | form = PullRequestForm( |
|
953 | 1065 | self.request.translate, self.db_repo.repo_id)() |
|
954 | 1066 | _form = form.to_python(controls) |
|
955 | 1067 | except formencode.Invalid as errors: |
|
956 | 1068 | if errors.error_dict.get('revisions'): |
|
957 | 1069 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
958 | 1070 | elif errors.error_dict.get('pullrequest_title'): |
|
959 | 1071 | msg = errors.error_dict.get('pullrequest_title') |
|
960 | 1072 | else: |
|
961 | 1073 | msg = _('Error creating pull request: {}').format(errors) |
|
962 | 1074 | log.exception(msg) |
|
963 | 1075 | h.flash(msg, 'error') |
|
964 | 1076 | |
|
965 | 1077 | # would rather just go back to form ... |
|
966 | 1078 | raise HTTPFound( |
|
967 | 1079 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
968 | 1080 | |
|
969 | 1081 | source_repo = _form['source_repo'] |
|
970 | 1082 | source_ref = _form['source_ref'] |
|
971 | 1083 | target_repo = _form['target_repo'] |
|
972 | 1084 | target_ref = _form['target_ref'] |
|
973 | 1085 | commit_ids = _form['revisions'][::-1] |
|
974 | 1086 | common_ancestor_id = _form['common_ancestor'] |
|
975 | 1087 | |
|
976 | 1088 | # find the ancestor for this pr |
|
977 | 1089 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
978 | 1090 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
979 | 1091 | |
|
980 | 1092 | if not (source_db_repo or target_db_repo): |
|
981 | 1093 | h.flash(_('source_repo or target repo not found'), category='error') |
|
982 | 1094 | raise HTTPFound( |
|
983 | 1095 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
984 | 1096 | |
|
985 | 1097 | # re-check permissions again here |
|
986 | 1098 | # source_repo we must have read permissions |
|
987 | 1099 | |
|
988 | 1100 | source_perm = HasRepoPermissionAny( |
|
989 | 1101 | 'repository.read', 'repository.write', 'repository.admin')( |
|
990 | 1102 | source_db_repo.repo_name) |
|
991 | 1103 | if not source_perm: |
|
992 | 1104 | msg = _('Not Enough permissions to source repo `{}`.'.format( |
|
993 | 1105 | source_db_repo.repo_name)) |
|
994 | 1106 | h.flash(msg, category='error') |
|
995 | 1107 | # copy the args back to redirect |
|
996 | 1108 | org_query = self.request.GET.mixed() |
|
997 | 1109 | raise HTTPFound( |
|
998 | 1110 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
999 | 1111 | _query=org_query)) |
|
1000 | 1112 | |
|
1001 | 1113 | # target repo we must have read permissions, and also later on |
|
1002 | 1114 | # we want to check branch permissions here |
|
1003 | 1115 | target_perm = HasRepoPermissionAny( |
|
1004 | 1116 | 'repository.read', 'repository.write', 'repository.admin')( |
|
1005 | 1117 | target_db_repo.repo_name) |
|
1006 | 1118 | if not target_perm: |
|
1007 | 1119 | msg = _('Not Enough permissions to target repo `{}`.'.format( |
|
1008 | 1120 | target_db_repo.repo_name)) |
|
1009 | 1121 | h.flash(msg, category='error') |
|
1010 | 1122 | # copy the args back to redirect |
|
1011 | 1123 | org_query = self.request.GET.mixed() |
|
1012 | 1124 | raise HTTPFound( |
|
1013 | 1125 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1014 | 1126 | _query=org_query)) |
|
1015 | 1127 | |
|
1016 | 1128 | source_scm = source_db_repo.scm_instance() |
|
1017 | 1129 | target_scm = target_db_repo.scm_instance() |
|
1018 | 1130 | |
|
1019 | 1131 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
1020 | 1132 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
1021 | 1133 | |
|
1022 | 1134 | ancestor = source_scm.get_common_ancestor( |
|
1023 | 1135 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
1024 | 1136 | |
|
1025 | 1137 | # recalculate target ref based on ancestor |
|
1026 | 1138 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
1027 | 1139 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
1028 | 1140 | |
|
1029 | 1141 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1030 | 1142 | PullRequestModel().get_reviewer_functions() |
|
1031 | 1143 | |
|
1032 | 1144 | # recalculate reviewers logic, to make sure we can validate this |
|
1033 | 1145 | reviewer_rules = get_default_reviewers_data( |
|
1034 | 1146 | self._rhodecode_db_user, source_db_repo, |
|
1035 | 1147 | source_commit, target_db_repo, target_commit) |
|
1036 | 1148 | |
|
1037 | 1149 | given_reviewers = _form['review_members'] |
|
1038 | 1150 | reviewers = validate_default_reviewers( |
|
1039 | 1151 | given_reviewers, reviewer_rules) |
|
1040 | 1152 | |
|
1041 | 1153 | pullrequest_title = _form['pullrequest_title'] |
|
1042 | 1154 | title_source_ref = source_ref.split(':', 2)[1] |
|
1043 | 1155 | if not pullrequest_title: |
|
1044 | 1156 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
1045 | 1157 | source=source_repo, |
|
1046 | 1158 | source_ref=title_source_ref, |
|
1047 | 1159 | target=target_repo |
|
1048 | 1160 | ) |
|
1049 | 1161 | |
|
1050 | 1162 | description = _form['pullrequest_desc'] |
|
1051 | 1163 | description_renderer = _form['description_renderer'] |
|
1052 | 1164 | |
|
1053 | 1165 | try: |
|
1054 | 1166 | pull_request = PullRequestModel().create( |
|
1055 | 1167 | created_by=self._rhodecode_user.user_id, |
|
1056 | 1168 | source_repo=source_repo, |
|
1057 | 1169 | source_ref=source_ref, |
|
1058 | 1170 | target_repo=target_repo, |
|
1059 | 1171 | target_ref=target_ref, |
|
1060 | 1172 | revisions=commit_ids, |
|
1061 | 1173 | common_ancestor_id=common_ancestor_id, |
|
1062 | 1174 | reviewers=reviewers, |
|
1063 | 1175 | title=pullrequest_title, |
|
1064 | 1176 | description=description, |
|
1065 | 1177 | description_renderer=description_renderer, |
|
1066 | 1178 | reviewer_data=reviewer_rules, |
|
1067 | 1179 | auth_user=self._rhodecode_user |
|
1068 | 1180 | ) |
|
1069 | 1181 | Session().commit() |
|
1070 | 1182 | |
|
1071 | 1183 | h.flash(_('Successfully opened new pull request'), |
|
1072 | 1184 | category='success') |
|
1073 | 1185 | except Exception: |
|
1074 | 1186 | msg = _('Error occurred during creation of this pull request.') |
|
1075 | 1187 | log.exception(msg) |
|
1076 | 1188 | h.flash(msg, category='error') |
|
1077 | 1189 | |
|
1078 | 1190 | # copy the args back to redirect |
|
1079 | 1191 | org_query = self.request.GET.mixed() |
|
1080 | 1192 | raise HTTPFound( |
|
1081 | 1193 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1082 | 1194 | _query=org_query)) |
|
1083 | 1195 | |
|
1084 | 1196 | raise HTTPFound( |
|
1085 | 1197 | h.route_path('pullrequest_show', repo_name=target_repo, |
|
1086 | 1198 | pull_request_id=pull_request.pull_request_id)) |
|
1087 | 1199 | |
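One step of pull_request_create() worth spelling out is the target_ref recalculation: the form submits refs as 'type:name:commit_id' strings, and the view replaces the commit id of the target ref with the common ancestor of the two heads before creating the pull request. A small illustration with made-up values (the sha strings below are placeholders, not taken from the document):

# Illustrative only: mimics the split/join done in pull_request_create().
submitted_target_ref = 'branch:default:ffffffffffff'   # placeholder sha
ancestor = '0123456789ab'                               # from get_common_ancestor()
target_ref_type, target_ref_name, __ = submitted_target_ref.split(':')
recalculated_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
assert recalculated_target_ref == 'branch:default:0123456789ab'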
|
1088 | 1200 | @LoginRequired() |
|
1089 | 1201 | @NotAnonymous() |
|
1090 | 1202 | @HasRepoPermissionAnyDecorator( |
|
1091 | 1203 | 'repository.read', 'repository.write', 'repository.admin') |
|
1092 | 1204 | @CSRFRequired() |
|
1093 | 1205 | @view_config( |
|
1094 | 1206 | route_name='pullrequest_update', request_method='POST', |
|
1095 | 1207 | renderer='json_ext') |
|
1096 | 1208 | def pull_request_update(self): |
|
1097 | 1209 | pull_request = PullRequest.get_or_404( |
|
1098 | 1210 | self.request.matchdict['pull_request_id']) |
|
1099 | 1211 | _ = self.request.translate |
|
1100 | 1212 | |
|
1101 | self.load_default_context() | |
|
1213 | c = self.load_default_context() | |
|
1102 | 1214 | redirect_url = None |
|
1103 | 1215 | |
|
1104 | 1216 | if pull_request.is_closed(): |
|
1105 | 1217 | log.debug('update: forbidden because pull request is closed') |
|
1106 | 1218 | msg = _(u'Cannot update closed pull requests.') |
|
1107 | 1219 | h.flash(msg, category='error') |
|
1108 | 1220 | return {'response': True, |
|
1109 | 1221 | 'redirect_url': redirect_url} |
|
1110 | 1222 | |
|
1111 | 1223 | is_state_changing = pull_request.is_state_changing() |
|
1224 | c.pr_broadcast_channel = '/repo${}$/pr/{}'.format( | |
|
1225 | pull_request.target_repo.repo_name, pull_request.pull_request_id) | |
|
1112 | 1226 | |
|
1113 | 1227 | # only owner or admin can update it |
|
1114 | 1228 | allowed_to_update = PullRequestModel().check_user_update( |
|
1115 | 1229 | pull_request, self._rhodecode_user) |
|
1116 | 1230 | if allowed_to_update: |
|
1117 | 1231 | controls = peppercorn.parse(self.request.POST.items()) |
|
1118 | 1232 | force_refresh = str2bool(self.request.POST.get('force_refresh')) |
|
1119 | 1233 | |
|
1120 | 1234 | if 'review_members' in controls: |
|
1121 | 1235 | self._update_reviewers( |
|
1122 | 1236 | pull_request, controls['review_members'], |
|
1123 | 1237 | pull_request.reviewer_data) |
|
1124 | 1238 | elif str2bool(self.request.POST.get('update_commits', 'false')): |
|
1125 | 1239 | if is_state_changing: |
|
1126 | 1240 | log.debug('commits update: forbidden because pull request is in state %s', |
|
1127 | 1241 | pull_request.pull_request_state) |
|
1128 | 1242 | msg = _(u'Cannot update pull requests commits in state other than `{}`. ' |
|
1129 | 1243 | u'Current state is: `{}`').format( |
|
1130 | 1244 | PullRequest.STATE_CREATED, pull_request.pull_request_state) |
|
1131 | 1245 | h.flash(msg, category='error') |
|
1132 | 1246 | return {'response': True, |
|
1133 | 1247 | 'redirect_url': redirect_url} |
|
1134 | 1248 | |
|
1135 | self._update_commits(pull_request) | |
|
1249 | self._update_commits(c, pull_request) | |
|
1136 | 1250 | if force_refresh: |
|
1137 | 1251 | redirect_url = h.route_path( |
|
1138 | 1252 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
1139 | 1253 | pull_request_id=pull_request.pull_request_id, |
|
1140 | 1254 | _query={"force_refresh": 1}) |
|
1141 | 1255 | elif str2bool(self.request.POST.get('edit_pull_request', 'false')): |
|
1142 | 1256 | self._edit_pull_request(pull_request) |
|
1143 | 1257 | else: |
|
1144 | 1258 | raise HTTPBadRequest() |
|
1145 | 1259 | |
|
1146 | 1260 | return {'response': True, |
|
1147 | 1261 | 'redirect_url': redirect_url} |
|
1148 | 1262 | raise HTTPForbidden() |
|
1149 | 1263 | |
|
1150 | 1264 | def _edit_pull_request(self, pull_request): |
|
1151 | 1265 | _ = self.request.translate |
|
1152 | 1266 | |
|
1153 | 1267 | try: |
|
1154 | 1268 | PullRequestModel().edit( |
|
1155 | 1269 | pull_request, |
|
1156 | 1270 | self.request.POST.get('title'), |
|
1157 | 1271 | self.request.POST.get('description'), |
|
1158 | 1272 | self.request.POST.get('description_renderer'), |
|
1159 | 1273 | self._rhodecode_user) |
|
1160 | 1274 | except ValueError: |
|
1161 | 1275 | msg = _(u'Cannot update closed pull requests.') |
|
1162 | 1276 | h.flash(msg, category='error') |
|
1163 | 1277 | return |
|
1164 | 1278 | else: |
|
1165 | 1279 | Session().commit() |
|
1166 | 1280 | |
|
1167 | 1281 | msg = _(u'Pull request title & description updated.') |
|
1168 | 1282 | h.flash(msg, category='success') |
|
1169 | 1283 | return |
|
1170 | 1284 | |
|
1171 | def _update_commits(self, pull_request): | |
|
1285 | def _update_commits(self, c, pull_request): | |
|
1172 | 1286 | _ = self.request.translate |
|
1173 | 1287 | |
|
1174 | 1288 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1175 | 1289 | resp = PullRequestModel().update_commits( |
|
1176 | 1290 | pull_request, self._rhodecode_db_user) |
|
1177 | 1291 | |
|
1178 | 1292 | if resp.executed: |
|
1179 | 1293 | |
|
1180 | 1294 | if resp.target_changed and resp.source_changed: |
|
1181 | 1295 | changed = 'target and source repositories' |
|
1182 | 1296 | elif resp.target_changed and not resp.source_changed: |
|
1183 | 1297 | changed = 'target repository' |
|
1184 | 1298 | elif not resp.target_changed and resp.source_changed: |
|
1185 | 1299 | changed = 'source repository' |
|
1186 | 1300 | else: |
|
1187 | 1301 | changed = 'nothing' |
|
1188 | 1302 | |
|
1189 | 1303 | msg = _(u'Pull request updated to "{source_commit_id}" with ' |
|
1190 | 1304 | u'{count_added} added, {count_removed} removed commits. ' |
|
1191 | 1305 | u'Source of changes: {change_source}') |
|
1192 | 1306 | msg = msg.format( |
|
1193 | 1307 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
1194 | 1308 | count_added=len(resp.changes.added), |
|
1195 | 1309 | count_removed=len(resp.changes.removed), |
|
1196 | 1310 | change_source=changed) |
|
1197 | 1311 | h.flash(msg, category='success') |
|
1198 | 1312 | |
|
1199 | channel = '/repo${}$/pr/{}'.format( | |
|
1200 | pull_request.target_repo.repo_name, pull_request.pull_request_id) | |
|
1201 | 1313 | message = msg + ( |
|
1202 | 1314 | ' - <a onclick="window.location.reload()">' |
|
1203 | 1315 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
1316 | ||
|
1317 | message_obj = { | |
|
1318 | 'message': message, | |
|
1319 | 'level': 'success', | |
|
1320 | 'topic': '/notifications' | |
|
1321 | } | |
|
1322 | ||
|
1204 | 1323 | channelstream.post_message( |
|
1205 | channel, message, self._rhodecode_user.username, | |
|
1324 | c.pr_broadcast_channel, message_obj, self._rhodecode_user.username, | |
|
1206 | 1325 | registry=self.request.registry) |
|
1207 | 1326 | else: |
|
1208 | 1327 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
1209 | 1328 | warning_reasons = [ |
|
1210 | 1329 | UpdateFailureReason.NO_CHANGE, |
|
1211 | 1330 | UpdateFailureReason.WRONG_REF_TYPE, |
|
1212 | 1331 | ] |
|
1213 | 1332 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
1214 | 1333 | h.flash(msg, category=category) |
|
1215 | 1334 | |
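The channelstream change in _update_commits() above replaces the plain message string with a structured payload and reuses the broadcast channel computed in pull_request_update(). A condensed sketch of what gets posted; the channel format and the dict keys are copied from the diff, while the helper wrapper itself is only for illustration.

def build_pr_update_broadcast(target_repo_name, pull_request_id, html_message):
    # Channel naming mirrors c.pr_broadcast_channel in pull_request_update().
    channel = '/repo${}$/pr/{}'.format(target_repo_name, pull_request_id)
    message_obj = {
        'message': html_message,   # flash text plus the "Reload page" link
        'level': 'success',
        'topic': '/notifications',
    }
    return channel, message_obj

# used as: channelstream.post_message(channel, message_obj, username, registry=registry)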
|
1216 | 1335 | @LoginRequired() |
|
1217 | 1336 | @NotAnonymous() |
|
1218 | 1337 | @HasRepoPermissionAnyDecorator( |
|
1219 | 1338 | 'repository.read', 'repository.write', 'repository.admin') |
|
1220 | 1339 | @CSRFRequired() |
|
1221 | 1340 | @view_config( |
|
1222 | 1341 | route_name='pullrequest_merge', request_method='POST', |
|
1223 | 1342 | renderer='json_ext') |
|
1224 | 1343 | def pull_request_merge(self): |
|
1225 | 1344 | """ |
|
1226 | 1345 | Merge will perform a server-side merge of the specified |
|
1227 | 1346 | pull request, if the pull request is approved and mergeable. |
|
1228 | 1347 | After successful merging, the pull request is automatically |
|
1229 | 1348 | closed, with a relevant comment. |
|
1230 | 1349 | """ |
|
1231 | 1350 | pull_request = PullRequest.get_or_404( |
|
1232 | 1351 | self.request.matchdict['pull_request_id']) |
|
1233 | 1352 | _ = self.request.translate |
|
1234 | 1353 | |
|
1235 | 1354 | if pull_request.is_state_changing(): |
|
1236 | 1355 | log.debug('show: forbidden because pull request is in state %s', |
|
1237 | 1356 | pull_request.pull_request_state) |
|
1238 | 1357 | msg = _(u'Cannot merge pull requests in state other than `{}`. ' |
|
1239 | 1358 | u'Current state is: `{}`').format(PullRequest.STATE_CREATED, |
|
1240 | 1359 | pull_request.pull_request_state) |
|
1241 | 1360 | h.flash(msg, category='error') |
|
1242 | 1361 | raise HTTPFound( |
|
1243 | 1362 | h.route_path('pullrequest_show', |
|
1244 | 1363 | repo_name=pull_request.target_repo.repo_name, |
|
1245 | 1364 | pull_request_id=pull_request.pull_request_id)) |
|
1246 | 1365 | |
|
1247 | 1366 | self.load_default_context() |
|
1248 | 1367 | |
|
1249 | 1368 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1250 | 1369 | check = MergeCheck.validate( |
|
1251 | 1370 | pull_request, auth_user=self._rhodecode_user, |
|
1252 | 1371 | translator=self.request.translate) |
|
1253 | 1372 | merge_possible = not check.failed |
|
1254 | 1373 | |
|
1255 | 1374 | for err_type, error_msg in check.errors: |
|
1256 | 1375 | h.flash(error_msg, category=err_type) |
|
1257 | 1376 | |
|
1258 | 1377 | if merge_possible: |
|
1259 | 1378 | log.debug("Pre-conditions checked, trying to merge.") |
|
1260 | 1379 | extras = vcs_operation_context( |
|
1261 | 1380 | self.request.environ, repo_name=pull_request.target_repo.repo_name, |
|
1262 | 1381 | username=self._rhodecode_db_user.username, action='push', |
|
1263 | 1382 | scm=pull_request.target_repo.repo_type) |
|
1264 | 1383 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1265 | 1384 | self._merge_pull_request( |
|
1266 | 1385 | pull_request, self._rhodecode_db_user, extras) |
|
1267 | 1386 | else: |
|
1268 | 1387 | log.debug("Pre-conditions failed, NOT merging.") |
|
1269 | 1388 | |
|
1270 | 1389 | raise HTTPFound( |
|
1271 | 1390 | h.route_path('pullrequest_show', |
|
1272 | 1391 | repo_name=pull_request.target_repo.repo_name, |
|
1273 | 1392 | pull_request_id=pull_request.pull_request_id)) |
|
1274 | 1393 | |
|
1275 | 1394 | def _merge_pull_request(self, pull_request, user, extras): |
|
1276 | 1395 | _ = self.request.translate |
|
1277 | 1396 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) |
|
1278 | 1397 | |
|
1279 | 1398 | if merge_resp.executed: |
|
1280 | 1399 | log.debug("The merge was successful, closing the pull request.") |
|
1281 | 1400 | PullRequestModel().close_pull_request( |
|
1282 | 1401 | pull_request.pull_request_id, user) |
|
1283 | 1402 | Session().commit() |
|
1284 | 1403 | msg = _('Pull request was successfully merged and closed.') |
|
1285 | 1404 | h.flash(msg, category='success') |
|
1286 | 1405 | else: |
|
1287 | 1406 | log.debug( |
|
1288 | 1407 | "The merge was not successful. Merge response: %s", merge_resp) |
|
1289 | 1408 | msg = merge_resp.merge_status_message |
|
1290 | 1409 | h.flash(msg, category='error') |
|
1291 | 1410 | |
|
1292 | 1411 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): |
|
1293 | 1412 | _ = self.request.translate |
|
1294 | 1413 | |
|
1295 | 1414 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1296 | 1415 | PullRequestModel().get_reviewer_functions() |
|
1297 | 1416 | |
|
1298 | 1417 | try: |
|
1299 | 1418 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
1300 | 1419 | except ValueError as e: |
|
1301 | 1420 | log.error('Reviewers Validation: {}'.format(e)) |
|
1302 | 1421 | h.flash(e, category='error') |
|
1303 | 1422 | return |
|
1304 | 1423 | |
|
1305 | 1424 | old_calculated_status = pull_request.calculated_review_status() |
|
1306 | 1425 | PullRequestModel().update_reviewers( |
|
1307 | 1426 | pull_request, reviewers, self._rhodecode_user) |
|
1308 | 1427 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
1309 | 1428 | Session().commit() |
|
1310 | 1429 | |
|
1311 | 1430 | # trigger status changed if change in reviewers changes the status |
|
1312 | 1431 | calculated_status = pull_request.calculated_review_status() |
|
1313 | 1432 | if old_calculated_status != calculated_status: |
|
1314 | 1433 | PullRequestModel().trigger_pull_request_hook( |
|
1315 | 1434 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1316 | 1435 | data={'status': calculated_status}) |
|
1317 | 1436 | |
|
1318 | 1437 | @LoginRequired() |
|
1319 | 1438 | @NotAnonymous() |
|
1320 | 1439 | @HasRepoPermissionAnyDecorator( |
|
1321 | 1440 | 'repository.read', 'repository.write', 'repository.admin') |
|
1322 | 1441 | @CSRFRequired() |
|
1323 | 1442 | @view_config( |
|
1324 | 1443 | route_name='pullrequest_delete', request_method='POST', |
|
1325 | 1444 | renderer='json_ext') |
|
1326 | 1445 | def pull_request_delete(self): |
|
1327 | 1446 | _ = self.request.translate |
|
1328 | 1447 | |
|
1329 | 1448 | pull_request = PullRequest.get_or_404( |
|
1330 | 1449 | self.request.matchdict['pull_request_id']) |
|
1331 | 1450 | self.load_default_context() |
|
1332 | 1451 | |
|
1333 | 1452 | pr_closed = pull_request.is_closed() |
|
1334 | 1453 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
1335 | 1454 | pull_request, self._rhodecode_user) and not pr_closed |
|
1336 | 1455 | |
|
1337 | 1456 | # only owner can delete it ! |
|
1338 | 1457 | if allowed_to_delete: |
|
1339 | 1458 | PullRequestModel().delete(pull_request, self._rhodecode_user) |
|
1340 | 1459 | Session().commit() |
|
1341 | 1460 | h.flash(_('Successfully deleted pull request'), |
|
1342 | 1461 | category='success') |
|
1343 | 1462 | raise HTTPFound(h.route_path('pullrequest_show_all', |
|
1344 | 1463 | repo_name=self.db_repo_name)) |
|
1345 | 1464 | |
|
1346 | 1465 | log.warning('user %s tried to delete pull request without access', |
|
1347 | 1466 | self._rhodecode_user) |
|
1348 | 1467 | raise HTTPNotFound() |
|
1349 | 1468 | |
|
1350 | 1469 | @LoginRequired() |
|
1351 | 1470 | @NotAnonymous() |
|
1352 | 1471 | @HasRepoPermissionAnyDecorator( |
|
1353 | 1472 | 'repository.read', 'repository.write', 'repository.admin') |
|
1354 | 1473 | @CSRFRequired() |
|
1355 | 1474 | @view_config( |
|
1356 | 1475 | route_name='pullrequest_comment_create', request_method='POST', |
|
1357 | 1476 | renderer='json_ext') |
|
1358 | 1477 | def pull_request_comment_create(self): |
|
1359 | 1478 | _ = self.request.translate |
|
1360 | 1479 | |
|
1361 | 1480 | pull_request = PullRequest.get_or_404( |
|
1362 | 1481 | self.request.matchdict['pull_request_id']) |
|
1363 | 1482 | pull_request_id = pull_request.pull_request_id |
|
1364 | 1483 | |
|
1365 | 1484 | if pull_request.is_closed(): |
|
1366 | 1485 | log.debug('comment: forbidden because pull request is closed') |
|
1367 | 1486 | raise HTTPForbidden() |
|
1368 | 1487 | |
|
1369 | 1488 | allowed_to_comment = PullRequestModel().check_user_comment( |
|
1370 | 1489 | pull_request, self._rhodecode_user) |
|
1371 | 1490 | if not allowed_to_comment: |
|
1372 | 1491 | log.debug( |
|
1373 | 1492 | 'comment: forbidden because pull request is from forbidden repo') |
|
1374 | 1493 | raise HTTPForbidden() |
|
1375 | 1494 | |
|
1376 | 1495 | c = self.load_default_context() |
|
1377 | 1496 | |
|
1378 | 1497 | status = self.request.POST.get('changeset_status', None) |
|
1379 | 1498 | text = self.request.POST.get('text') |
|
1380 | 1499 | comment_type = self.request.POST.get('comment_type') |
|
1381 | 1500 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
1382 | 1501 | close_pull_request = self.request.POST.get('close_pull_request') |
|
1383 | 1502 | |
|
1384 | 1503 | # the logic here should work like following, if we submit close |
|
1385 | 1504 | # pr comment, use `close_pull_request_with_comment` function |
|
1386 | 1505 | # else handle regular comment logic |
|
1387 | 1506 | |
|
1388 | 1507 | if close_pull_request: |
|
1389 | 1508 | # only owner or admin or person with write permissions |
|
1390 | 1509 | allowed_to_close = PullRequestModel().check_user_update( |
|
1391 | 1510 | pull_request, self._rhodecode_user) |
|
1392 | 1511 | if not allowed_to_close: |
|
1393 | 1512 | log.debug('comment: forbidden because not allowed to close ' |
|
1394 | 1513 | 'pull request %s', pull_request_id) |
|
1395 | 1514 | raise HTTPForbidden() |
|
1396 | 1515 | |
|
1397 | 1516 | # This also triggers `review_status_change` |
|
1398 | 1517 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
1399 | 1518 | pull_request, self._rhodecode_user, self.db_repo, message=text, |
|
1400 | 1519 | auth_user=self._rhodecode_user) |
|
1401 | 1520 | Session().flush() |
|
1402 | 1521 | |
|
1403 | 1522 | PullRequestModel().trigger_pull_request_hook( |
|
1404 | 1523 | pull_request, self._rhodecode_user, 'comment', |
|
1405 | 1524 | data={'comment': comment}) |
|
1406 | 1525 | |
|
1407 | 1526 | else: |
|
1408 | 1527 | # regular comment case, could be inline, or one with status. |
|
1409 | 1528 | # for that one we check also permissions |
|
1410 | 1529 | |
|
1411 | 1530 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
1412 | 1531 | pull_request, self._rhodecode_user) |
|
1413 | 1532 | |
|
1414 | 1533 | if status and allowed_to_change_status: |
|
1415 | 1534 | message = (_('Status change %(transition_icon)s %(status)s') |
|
1416 | 1535 | % {'transition_icon': '>', |
|
1417 | 1536 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
1418 | 1537 | text = text or message |
|
1419 | 1538 | |
|
1420 | 1539 | comment = CommentsModel().create( |
|
1421 | 1540 | text=text, |
|
1422 | 1541 | repo=self.db_repo.repo_id, |
|
1423 | 1542 | user=self._rhodecode_user.user_id, |
|
1424 | 1543 | pull_request=pull_request, |
|
1425 | 1544 | f_path=self.request.POST.get('f_path'), |
|
1426 | 1545 | line_no=self.request.POST.get('line'), |
|
1427 | 1546 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
1428 | 1547 | if status and allowed_to_change_status else None), |
|
1429 | 1548 | status_change_type=(status |
|
1430 | 1549 | if status and allowed_to_change_status else None), |
|
1431 | 1550 | comment_type=comment_type, |
|
1432 | 1551 | resolves_comment_id=resolves_comment_id, |
|
1433 | 1552 | auth_user=self._rhodecode_user |
|
1434 | 1553 | ) |
|
1435 | 1554 | |
|
1436 | 1555 | if allowed_to_change_status: |
|
1437 | 1556 | # calculate old status before we change it |
|
1438 | 1557 | old_calculated_status = pull_request.calculated_review_status() |
|
1439 | 1558 | |
|
1440 | 1559 | # get status if set ! |
|
1441 | 1560 | if status: |
|
1442 | 1561 | ChangesetStatusModel().set_status( |
|
1443 | 1562 | self.db_repo.repo_id, |
|
1444 | 1563 | status, |
|
1445 | 1564 | self._rhodecode_user.user_id, |
|
1446 | 1565 | comment, |
|
1447 | 1566 | pull_request=pull_request |
|
1448 | 1567 | ) |
|
1449 | 1568 | |
|
1450 | 1569 | Session().flush() |
|
1451 | 1570 | # this is somehow required to get access to some relationship |
|
1452 | 1571 | # loaded on comment |
|
1453 | 1572 | Session().refresh(comment) |
|
1454 | 1573 | |
|
1455 | 1574 | PullRequestModel().trigger_pull_request_hook( |
|
1456 | 1575 | pull_request, self._rhodecode_user, 'comment', |
|
1457 | 1576 | data={'comment': comment}) |
|
1458 | 1577 | |
|
1459 | 1578 | # we now calculate the status of pull request, and based on that |
|
1460 | 1579 | # calculation we set the commits status |
|
1461 | 1580 | calculated_status = pull_request.calculated_review_status() |
|
1462 | 1581 | if old_calculated_status != calculated_status: |
|
1463 | 1582 | PullRequestModel().trigger_pull_request_hook( |
|
1464 | 1583 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1465 | 1584 | data={'status': calculated_status}) |
|
1466 | 1585 | |
|
1467 | 1586 | Session().commit() |
|
1468 | 1587 | |
|
1469 | 1588 | data = { |
|
1470 | 1589 | 'target_id': h.safeid(h.safe_unicode( |
|
1471 | 1590 | self.request.POST.get('f_path'))), |
|
1472 | 1591 | } |
|
1473 | 1592 | if comment: |
|
1474 | 1593 | c.co = comment |
|
1594 | c.at_version_num = None | |
|
1475 | 1595 | rendered_comment = render( |
|
1476 | 1596 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
1477 | 1597 | self._get_template_context(c), self.request) |
|
1478 | 1598 | |
|
1479 | 1599 | data.update(comment.get_dict()) |
|
1480 | 1600 | data.update({'rendered_text': rendered_comment}) |
|
1481 | 1601 | |
|
1482 | 1602 | return data |
|
1483 | 1603 | |
|
1484 | 1604 | @LoginRequired() |
|
1485 | 1605 | @NotAnonymous() |
|
1486 | 1606 | @HasRepoPermissionAnyDecorator( |
|
1487 | 1607 | 'repository.read', 'repository.write', 'repository.admin') |
|
1488 | 1608 | @CSRFRequired() |
|
1489 | 1609 | @view_config( |
|
1490 | 1610 | route_name='pullrequest_comment_delete', request_method='POST', |
|
1491 | 1611 | renderer='json_ext') |
|
1492 | 1612 | def pull_request_comment_delete(self): |
|
1493 | 1613 | pull_request = PullRequest.get_or_404( |
|
1494 | 1614 | self.request.matchdict['pull_request_id']) |
|
1495 | 1615 | |
|
1496 | 1616 | comment = ChangesetComment.get_or_404( |
|
1497 | 1617 | self.request.matchdict['comment_id']) |
|
1498 | 1618 | comment_id = comment.comment_id |
|
1499 | 1619 | |
|
1500 | 1620 | if comment.immutable: |
|
1501 | 1621 | # don't allow deleting comments that are immutable |
|
1502 | 1622 | raise HTTPForbidden() |
|
1503 | 1623 | |
|
1504 | 1624 | if pull_request.is_closed(): |
|
1505 | 1625 | log.debug('comment: forbidden because pull request is closed') |
|
1506 | 1626 | raise HTTPForbidden() |
|
1507 | 1627 | |
|
1508 | 1628 | if not comment: |
|
1509 | 1629 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1510 | 1630 | # comment already deleted in another call probably |
|
1511 | 1631 | return True |
|
1512 | 1632 | |
|
1513 | 1633 | if comment.pull_request.is_closed(): |
|
1514 | 1634 | # don't allow deleting comments on closed pull request |
|
1515 | 1635 | raise HTTPForbidden() |
|
1516 | 1636 | |
|
1517 | 1637 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1518 | 1638 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1519 | 1639 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1520 | 1640 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1521 | 1641 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1522 | 1642 | |
|
1523 | 1643 | if super_admin or comment_owner or comment_repo_admin: |
|
1524 | 1644 | old_calculated_status = comment.pull_request.calculated_review_status() |
|
1525 | 1645 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
1526 | 1646 | Session().commit() |
|
1527 | 1647 | calculated_status = comment.pull_request.calculated_review_status() |
|
1528 | 1648 | if old_calculated_status != calculated_status: |
|
1529 | 1649 | PullRequestModel().trigger_pull_request_hook( |
|
1530 | 1650 | comment.pull_request, self._rhodecode_user, 'review_status_change', |
|
1531 | 1651 | data={'status': calculated_status}) |
|
1532 | 1652 | return True |
|
1533 | 1653 | else: |
|
1534 | 1654 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
1535 | 1655 | self._rhodecode_db_user, comment_id) |
|
1536 | 1656 | raise HTTPNotFound() |
|
1537 | 1657 | |
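The deletion guard above (and the identical one in the edit view further down) boils down to a single predicate over four facts about the requesting user and the comment. A condensed restatement, using the same variable names as the diff:

def may_modify_comment(super_admin, comment_owner, is_repo_admin, is_repo_comment):
    # super admins, the comment author, or an admin of the repository the
    # comment belongs to may delete/edit; everyone else gets a 404.
    comment_repo_admin = is_repo_admin and is_repo_comment
    return super_admin or comment_owner or comment_repo_admin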
|
1538 | 1658 | @LoginRequired() |
|
1539 | 1659 | @NotAnonymous() |
|
1540 | 1660 | @HasRepoPermissionAnyDecorator( |
|
1541 | 1661 | 'repository.read', 'repository.write', 'repository.admin') |
|
1542 | 1662 | @CSRFRequired() |
|
1543 | 1663 | @view_config( |
|
1544 | 1664 | route_name='pullrequest_comment_edit', request_method='POST', |
|
1545 | 1665 | renderer='json_ext') |
|
1546 | 1666 | def pull_request_comment_edit(self): |
|
1547 | 1667 | self.load_default_context() |
|
1548 | 1668 | |
|
1549 | 1669 | pull_request = PullRequest.get_or_404( |
|
1550 | 1670 | self.request.matchdict['pull_request_id'] |
|
1551 | 1671 | ) |
|
1552 | 1672 | comment = ChangesetComment.get_or_404( |
|
1553 | 1673 | self.request.matchdict['comment_id'] |
|
1554 | 1674 | ) |
|
1555 | 1675 | comment_id = comment.comment_id |
|
1556 | 1676 | |
|
1557 | 1677 | if comment.immutable: |
|
1558 | 1678 | # don't allow editing comments that are immutable
|
1559 | 1679 | raise HTTPForbidden() |
|
1560 | 1680 | |
|
1561 | 1681 | if pull_request.is_closed(): |
|
1562 | 1682 | log.debug('comment: forbidden because pull request is closed') |
|
1563 | 1683 | raise HTTPForbidden() |
|
1564 | 1684 | |
|
1565 | 1685 | if not comment: |
|
1566 | 1686 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1567 | 1687 | # comment already deleted in another call probably |
|
1568 | 1688 | return True |
|
1569 | 1689 | |
|
1570 | 1690 | if comment.pull_request.is_closed(): |
|
1571 | 1691 | # don't allow editing comments on closed pull request
|
1572 | 1692 | raise HTTPForbidden() |
|
1573 | 1693 | |
|
1574 | 1694 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1575 | 1695 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1576 | 1696 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1577 | 1697 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1578 | 1698 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1579 | 1699 | |
|
1580 | 1700 | if super_admin or comment_owner or comment_repo_admin: |
|
1581 | 1701 | text = self.request.POST.get('text') |
|
1582 | 1702 | version = self.request.POST.get('version') |
|
1583 | 1703 | if text == comment.text: |
|
1584 | 1704 | log.warning( |
|
1585 | 1705 | 'Comment(PR): ' |
|
1586 | 1706 | 'Trying to create new version ' |
|
1587 | 1707 | 'with the same comment body {}'.format( |
|
1588 | 1708 | comment_id, |
|
1589 | 1709 | ) |
|
1590 | 1710 | ) |
|
1591 | 1711 | raise HTTPNotFound() |
|
1592 | 1712 | |
|
1593 | 1713 | if version.isdigit(): |
|
1594 | 1714 | version = int(version) |
|
1595 | 1715 | else: |
|
1596 | 1716 | log.warning( |
|
1597 | 1717 | 'Comment(PR): Wrong version type {} {} ' |
|
1598 | 1718 | 'for comment {}'.format( |
|
1599 | 1719 | version, |
|
1600 | 1720 | type(version), |
|
1601 | 1721 | comment_id, |
|
1602 | 1722 | ) |
|
1603 | 1723 | ) |
|
1604 | 1724 | raise HTTPNotFound() |
|
1605 | 1725 | |
|
1606 | 1726 | try: |
|
1607 | 1727 | comment_history = CommentsModel().edit( |
|
1608 | 1728 | comment_id=comment_id, |
|
1609 | 1729 | text=text, |
|
1610 | 1730 | auth_user=self._rhodecode_user, |
|
1611 | 1731 | version=version, |
|
1612 | 1732 | ) |
|
1613 | 1733 | except CommentVersionMismatch: |
|
1614 | 1734 | raise HTTPConflict() |
|
1615 | 1735 | |
|
1616 | 1736 | if not comment_history: |
|
1617 | 1737 | raise HTTPNotFound() |
|
1618 | 1738 | |
|
1619 | 1739 | Session().commit() |
|
1620 | 1740 | |
|
1621 | 1741 | PullRequestModel().trigger_pull_request_hook( |
|
1622 | 1742 | pull_request, self._rhodecode_user, 'comment_edit', |
|
1623 | 1743 | data={'comment': comment}) |
|
1624 | 1744 | |
|
1625 | 1745 | return { |
|
1626 | 1746 | 'comment_history_id': comment_history.comment_history_id, |
|
1627 | 1747 | 'comment_id': comment.comment_id, |
|
1628 | 1748 | 'comment_version': comment_history.version, |
|
1629 | 1749 | 'comment_author_username': comment_history.author.username, |
|
1630 | 1750 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), |
|
1631 | 1751 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
1632 | 1752 | time_is_local=True), |
|
1633 | 1753 | } |
|
1634 | 1754 | else: |
|
1635 | 1755 | log.warning('No permissions for user %s to edit comment_id: %s', |
|
1636 | 1756 | self._rhodecode_db_user, comment_id) |
|
1637 | 1757 | raise HTTPNotFound() |
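pull_request_comment_edit() implements a simple optimistic-locking scheme: the client submits the new text together with the comment version it last saw, and a CommentVersionMismatch on the server surfaces as HTTP 409 Conflict. A hedged client-side sketch follows; the endpoint URL, session and CSRF token handling are assumptions, while the 'text'/'version' parameters, the 409 behaviour, and the JSON keys in the response come from the view.

import requests

def edit_pr_comment(http_session, url, new_text, last_seen_version):
    # CSRF token handling omitted; the real endpoint is @CSRFRequired.
    response = http_session.post(
        url,  # assumed to resolve to the pullrequest_comment_edit route
        data={'text': new_text, 'version': last_seen_version},
        headers={'X-Requested-With': 'XMLHttpRequest'})
    if response.status_code == 409:
        raise RuntimeError('comment changed concurrently; reload and retry')
    response.raise_for_status()
    return response.json()  # comment_history_id, comment_version, ...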
|