##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r4491:f919670e merge stable
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,18 b''
1 diff -rup Beaker-1.9.1-orig/beaker/session.py Beaker-1.9.1/beaker/session.py
2 --- Beaker-1.9.1-orig/beaker/session.py 2020-04-10 10:23:04.000000000 +0200
3 +++ Beaker-1.9.1/beaker/session.py 2020-04-10 10:23:34.000000000 +0200
4 @@ -156,6 +156,14 @@ def __init__(self, request, id=None, invalidate_corrupt=False,
5 if timeout and not save_accessed_time:
6 raise BeakerException("timeout requires save_accessed_time")
7 self.timeout = timeout
8 + # We want to pass timeout param to redis backend to support expiration of keys
9 + # In future, I believe, we can use this param for memcached and mongo as well
10 + if self.timeout is not None and self.type == 'ext:redis':
11 + # The backend expiration should always be a bit longer (I decied to use 2 minutes) than the
12 + # session expiration itself to prevent the case where the backend data expires while
13 + # the session is being read (PR#153)
14 + self.namespace_args['timeout'] = self.timeout + 60 * 2
15 +
16 self.save_atime = save_accessed_time
17 self.use_cookies = use_cookies
18 self.cookie_expires = cookie_expires No newline at end of file
@@ -0,0 +1,26 b''
1 diff -rup Beaker-1.9.1-orig/beaker/ext/redisnm.py Beaker-1.9.1/beaker/ext/redisnm.py
2 --- Beaker-1.9.1-orig/beaker/ext/redisnm.py 2018-04-10 10:23:04.000000000 +0200
3 +++ Beaker-1.9.1/beaker/ext/redisnm.py 2018-04-10 10:23:34.000000000 +0200
4 @@ -30,9 +30,10 @@ class RedisNamespaceManager(NamespaceManager):
5
6 clients = SyncDict()
7
8 - def __init__(self, namespace, url, **kw):
9 + def __init__(self, namespace, url, timeout=None, **kw):
10 super(RedisNamespaceManager, self).__init__(namespace)
11 self.lock_dir = None # Redis uses redis itself for locking.
12 + self.timeout = timeout
13
14 if redis is None:
15 raise RuntimeError('redis is not available')
16 @@ -68,6 +69,8 @@ def has_key(self, key):
17
18 def set_value(self, key, value, expiretime=None):
19 value = pickle.dumps(value)
20 + if expiretime is None and self.timeout is not None:
21 + expiretime = self.timeout
22 if expiretime is not None:
23 self.client.setex(self._format_key(key), int(expiretime), value)
24 else:
25
26
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,6 +1,5 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.20.1
2 current_version = 4.21.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:rhodecode/VERSION]
5 [bumpversion:file:rhodecode/VERSION]
6
@@ -1,33 +1,28 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:rc_tools_pinned]
7 [task:rc_tools_pinned]
8 done = true
9
8
10 [task:fixes_on_stable]
9 [task:fixes_on_stable]
11 done = true
12
10
13 [task:pip2nix_generated]
11 [task:pip2nix_generated]
14 done = true
15
12
16 [task:changelog_updated]
13 [task:changelog_updated]
17 done = true
18
14
19 [task:generate_api_docs]
15 [task:generate_api_docs]
20 done = true
16
17 [task:updated_translation]
21
18
22 [release]
19 [release]
23 state = prepared
20 state = in_progress
24 version = 4.20.1
21 version = 4.21.0
25
26 [task:updated_translation]
27
22
28 [task:generate_js_routes]
23 [task:generate_js_routes]
29
24
30 [task:updated_trial_license]
25 [task:updated_trial_license]
31
26
32 [task:generate_oss_licenses]
27 [task:generate_oss_licenses]
33
28
@@ -1,193 +1,194 b''
1 Nginx Configuration Example
1 Nginx Configuration Example
2 ---------------------------
2 ---------------------------
3
3
4 Use the following example to configure Nginx as a your web server.
4 Use the following example to configure Nginx as a your web server.
5
5
6
6
7 .. code-block:: nginx
7 .. code-block:: nginx
8
8
9 ## Rate limiter for certain pages to prevent brute force attacks
9 ## Rate limiter for certain pages to prevent brute force attacks
10 limit_req_zone $binary_remote_addr zone=req_limit:10m rate=1r/s;
10 limit_req_zone $binary_remote_addr zone=req_limit:10m rate=1r/s;
11
11
12 ## cache zone
12 ## cache zone
13 proxy_cache_path /etc/nginx/nginx_cache levels=1:2 use_temp_path=off keys_zone=cache_zone:10m inactive=720h max_size=10g;
13 proxy_cache_path /etc/nginx/nginx_cache levels=1:2 use_temp_path=off keys_zone=cache_zone:10m inactive=720h max_size=10g;
14
14
15 ## Custom log format
15 ## Custom log format
16 log_format log_custom '$remote_addr - $remote_user [$time_local] '
16 log_format log_custom '$remote_addr - $remote_user [$time_local] '
17 '"$request" $status $body_bytes_sent '
17 '"$request" $status $body_bytes_sent '
18 '"$http_referer" "$http_user_agent" '
18 '"$http_referer" "$http_user_agent" '
19 '$request_time $upstream_response_time $pipe';
19 '$request_time $upstream_response_time $pipe';
20
20
21 ## Define one or more upstreams (local RhodeCode instance) to connect to
21 ## Define one or more upstreams (local RhodeCode instance) to connect to
22 upstream rc {
22 upstream rc {
23 # Url to running RhodeCode instance.
23 # Url to running RhodeCode instance.
24 # This is shown as `- URL: <host>` in output from rccontrol status.
24 # This is shown as `- URL: <host>` in output from rccontrol status.
25 server 127.0.0.1:10002;
25 server 127.0.0.1:10002;
26
26
27 # add more instances for load balancing
27 # add more instances for load balancing
28 # server 127.0.0.1:10003;
28 # server 127.0.0.1:10003;
29 # server 127.0.0.1:10004;
29 # server 127.0.0.1:10004;
30 }
30 }
31
31
32 ## HTTP to HTTPS rewrite
32 ## HTTP to HTTPS rewrite
33 server {
33 server {
34 listen 80;
34 listen 80;
35 server_name rhodecode.myserver.com;
35 server_name rhodecode.myserver.com;
36
36
37 if ($http_host = rhodecode.myserver.com) {
37 if ($http_host = rhodecode.myserver.com) {
38 rewrite (.*) https://rhodecode.myserver.com$1 permanent;
38 rewrite (.*) https://rhodecode.myserver.com$1 permanent;
39 }
39 }
40 }
40 }
41
41
42 ## Optional gist alias server, for serving nicer GIST urls.
42 ## Optional gist alias server, for serving nicer GIST urls.
43 server {
43 server {
44 listen 443;
44 listen 443;
45 server_name gist.myserver.com;
45 server_name gist.myserver.com;
46 access_log /var/log/nginx/gist.access.log log_custom;
46 access_log /var/log/nginx/gist.access.log log_custom;
47 error_log /var/log/nginx/gist.error.log;
47 error_log /var/log/nginx/gist.error.log;
48
48
49 ssl on;
49 ssl on;
50 ssl_certificate gist.rhodecode.myserver.com.crt;
50 ssl_certificate gist.rhodecode.myserver.com.crt;
51 ssl_certificate_key gist.rhodecode.myserver.com.key;
51 ssl_certificate_key gist.rhodecode.myserver.com.key;
52
52
53 ssl_session_timeout 5m;
53 ssl_session_timeout 5m;
54
54
55 ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
55 ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
56 ssl_prefer_server_ciphers on;
56 ssl_prefer_server_ciphers on;
57 ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
57 ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
58
58
59 ## Strict http prevents from https -> http downgrade
59 ## Strict http prevents from https -> http downgrade
60 add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;";
60 add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;";
61
61
62 ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits
62 ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits
63 #ssl_dhparam /etc/nginx/ssl/dhparam.pem;
63 #ssl_dhparam /etc/nginx/ssl/dhparam.pem;
64
64
65 rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1;
65 rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1;
66 rewrite (.*) https://rhodecode.myserver.com/_admin/gists;
66 rewrite (.*) https://rhodecode.myserver.com/_admin/gists;
67 }
67 }
68
68
69
69
70 ## MAIN SSL enabled server
70 ## MAIN SSL enabled server
71 server {
71 server {
72 listen 443 ssl http2;
72 listen 443 ssl http2;
73 server_name rhodecode.myserver.com;
73 server_name rhodecode.myserver.com;
74
74
75 access_log /var/log/nginx/rhodecode.access.log log_custom;
75 access_log /var/log/nginx/rhodecode.access.log log_custom;
76 error_log /var/log/nginx/rhodecode.error.log;
76 error_log /var/log/nginx/rhodecode.error.log;
77
77
78 ssl_certificate rhodecode.myserver.com.crt;
78 ssl_certificate rhodecode.myserver.com.crt;
79 ssl_certificate_key rhodecode.myserver.com.key;
79 ssl_certificate_key rhodecode.myserver.com.key;
80
80
81 # enable session resumption to improve https performance
81 # enable session resumption to improve https performance
82 # http://vincent.bernat.im/en/blog/2011-ssl-session-reuse-rfc5077.html
82 # http://vincent.bernat.im/en/blog/2011-ssl-session-reuse-rfc5077.html
83 ssl_session_cache shared:SSL:50m;
83 ssl_session_cache shared:SSL:50m;
84 ssl_session_timeout 5m;
84 ssl_session_timeout 5m;
85
85
86 ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits
86 ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits
87 #ssl_dhparam /etc/nginx/ssl/dhparam.pem;
87 #ssl_dhparam /etc/nginx/ssl/dhparam.pem;
88
88
89 # enables server-side protection from BEAST attacks
89 # enables server-side protection from BEAST attacks
90 # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html
90 # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html
91 ssl_prefer_server_ciphers on;
91 ssl_prefer_server_ciphers on;
92
92
93 # disable SSLv3(enabled by default since nginx 0.8.19) since it's less secure then TLS http://en.wikipedia.org/wiki/Secure_Sockets_Layer#SSL_3.0
93 # disable SSLv3(enabled by default since nginx 0.8.19) since it's less secure then TLS http://en.wikipedia.org/wiki/Secure_Sockets_Layer#SSL_3.0
94 ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
94 ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
95
95
96 # ciphers chosen for forward secrecy and compatibility
96 # ciphers chosen for forward secrecy and compatibility
97 # http://blog.ivanristic.com/2013/08/configuring-apache-nginx-and-openssl-for-forward-secrecy.html
97 # http://blog.ivanristic.com/2013/08/configuring-apache-nginx-and-openssl-for-forward-secrecy.html
98 ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
98 ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
99
99
100 client_body_buffer_size 128k;
100 client_body_buffer_size 128k;
101 # maximum number and size of buffers for large headers to read from client request
101 # maximum number and size of buffers for large headers to read from client request
102 large_client_header_buffers 16 256k;
102 large_client_header_buffers 16 256k;
103
103
104 ## uncomment to serve static files by Nginx, recommended for performance
104 ## uncomment to serve static files by Nginx, recommended for performance
105 # location /_static/rhodecode {
105 # location /_static/rhodecode {
106 # gzip on;
106 # gzip on;
107 # gzip_min_length 500;
107 # gzip_min_length 500;
108 # gzip_proxied any;
108 # gzip_proxied any;
109 # gzip_comp_level 4;
109 # gzip_comp_level 4;
110 # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml;
110 # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml;
111 # gzip_vary on;
111 # gzip_vary on;
112 # gzip_disable "msie6";
112 # gzip_disable "msie6";
113 # expires 60d;
113 # expires 60d;
114 # alias /path/to/.rccontrol/community-1/static;
114 # alias /path/to/.rccontrol/community-1/static;
115 # alias /path/to/.rccontrol/enterprise-1/static;
115 # alias /path/to/.rccontrol/enterprise-1/static;
116 # }
116 # }
117
117
118 ## channelstream location handler, if channelstream live chat and notifications
118 ## channelstream location handler, if channelstream live chat and notifications
119 ## are enable this will proxy the requests to channelstream websocket server
119 ## are enable this will proxy the requests to channelstream websocket server
120 location /_channelstream {
120 location /_channelstream {
121 rewrite /_channelstream/(.*) /$1 break;
121 rewrite /_channelstream/(.*) /$1 break;
122 gzip off;
122 gzip off;
123 tcp_nodelay off;
123 tcp_nodelay off;
124
124
125 proxy_connect_timeout 10;
125 proxy_connect_timeout 10;
126 proxy_send_timeout 10m;
126 proxy_send_timeout 10m;
127 proxy_read_timeout 10m;
127 proxy_read_timeout 10m;
128
128
129 proxy_set_header Host $host;
129 proxy_set_header Host $host;
130 proxy_set_header X-Real-IP $remote_addr;
130 proxy_set_header X-Real-IP $remote_addr;
131 proxy_set_header X-Url-Scheme $scheme;
131 proxy_set_header X-Url-Scheme $scheme;
132 proxy_set_header X-Forwarded-Proto $scheme;
132 proxy_set_header X-Forwarded-Proto $scheme;
133 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
133 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
134
134
135 proxy_http_version 1.1;
135 proxy_http_version 1.1;
136 proxy_set_header Upgrade $http_upgrade;
136 proxy_set_header Upgrade $http_upgrade;
137 proxy_set_header Connection "upgrade";
137 proxy_set_header Connection "upgrade";
138
138
139 proxy_pass http://127.0.0.1:9800;
139 proxy_pass http://127.0.0.1:9800;
140 }
140 }
141
141
142 ## rate limit this endpoint to prevent login page brute-force attacks
142 ## rate limit this endpoint to prevent login page brute-force attacks
143 location /_admin/login {
143 location /_admin/login {
144 limit_req zone=req_limit burst=10 nodelay;
144 limit_req zone=req_limit burst=10 nodelay;
145 try_files $uri @rhodecode_http;
145 try_files $uri @rhodecode_http;
146 }
146 }
147
147
148 ## Special Cache for file store, make sure you enable this intentionally as
148 ## Special Cache for file store, make sure you enable this intentionally as
149 ## it could bypass upload files permissions
149 ## it could bypass upload files permissions
150 # location /_file_store/download {
150 # location /_file_store/download/gravatars {
151 #
151 #
152 # proxy_cache cache_zone;
152 # proxy_cache cache_zone;
153 # # ignore Set-Cookie
153 # # ignore Set-Cookie
154 # proxy_ignore_headers Set-Cookie;
154 # proxy_ignore_headers Set-Cookie;
155 # proxy_ignore_headers Cookie;
155 # # ignore cache-control
156 # proxy_ignore_headers Cache-Control;
156 #
157 #
157 # proxy_cache_key $host$uri$is_args$args;
158 # proxy_cache_key $host$uri$is_args$args;
158 # proxy_cache_methods GET;
159 # proxy_cache_methods GET;
159 #
160 #
160 # proxy_cache_bypass $http_cache_control;
161 # proxy_cache_bypass $http_cache_control;
161 # proxy_cache_valid 200 302 720h;
162 # proxy_cache_valid 200 302 720h;
162 #
163 #
163 # proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504;
164 # proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504;
164 #
165 #
165 # # returns cache status in headers
166 # # returns cache status in headers
166 # add_header X-Proxy-Cache $upstream_cache_status;
167 # add_header X-Proxy-Cache $upstream_cache_status;
167 # add_header Cache-Control "public";
168 # add_header Cache-Control "public";
168 #
169 #
169 # proxy_cache_lock on;
170 # proxy_cache_lock on;
170 # proxy_cache_lock_age 5m;
171 # proxy_cache_lock_age 5m;
171 #
172 #
172 # proxy_pass http://rc;
173 # proxy_pass http://rc;
173 #
174 #
174 # }
175 # }
175
176
176 location / {
177 location / {
177 try_files $uri @rhodecode_http;
178 try_files $uri @rhodecode_http;
178 }
179 }
179
180
180 location @rhodecode_http {
181 location @rhodecode_http {
181 # example of proxy.conf can be found in our docs.
182 # example of proxy.conf can be found in our docs.
182 include /etc/nginx/proxy.conf;
183 include /etc/nginx/proxy.conf;
183 proxy_pass http://rc;
184 proxy_pass http://rc;
184 }
185 }
185
186
186 ## Custom 502 error page.
187 ## Custom 502 error page.
187 ## Will be displayed while RhodeCode server is turned off
188 ## Will be displayed while RhodeCode server is turned off
188 error_page 502 /502.html;
189 error_page 502 /502.html;
189 location = /502.html {
190 location = /502.html {
190 #root /path/to/.rccontrol/community-1/static;
191 #root /path/to/.rccontrol/community-1/static;
191 root /path/to/.rccontrol/enterprise-1/static;
192 root /path/to/.rccontrol/enterprise-1/static;
192 }
193 }
193 } No newline at end of file
194 }
@@ -1,279 +1,281 b''
1 # Overrides for the generated python-packages.nix
1 # Overrides for the generated python-packages.nix
2 #
2 #
3 # This function is intended to be used as an extension to the generated file
3 # This function is intended to be used as an extension to the generated file
4 # python-packages.nix. The main objective is to add needed dependencies of C
4 # python-packages.nix. The main objective is to add needed dependencies of C
5 # libraries and tweak the build instructions where needed.
5 # libraries and tweak the build instructions where needed.
6
6
7 { pkgs
7 { pkgs
8 , basePythonPackages
8 , basePythonPackages
9 }:
9 }:
10
10
11 let
11 let
12 sed = "sed -i";
12 sed = "sed -i";
13
13
14 localLicenses = {
14 localLicenses = {
15 repoze = {
15 repoze = {
16 fullName = "Repoze License";
16 fullName = "Repoze License";
17 url = http://www.repoze.org/LICENSE.txt;
17 url = http://www.repoze.org/LICENSE.txt;
18 };
18 };
19 };
19 };
20
20
21 in
21 in
22
22
23 self: super: {
23 self: super: {
24
24
25 "appenlight-client" = super."appenlight-client".override (attrs: {
25 "appenlight-client" = super."appenlight-client".override (attrs: {
26 meta = {
26 meta = {
27 license = [ pkgs.lib.licenses.bsdOriginal ];
27 license = [ pkgs.lib.licenses.bsdOriginal ];
28 };
28 };
29 });
29 });
30
30
31 "beaker" = super."beaker".override (attrs: {
31 "beaker" = super."beaker".override (attrs: {
32 patches = [
32 patches = [
33 ./patches/beaker/patch-beaker-lock-func-debug.diff
33 ./patches/beaker/patch-beaker-lock-func-debug.diff
34 ./patches/beaker/patch-beaker-metadata-reuse.diff
34 ./patches/beaker/patch-beaker-metadata-reuse.diff
35 ./patches/beaker/patch-beaker-improved-redis.diff
36 ./patches/beaker/patch-beaker-improved-redis-2.diff
35 ];
37 ];
36 });
38 });
37
39
38 "cffi" = super."cffi".override (attrs: {
40 "cffi" = super."cffi".override (attrs: {
39 buildInputs = [
41 buildInputs = [
40 pkgs.libffi
42 pkgs.libffi
41 ];
43 ];
42 });
44 });
43
45
44 "cryptography" = super."cryptography".override (attrs: {
46 "cryptography" = super."cryptography".override (attrs: {
45 buildInputs = [
47 buildInputs = [
46 pkgs.openssl
48 pkgs.openssl
47 ];
49 ];
48 });
50 });
49
51
50 "gevent" = super."gevent".override (attrs: {
52 "gevent" = super."gevent".override (attrs: {
51 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
53 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
52 # NOTE: (marcink) odd requirements from gevent aren't set properly,
54 # NOTE: (marcink) odd requirements from gevent aren't set properly,
53 # thus we need to inject psutil manually
55 # thus we need to inject psutil manually
54 self."psutil"
56 self."psutil"
55 ];
57 ];
56 });
58 });
57
59
58 "future" = super."future".override (attrs: {
60 "future" = super."future".override (attrs: {
59 meta = {
61 meta = {
60 license = [ pkgs.lib.licenses.mit ];
62 license = [ pkgs.lib.licenses.mit ];
61 };
63 };
62 });
64 });
63
65
64 "testpath" = super."testpath".override (attrs: {
66 "testpath" = super."testpath".override (attrs: {
65 meta = {
67 meta = {
66 license = [ pkgs.lib.licenses.mit ];
68 license = [ pkgs.lib.licenses.mit ];
67 };
69 };
68 });
70 });
69
71
70 "gnureadline" = super."gnureadline".override (attrs: {
72 "gnureadline" = super."gnureadline".override (attrs: {
71 buildInputs = [
73 buildInputs = [
72 pkgs.ncurses
74 pkgs.ncurses
73 ];
75 ];
74 patchPhase = ''
76 patchPhase = ''
75 substituteInPlace setup.py --replace "/bin/bash" "${pkgs.bash}/bin/bash"
77 substituteInPlace setup.py --replace "/bin/bash" "${pkgs.bash}/bin/bash"
76 '';
78 '';
77 });
79 });
78
80
79 "gunicorn" = super."gunicorn".override (attrs: {
81 "gunicorn" = super."gunicorn".override (attrs: {
80 propagatedBuildInputs = [
82 propagatedBuildInputs = [
81 # johbo: futures is needed as long as we are on Python 2, otherwise
83 # johbo: futures is needed as long as we are on Python 2, otherwise
82 # gunicorn explodes if used with multiple threads per worker.
84 # gunicorn explodes if used with multiple threads per worker.
83 self."futures"
85 self."futures"
84 ];
86 ];
85 });
87 });
86
88
87 "nbconvert" = super."nbconvert".override (attrs: {
89 "nbconvert" = super."nbconvert".override (attrs: {
88 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
90 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
89 # marcink: plug in jupyter-client for notebook rendering
91 # marcink: plug in jupyter-client for notebook rendering
90 self."jupyter-client"
92 self."jupyter-client"
91 ];
93 ];
92 });
94 });
93
95
94 "ipython" = super."ipython".override (attrs: {
96 "ipython" = super."ipython".override (attrs: {
95 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
97 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
96 self."gnureadline"
98 self."gnureadline"
97 ];
99 ];
98 });
100 });
99
101
100 "lxml" = super."lxml".override (attrs: {
102 "lxml" = super."lxml".override (attrs: {
101 buildInputs = [
103 buildInputs = [
102 pkgs.libxml2
104 pkgs.libxml2
103 pkgs.libxslt
105 pkgs.libxslt
104 ];
106 ];
105 propagatedBuildInputs = [
107 propagatedBuildInputs = [
106 # Needed, so that "setup.py bdist_wheel" does work
108 # Needed, so that "setup.py bdist_wheel" does work
107 self."wheel"
109 self."wheel"
108 ];
110 ];
109 });
111 });
110
112
111 "mysql-python" = super."mysql-python".override (attrs: {
113 "mysql-python" = super."mysql-python".override (attrs: {
112 buildInputs = [
114 buildInputs = [
113 pkgs.openssl
115 pkgs.openssl
114 ];
116 ];
115 propagatedBuildInputs = [
117 propagatedBuildInputs = [
116 pkgs.libmysql
118 pkgs.libmysql
117 pkgs.zlib
119 pkgs.zlib
118 ];
120 ];
119 });
121 });
120
122
121 "psycopg2" = super."psycopg2".override (attrs: {
123 "psycopg2" = super."psycopg2".override (attrs: {
122 propagatedBuildInputs = [
124 propagatedBuildInputs = [
123 pkgs.postgresql
125 pkgs.postgresql
124 ];
126 ];
125 meta = {
127 meta = {
126 license = pkgs.lib.licenses.lgpl3Plus;
128 license = pkgs.lib.licenses.lgpl3Plus;
127 };
129 };
128 });
130 });
129
131
130 "pycurl" = super."pycurl".override (attrs: {
132 "pycurl" = super."pycurl".override (attrs: {
131 propagatedBuildInputs = [
133 propagatedBuildInputs = [
132 pkgs.curl
134 pkgs.curl
133 pkgs.openssl
135 pkgs.openssl
134 ];
136 ];
135
137
136 preConfigure = ''
138 preConfigure = ''
137 substituteInPlace setup.py --replace '--static-libs' '--libs'
139 substituteInPlace setup.py --replace '--static-libs' '--libs'
138 export PYCURL_SSL_LIBRARY=openssl
140 export PYCURL_SSL_LIBRARY=openssl
139 '';
141 '';
140
142
141 meta = {
143 meta = {
142 license = pkgs.lib.licenses.mit;
144 license = pkgs.lib.licenses.mit;
143 };
145 };
144 });
146 });
145
147
146 "pyramid" = super."pyramid".override (attrs: {
148 "pyramid" = super."pyramid".override (attrs: {
147 meta = {
149 meta = {
148 license = localLicenses.repoze;
150 license = localLicenses.repoze;
149 };
151 };
150 });
152 });
151
153
152 "pyramid-debugtoolbar" = super."pyramid-debugtoolbar".override (attrs: {
154 "pyramid-debugtoolbar" = super."pyramid-debugtoolbar".override (attrs: {
153 meta = {
155 meta = {
154 license = [ pkgs.lib.licenses.bsdOriginal localLicenses.repoze ];
156 license = [ pkgs.lib.licenses.bsdOriginal localLicenses.repoze ];
155 };
157 };
156 });
158 });
157
159
158 "pysqlite" = super."pysqlite".override (attrs: {
160 "pysqlite" = super."pysqlite".override (attrs: {
159 propagatedBuildInputs = [
161 propagatedBuildInputs = [
160 pkgs.sqlite
162 pkgs.sqlite
161 ];
163 ];
162 meta = {
164 meta = {
163 license = [ pkgs.lib.licenses.zlib pkgs.lib.licenses.libpng ];
165 license = [ pkgs.lib.licenses.zlib pkgs.lib.licenses.libpng ];
164 };
166 };
165 });
167 });
166
168
167 "python-ldap" = super."python-ldap".override (attrs: {
169 "python-ldap" = super."python-ldap".override (attrs: {
168 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
170 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
169 pkgs.openldap
171 pkgs.openldap
170 pkgs.cyrus_sasl
172 pkgs.cyrus_sasl
171 pkgs.openssl
173 pkgs.openssl
172 ];
174 ];
173 });
175 });
174
176
175 "python-pam" = super."python-pam".override (attrs: {
177 "python-pam" = super."python-pam".override (attrs: {
176 propagatedBuildInputs = [
178 propagatedBuildInputs = [
177 pkgs.pam
179 pkgs.pam
178 ];
180 ];
179
181
180 # TODO: johbo: Check if this can be avoided, or transform into
182 # TODO: johbo: Check if this can be avoided, or transform into
181 # a real patch
183 # a real patch
182 patchPhase = ''
184 patchPhase = ''
183 substituteInPlace pam.py \
185 substituteInPlace pam.py \
184 --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"'
186 --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"'
185 '';
187 '';
186
188
187 });
189 });
188
190
189 "python-saml" = super."python-saml".override (attrs: {
191 "python-saml" = super."python-saml".override (attrs: {
190 buildInputs = [
192 buildInputs = [
191 pkgs.libxml2
193 pkgs.libxml2
192 pkgs.libxslt
194 pkgs.libxslt
193 ];
195 ];
194 });
196 });
195
197
196 "dm.xmlsec.binding" = super."dm.xmlsec.binding".override (attrs: {
198 "dm.xmlsec.binding" = super."dm.xmlsec.binding".override (attrs: {
197 buildInputs = [
199 buildInputs = [
198 pkgs.libxml2
200 pkgs.libxml2
199 pkgs.libxslt
201 pkgs.libxslt
200 pkgs.xmlsec
202 pkgs.xmlsec
201 pkgs.libtool
203 pkgs.libtool
202 ];
204 ];
203 });
205 });
204
206
205 "pyzmq" = super."pyzmq".override (attrs: {
207 "pyzmq" = super."pyzmq".override (attrs: {
206 buildInputs = [
208 buildInputs = [
207 pkgs.czmq
209 pkgs.czmq
208 ];
210 ];
209 });
211 });
210
212
211 "urlobject" = super."urlobject".override (attrs: {
213 "urlobject" = super."urlobject".override (attrs: {
212 meta = {
214 meta = {
213 license = {
215 license = {
214 spdxId = "Unlicense";
216 spdxId = "Unlicense";
215 fullName = "The Unlicense";
217 fullName = "The Unlicense";
216 url = http://unlicense.org/;
218 url = http://unlicense.org/;
217 };
219 };
218 };
220 };
219 });
221 });
220
222
221 "docutils" = super."docutils".override (attrs: {
223 "docutils" = super."docutils".override (attrs: {
222 meta = {
224 meta = {
223 license = pkgs.lib.licenses.bsd2;
225 license = pkgs.lib.licenses.bsd2;
224 };
226 };
225 });
227 });
226
228
227 "colander" = super."colander".override (attrs: {
229 "colander" = super."colander".override (attrs: {
228 meta = {
230 meta = {
229 license = localLicenses.repoze;
231 license = localLicenses.repoze;
230 };
232 };
231 });
233 });
232
234
233 "pyramid-beaker" = super."pyramid-beaker".override (attrs: {
235 "pyramid-beaker" = super."pyramid-beaker".override (attrs: {
234 meta = {
236 meta = {
235 license = localLicenses.repoze;
237 license = localLicenses.repoze;
236 };
238 };
237 });
239 });
238
240
239 "pyramid-mako" = super."pyramid-mako".override (attrs: {
241 "pyramid-mako" = super."pyramid-mako".override (attrs: {
240 meta = {
242 meta = {
241 license = localLicenses.repoze;
243 license = localLicenses.repoze;
242 };
244 };
243 });
245 });
244
246
245 "repoze.lru" = super."repoze.lru".override (attrs: {
247 "repoze.lru" = super."repoze.lru".override (attrs: {
246 meta = {
248 meta = {
247 license = localLicenses.repoze;
249 license = localLicenses.repoze;
248 };
250 };
249 });
251 });
250
252
251 "python-editor" = super."python-editor".override (attrs: {
253 "python-editor" = super."python-editor".override (attrs: {
252 meta = {
254 meta = {
253 license = pkgs.lib.licenses.asl20;
255 license = pkgs.lib.licenses.asl20;
254 };
256 };
255 });
257 });
256
258
257 "translationstring" = super."translationstring".override (attrs: {
259 "translationstring" = super."translationstring".override (attrs: {
258 meta = {
260 meta = {
259 license = localLicenses.repoze;
261 license = localLicenses.repoze;
260 };
262 };
261 });
263 });
262
264
263 "venusian" = super."venusian".override (attrs: {
265 "venusian" = super."venusian".override (attrs: {
264 meta = {
266 meta = {
265 license = localLicenses.repoze;
267 license = localLicenses.repoze;
266 };
268 };
267 });
269 });
268
270
269 "supervisor" = super."supervisor".override (attrs: {
271 "supervisor" = super."supervisor".override (attrs: {
270 patches = [
272 patches = [
271 ./patches/supervisor/patch-rlimits-old-kernel.diff
273 ./patches/supervisor/patch-rlimits-old-kernel.diff
272 ];
274 ];
273 });
275 });
274
276
275 # Avoid that base packages screw up the build process
277 # Avoid that base packages screw up the build process
276 inherit (basePythonPackages)
278 inherit (basePythonPackages)
277 setuptools;
279 setuptools;
278
280
279 }
281 }
@@ -1,2444 +1,2497 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "alembic" = super.buildPythonPackage {
7 "alembic" = super.buildPythonPackage {
8 name = "alembic-1.4.2";
8 name = "alembic-1.4.2";
9 doCheck = false;
9 doCheck = false;
10 propagatedBuildInputs = [
10 propagatedBuildInputs = [
11 self."sqlalchemy"
11 self."sqlalchemy"
12 self."mako"
12 self."mako"
13 self."python-editor"
13 self."python-editor"
14 self."python-dateutil"
14 self."python-dateutil"
15 ];
15 ];
16 src = fetchurl {
16 src = fetchurl {
17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
19 };
19 };
20 meta = {
20 meta = {
21 license = [ pkgs.lib.licenses.mit ];
21 license = [ pkgs.lib.licenses.mit ];
22 };
22 };
23 };
23 };
24 "amqp" = super.buildPythonPackage {
24 "amqp" = super.buildPythonPackage {
25 name = "amqp-2.5.2";
25 name = "amqp-2.5.2";
26 doCheck = false;
26 doCheck = false;
27 propagatedBuildInputs = [
27 propagatedBuildInputs = [
28 self."vine"
28 self."vine"
29 ];
29 ];
30 src = fetchurl {
30 src = fetchurl {
31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
33 };
33 };
34 meta = {
34 meta = {
35 license = [ pkgs.lib.licenses.bsdOriginal ];
35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 };
36 };
37 };
37 };
38 "apispec" = super.buildPythonPackage {
39 name = "apispec-1.0.0";
40 doCheck = false;
41 propagatedBuildInputs = [
42 self."PyYAML"
43 ];
44 src = fetchurl {
45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
47 };
48 meta = {
49 license = [ pkgs.lib.licenses.mit ];
50 };
51 };
38 "appenlight-client" = super.buildPythonPackage {
52 "appenlight-client" = super.buildPythonPackage {
39 name = "appenlight-client-0.6.26";
53 name = "appenlight-client-0.6.26";
40 doCheck = false;
54 doCheck = false;
41 propagatedBuildInputs = [
55 propagatedBuildInputs = [
42 self."webob"
56 self."webob"
43 self."requests"
57 self."requests"
44 self."six"
58 self."six"
45 ];
59 ];
46 src = fetchurl {
60 src = fetchurl {
47 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
48 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
49 };
63 };
50 meta = {
64 meta = {
51 license = [ pkgs.lib.licenses.bsdOriginal ];
65 license = [ pkgs.lib.licenses.bsdOriginal ];
52 };
66 };
53 };
67 };
54 "asn1crypto" = super.buildPythonPackage {
68 "asn1crypto" = super.buildPythonPackage {
55 name = "asn1crypto-0.24.0";
69 name = "asn1crypto-0.24.0";
56 doCheck = false;
70 doCheck = false;
57 src = fetchurl {
71 src = fetchurl {
58 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
59 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
60 };
74 };
61 meta = {
75 meta = {
62 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
63 };
77 };
64 };
78 };
65 "atomicwrites" = super.buildPythonPackage {
79 "atomicwrites" = super.buildPythonPackage {
66 name = "atomicwrites-1.3.0";
80 name = "atomicwrites-1.3.0";
67 doCheck = false;
81 doCheck = false;
68 src = fetchurl {
82 src = fetchurl {
69 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
70 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
71 };
85 };
72 meta = {
86 meta = {
73 license = [ pkgs.lib.licenses.mit ];
87 license = [ pkgs.lib.licenses.mit ];
74 };
88 };
75 };
89 };
76 "attrs" = super.buildPythonPackage {
90 "attrs" = super.buildPythonPackage {
77 name = "attrs-19.3.0";
91 name = "attrs-19.3.0";
78 doCheck = false;
92 doCheck = false;
79 src = fetchurl {
93 src = fetchurl {
80 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
81 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
82 };
96 };
83 meta = {
97 meta = {
84 license = [ pkgs.lib.licenses.mit ];
98 license = [ pkgs.lib.licenses.mit ];
85 };
99 };
86 };
100 };
87 "babel" = super.buildPythonPackage {
101 "babel" = super.buildPythonPackage {
88 name = "babel-1.3";
102 name = "babel-1.3";
89 doCheck = false;
103 doCheck = false;
90 propagatedBuildInputs = [
104 propagatedBuildInputs = [
91 self."pytz"
105 self."pytz"
92 ];
106 ];
93 src = fetchurl {
107 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
95 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
96 };
110 };
97 meta = {
111 meta = {
98 license = [ pkgs.lib.licenses.bsdOriginal ];
112 license = [ pkgs.lib.licenses.bsdOriginal ];
99 };
113 };
100 };
114 };
101 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
102 name = "backports.shutil-get-terminal-size-1.0.0";
116 name = "backports.shutil-get-terminal-size-1.0.0";
103 doCheck = false;
117 doCheck = false;
104 src = fetchurl {
118 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
106 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
107 };
121 };
108 meta = {
122 meta = {
109 license = [ pkgs.lib.licenses.mit ];
123 license = [ pkgs.lib.licenses.mit ];
110 };
124 };
111 };
125 };
112 "beaker" = super.buildPythonPackage {
126 "beaker" = super.buildPythonPackage {
113 name = "beaker-1.9.1";
127 name = "beaker-1.9.1";
114 doCheck = false;
128 doCheck = false;
115 propagatedBuildInputs = [
129 propagatedBuildInputs = [
116 self."funcsigs"
130 self."funcsigs"
117 ];
131 ];
118 src = fetchurl {
132 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
120 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
121 };
135 };
122 meta = {
136 meta = {
123 license = [ pkgs.lib.licenses.bsdOriginal ];
137 license = [ pkgs.lib.licenses.bsdOriginal ];
124 };
138 };
125 };
139 };
126 "beautifulsoup4" = super.buildPythonPackage {
140 "beautifulsoup4" = super.buildPythonPackage {
127 name = "beautifulsoup4-4.6.3";
141 name = "beautifulsoup4-4.6.3";
128 doCheck = false;
142 doCheck = false;
129 src = fetchurl {
143 src = fetchurl {
130 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
131 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
132 };
146 };
133 meta = {
147 meta = {
134 license = [ pkgs.lib.licenses.mit ];
148 license = [ pkgs.lib.licenses.mit ];
135 };
149 };
136 };
150 };
137 "billiard" = super.buildPythonPackage {
151 "billiard" = super.buildPythonPackage {
138 name = "billiard-3.6.1.0";
152 name = "billiard-3.6.1.0";
139 doCheck = false;
153 doCheck = false;
140 src = fetchurl {
154 src = fetchurl {
141 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
142 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
143 };
157 };
144 meta = {
158 meta = {
145 license = [ pkgs.lib.licenses.bsdOriginal ];
159 license = [ pkgs.lib.licenses.bsdOriginal ];
146 };
160 };
147 };
161 };
148 "bleach" = super.buildPythonPackage {
162 "bleach" = super.buildPythonPackage {
149 name = "bleach-3.1.3";
163 name = "bleach-3.1.3";
150 doCheck = false;
164 doCheck = false;
151 propagatedBuildInputs = [
165 propagatedBuildInputs = [
152 self."six"
166 self."six"
153 self."webencodings"
167 self."webencodings"
154 ];
168 ];
155 src = fetchurl {
169 src = fetchurl {
156 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
157 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
158 };
172 };
159 meta = {
173 meta = {
160 license = [ pkgs.lib.licenses.asl20 ];
174 license = [ pkgs.lib.licenses.asl20 ];
161 };
175 };
162 };
176 };
163 "bumpversion" = super.buildPythonPackage {
177 "bumpversion" = super.buildPythonPackage {
164 name = "bumpversion-0.5.3";
178 name = "bumpversion-0.5.3";
165 doCheck = false;
179 doCheck = false;
166 src = fetchurl {
180 src = fetchurl {
167 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
168 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
169 };
183 };
170 meta = {
184 meta = {
171 license = [ pkgs.lib.licenses.mit ];
185 license = [ pkgs.lib.licenses.mit ];
172 };
186 };
173 };
187 };
174 "cachetools" = super.buildPythonPackage {
188 "cachetools" = super.buildPythonPackage {
175 name = "cachetools-3.1.1";
189 name = "cachetools-3.1.1";
176 doCheck = false;
190 doCheck = false;
177 src = fetchurl {
191 src = fetchurl {
178 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
179 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
180 };
194 };
181 meta = {
195 meta = {
182 license = [ pkgs.lib.licenses.mit ];
196 license = [ pkgs.lib.licenses.mit ];
183 };
197 };
184 };
198 };
185 "celery" = super.buildPythonPackage {
199 "celery" = super.buildPythonPackage {
186 name = "celery-4.3.0";
200 name = "celery-4.3.0";
187 doCheck = false;
201 doCheck = false;
188 propagatedBuildInputs = [
202 propagatedBuildInputs = [
189 self."pytz"
203 self."pytz"
190 self."billiard"
204 self."billiard"
191 self."kombu"
205 self."kombu"
192 self."vine"
206 self."vine"
193 ];
207 ];
194 src = fetchurl {
208 src = fetchurl {
195 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
196 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
197 };
211 };
198 meta = {
212 meta = {
199 license = [ pkgs.lib.licenses.bsdOriginal ];
213 license = [ pkgs.lib.licenses.bsdOriginal ];
200 };
214 };
201 };
215 };
202 "certifi" = super.buildPythonPackage {
216 "certifi" = super.buildPythonPackage {
203 name = "certifi-2020.4.5.1";
217 name = "certifi-2020.4.5.1";
204 doCheck = false;
218 doCheck = false;
205 src = fetchurl {
219 src = fetchurl {
206 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
207 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
208 };
222 };
209 meta = {
223 meta = {
210 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
211 };
225 };
212 };
226 };
213 "cffi" = super.buildPythonPackage {
227 "cffi" = super.buildPythonPackage {
214 name = "cffi-1.12.3";
228 name = "cffi-1.12.3";
215 doCheck = false;
229 doCheck = false;
216 propagatedBuildInputs = [
230 propagatedBuildInputs = [
217 self."pycparser"
231 self."pycparser"
218 ];
232 ];
219 src = fetchurl {
233 src = fetchurl {
220 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
221 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
222 };
236 };
223 meta = {
237 meta = {
224 license = [ pkgs.lib.licenses.mit ];
238 license = [ pkgs.lib.licenses.mit ];
225 };
239 };
226 };
240 };
227 "chameleon" = super.buildPythonPackage {
241 "chameleon" = super.buildPythonPackage {
228 name = "chameleon-2.24";
242 name = "chameleon-2.24";
229 doCheck = false;
243 doCheck = false;
230 src = fetchurl {
244 src = fetchurl {
231 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
232 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
233 };
247 };
234 meta = {
248 meta = {
235 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
236 };
250 };
237 };
251 };
238 "channelstream" = super.buildPythonPackage {
252 "channelstream" = super.buildPythonPackage {
239 name = "channelstream-0.5.2";
253 name = "channelstream-0.6.14";
240 doCheck = false;
254 doCheck = false;
241 propagatedBuildInputs = [
255 propagatedBuildInputs = [
242 self."gevent"
256 self."gevent"
243 self."ws4py"
257 self."ws4py"
258 self."marshmallow"
259 self."python-dateutil"
244 self."pyramid"
260 self."pyramid"
245 self."pyramid-jinja2"
261 self."pyramid-jinja2"
262 self."pyramid-apispec"
246 self."itsdangerous"
263 self."itsdangerous"
247 self."requests"
264 self."requests"
248 self."six"
265 self."six"
249 ];
266 ];
250 src = fetchurl {
267 src = fetchurl {
251 url = "https://files.pythonhosted.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
252 sha256 = "1qbm4xdl5hfkja683x546bncg3rqq8qv79w1m1a1wd48cqqzb6rm";
269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
253 };
270 };
254 meta = {
271 meta = {
255 license = [ pkgs.lib.licenses.bsdOriginal ];
272 license = [ pkgs.lib.licenses.bsdOriginal ];
256 };
273 };
257 };
274 };
258 "chardet" = super.buildPythonPackage {
275 "chardet" = super.buildPythonPackage {
259 name = "chardet-3.0.4";
276 name = "chardet-3.0.4";
260 doCheck = false;
277 doCheck = false;
261 src = fetchurl {
278 src = fetchurl {
262 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
263 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
264 };
281 };
265 meta = {
282 meta = {
266 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
267 };
284 };
268 };
285 };
269 "click" = super.buildPythonPackage {
286 "click" = super.buildPythonPackage {
270 name = "click-7.0";
287 name = "click-7.0";
271 doCheck = false;
288 doCheck = false;
272 src = fetchurl {
289 src = fetchurl {
273 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
274 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
275 };
292 };
276 meta = {
293 meta = {
277 license = [ pkgs.lib.licenses.bsdOriginal ];
294 license = [ pkgs.lib.licenses.bsdOriginal ];
278 };
295 };
279 };
296 };
280 "colander" = super.buildPythonPackage {
297 "colander" = super.buildPythonPackage {
281 name = "colander-1.7.0";
298 name = "colander-1.7.0";
282 doCheck = false;
299 doCheck = false;
283 propagatedBuildInputs = [
300 propagatedBuildInputs = [
284 self."translationstring"
301 self."translationstring"
285 self."iso8601"
302 self."iso8601"
286 self."enum34"
303 self."enum34"
287 ];
304 ];
288 src = fetchurl {
305 src = fetchurl {
289 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
290 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
291 };
308 };
292 meta = {
309 meta = {
293 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
294 };
311 };
295 };
312 };
296 "configobj" = super.buildPythonPackage {
313 "configobj" = super.buildPythonPackage {
297 name = "configobj-5.0.6";
314 name = "configobj-5.0.6";
298 doCheck = false;
315 doCheck = false;
299 propagatedBuildInputs = [
316 propagatedBuildInputs = [
300 self."six"
317 self."six"
301 ];
318 ];
302 src = fetchurl {
319 src = fetchurl {
303 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
304 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
305 };
322 };
306 meta = {
323 meta = {
307 license = [ pkgs.lib.licenses.bsdOriginal ];
324 license = [ pkgs.lib.licenses.bsdOriginal ];
308 };
325 };
309 };
326 };
310 "configparser" = super.buildPythonPackage {
327 "configparser" = super.buildPythonPackage {
311 name = "configparser-4.0.2";
328 name = "configparser-4.0.2";
312 doCheck = false;
329 doCheck = false;
313 src = fetchurl {
330 src = fetchurl {
314 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
315 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
316 };
333 };
317 meta = {
334 meta = {
318 license = [ pkgs.lib.licenses.mit ];
335 license = [ pkgs.lib.licenses.mit ];
319 };
336 };
320 };
337 };
321 "contextlib2" = super.buildPythonPackage {
338 "contextlib2" = super.buildPythonPackage {
322 name = "contextlib2-0.6.0.post1";
339 name = "contextlib2-0.6.0.post1";
323 doCheck = false;
340 doCheck = false;
324 src = fetchurl {
341 src = fetchurl {
325 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
326 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
327 };
344 };
328 meta = {
345 meta = {
329 license = [ pkgs.lib.licenses.psfl ];
346 license = [ pkgs.lib.licenses.psfl ];
330 };
347 };
331 };
348 };
332 "cov-core" = super.buildPythonPackage {
349 "cov-core" = super.buildPythonPackage {
333 name = "cov-core-1.15.0";
350 name = "cov-core-1.15.0";
334 doCheck = false;
351 doCheck = false;
335 propagatedBuildInputs = [
352 propagatedBuildInputs = [
336 self."coverage"
353 self."coverage"
337 ];
354 ];
338 src = fetchurl {
355 src = fetchurl {
339 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
340 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
341 };
358 };
342 meta = {
359 meta = {
343 license = [ pkgs.lib.licenses.mit ];
360 license = [ pkgs.lib.licenses.mit ];
344 };
361 };
345 };
362 };
346 "coverage" = super.buildPythonPackage {
363 "coverage" = super.buildPythonPackage {
347 name = "coverage-4.5.4";
364 name = "coverage-4.5.4";
348 doCheck = false;
365 doCheck = false;
349 src = fetchurl {
366 src = fetchurl {
350 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
351 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
352 };
369 };
353 meta = {
370 meta = {
354 license = [ pkgs.lib.licenses.asl20 ];
371 license = [ pkgs.lib.licenses.asl20 ];
355 };
372 };
356 };
373 };
357 "cryptography" = super.buildPythonPackage {
374 "cryptography" = super.buildPythonPackage {
358 name = "cryptography-2.6.1";
375 name = "cryptography-2.6.1";
359 doCheck = false;
376 doCheck = false;
360 propagatedBuildInputs = [
377 propagatedBuildInputs = [
361 self."asn1crypto"
378 self."asn1crypto"
362 self."six"
379 self."six"
363 self."cffi"
380 self."cffi"
364 self."enum34"
381 self."enum34"
365 self."ipaddress"
382 self."ipaddress"
366 ];
383 ];
367 src = fetchurl {
384 src = fetchurl {
368 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
369 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
370 };
387 };
371 meta = {
388 meta = {
372 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
373 };
390 };
374 };
391 };
375 "cssselect" = super.buildPythonPackage {
392 "cssselect" = super.buildPythonPackage {
376 name = "cssselect-1.0.3";
393 name = "cssselect-1.0.3";
377 doCheck = false;
394 doCheck = false;
378 src = fetchurl {
395 src = fetchurl {
379 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
380 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
381 };
398 };
382 meta = {
399 meta = {
383 license = [ pkgs.lib.licenses.bsdOriginal ];
400 license = [ pkgs.lib.licenses.bsdOriginal ];
384 };
401 };
385 };
402 };
386 "cssutils" = super.buildPythonPackage {
403 "cssutils" = super.buildPythonPackage {
387 name = "cssutils-1.0.2";
404 name = "cssutils-1.0.2";
388 doCheck = false;
405 doCheck = false;
389 src = fetchurl {
406 src = fetchurl {
390 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
391 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
392 };
409 };
393 meta = {
410 meta = {
394 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
395 };
412 };
396 };
413 };
397 "decorator" = super.buildPythonPackage {
414 "decorator" = super.buildPythonPackage {
398 name = "decorator-4.1.2";
415 name = "decorator-4.1.2";
399 doCheck = false;
416 doCheck = false;
400 src = fetchurl {
417 src = fetchurl {
401 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
402 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
403 };
420 };
404 meta = {
421 meta = {
405 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
406 };
423 };
407 };
424 };
408 "deform" = super.buildPythonPackage {
425 "deform" = super.buildPythonPackage {
409 name = "deform-2.0.8";
426 name = "deform-2.0.8";
410 doCheck = false;
427 doCheck = false;
411 propagatedBuildInputs = [
428 propagatedBuildInputs = [
412 self."chameleon"
429 self."chameleon"
413 self."colander"
430 self."colander"
414 self."iso8601"
431 self."iso8601"
415 self."peppercorn"
432 self."peppercorn"
416 self."translationstring"
433 self."translationstring"
417 self."zope.deprecation"
434 self."zope.deprecation"
418 ];
435 ];
419 src = fetchurl {
436 src = fetchurl {
420 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
421 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
422 };
439 };
423 meta = {
440 meta = {
424 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
425 };
442 };
426 };
443 };
427 "defusedxml" = super.buildPythonPackage {
444 "defusedxml" = super.buildPythonPackage {
428 name = "defusedxml-0.6.0";
445 name = "defusedxml-0.6.0";
429 doCheck = false;
446 doCheck = false;
430 src = fetchurl {
447 src = fetchurl {
431 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
432 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
433 };
450 };
434 meta = {
451 meta = {
435 license = [ pkgs.lib.licenses.psfl ];
452 license = [ pkgs.lib.licenses.psfl ];
436 };
453 };
437 };
454 };
438 "dm.xmlsec.binding" = super.buildPythonPackage {
455 "dm.xmlsec.binding" = super.buildPythonPackage {
439 name = "dm.xmlsec.binding-1.3.7";
456 name = "dm.xmlsec.binding-1.3.7";
440 doCheck = false;
457 doCheck = false;
441 propagatedBuildInputs = [
458 propagatedBuildInputs = [
442 self."setuptools"
459 self."setuptools"
443 self."lxml"
460 self."lxml"
444 ];
461 ];
445 src = fetchurl {
462 src = fetchurl {
446 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
447 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
448 };
465 };
449 meta = {
466 meta = {
450 license = [ pkgs.lib.licenses.bsdOriginal ];
467 license = [ pkgs.lib.licenses.bsdOriginal ];
451 };
468 };
452 };
469 };
453 "docutils" = super.buildPythonPackage {
470 "docutils" = super.buildPythonPackage {
454 name = "docutils-0.16";
471 name = "docutils-0.16";
455 doCheck = false;
472 doCheck = false;
456 src = fetchurl {
473 src = fetchurl {
457 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
458 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
459 };
476 };
460 meta = {
477 meta = {
461 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
462 };
479 };
463 };
480 };
464 "dogpile.cache" = super.buildPythonPackage {
481 "dogpile.cache" = super.buildPythonPackage {
465 name = "dogpile.cache-0.9.0";
482 name = "dogpile.cache-0.9.0";
466 doCheck = false;
483 doCheck = false;
467 propagatedBuildInputs = [
484 propagatedBuildInputs = [
468 self."decorator"
485 self."decorator"
469 ];
486 ];
470 src = fetchurl {
487 src = fetchurl {
471 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
472 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
473 };
490 };
474 meta = {
491 meta = {
475 license = [ pkgs.lib.licenses.bsdOriginal ];
492 license = [ pkgs.lib.licenses.bsdOriginal ];
476 };
493 };
477 };
494 };
478 "dogpile.core" = super.buildPythonPackage {
495 "dogpile.core" = super.buildPythonPackage {
479 name = "dogpile.core-0.4.1";
496 name = "dogpile.core-0.4.1";
480 doCheck = false;
497 doCheck = false;
481 src = fetchurl {
498 src = fetchurl {
482 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
483 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
484 };
501 };
485 meta = {
502 meta = {
486 license = [ pkgs.lib.licenses.bsdOriginal ];
503 license = [ pkgs.lib.licenses.bsdOriginal ];
487 };
504 };
488 };
505 };
489 "ecdsa" = super.buildPythonPackage {
506 "ecdsa" = super.buildPythonPackage {
490 name = "ecdsa-0.13.2";
507 name = "ecdsa-0.13.2";
491 doCheck = false;
508 doCheck = false;
492 src = fetchurl {
509 src = fetchurl {
493 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
494 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
495 };
512 };
496 meta = {
513 meta = {
497 license = [ pkgs.lib.licenses.mit ];
514 license = [ pkgs.lib.licenses.mit ];
498 };
515 };
499 };
516 };
500 "elasticsearch" = super.buildPythonPackage {
517 "elasticsearch" = super.buildPythonPackage {
501 name = "elasticsearch-6.3.1";
518 name = "elasticsearch-6.3.1";
502 doCheck = false;
519 doCheck = false;
503 propagatedBuildInputs = [
520 propagatedBuildInputs = [
504 self."urllib3"
521 self."urllib3"
505 ];
522 ];
506 src = fetchurl {
523 src = fetchurl {
507 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
508 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
509 };
526 };
510 meta = {
527 meta = {
511 license = [ pkgs.lib.licenses.asl20 ];
528 license = [ pkgs.lib.licenses.asl20 ];
512 };
529 };
513 };
530 };
514 "elasticsearch-dsl" = super.buildPythonPackage {
531 "elasticsearch-dsl" = super.buildPythonPackage {
515 name = "elasticsearch-dsl-6.3.1";
532 name = "elasticsearch-dsl-6.3.1";
516 doCheck = false;
533 doCheck = false;
517 propagatedBuildInputs = [
534 propagatedBuildInputs = [
518 self."six"
535 self."six"
519 self."python-dateutil"
536 self."python-dateutil"
520 self."elasticsearch"
537 self."elasticsearch"
521 self."ipaddress"
538 self."ipaddress"
522 ];
539 ];
523 src = fetchurl {
540 src = fetchurl {
524 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
525 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
526 };
543 };
527 meta = {
544 meta = {
528 license = [ pkgs.lib.licenses.asl20 ];
545 license = [ pkgs.lib.licenses.asl20 ];
529 };
546 };
530 };
547 };
531 "elasticsearch1" = super.buildPythonPackage {
548 "elasticsearch1" = super.buildPythonPackage {
532 name = "elasticsearch1-1.10.0";
549 name = "elasticsearch1-1.10.0";
533 doCheck = false;
550 doCheck = false;
534 propagatedBuildInputs = [
551 propagatedBuildInputs = [
535 self."urllib3"
552 self."urllib3"
536 ];
553 ];
537 src = fetchurl {
554 src = fetchurl {
538 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
539 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
540 };
557 };
541 meta = {
558 meta = {
542 license = [ pkgs.lib.licenses.asl20 ];
559 license = [ pkgs.lib.licenses.asl20 ];
543 };
560 };
544 };
561 };
545 "elasticsearch1-dsl" = super.buildPythonPackage {
562 "elasticsearch1-dsl" = super.buildPythonPackage {
546 name = "elasticsearch1-dsl-0.0.12";
563 name = "elasticsearch1-dsl-0.0.12";
547 doCheck = false;
564 doCheck = false;
548 propagatedBuildInputs = [
565 propagatedBuildInputs = [
549 self."six"
566 self."six"
550 self."python-dateutil"
567 self."python-dateutil"
551 self."elasticsearch1"
568 self."elasticsearch1"
552 ];
569 ];
553 src = fetchurl {
570 src = fetchurl {
554 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
555 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
556 };
573 };
557 meta = {
574 meta = {
558 license = [ pkgs.lib.licenses.asl20 ];
575 license = [ pkgs.lib.licenses.asl20 ];
559 };
576 };
560 };
577 };
561 "elasticsearch2" = super.buildPythonPackage {
578 "elasticsearch2" = super.buildPythonPackage {
562 name = "elasticsearch2-2.5.1";
579 name = "elasticsearch2-2.5.1";
563 doCheck = false;
580 doCheck = false;
564 propagatedBuildInputs = [
581 propagatedBuildInputs = [
565 self."urllib3"
582 self."urllib3"
566 ];
583 ];
567 src = fetchurl {
584 src = fetchurl {
568 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
569 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
570 };
587 };
571 meta = {
588 meta = {
572 license = [ pkgs.lib.licenses.asl20 ];
589 license = [ pkgs.lib.licenses.asl20 ];
573 };
590 };
574 };
591 };
575 "entrypoints" = super.buildPythonPackage {
592 "entrypoints" = super.buildPythonPackage {
576 name = "entrypoints-0.2.2";
593 name = "entrypoints-0.2.2";
577 doCheck = false;
594 doCheck = false;
578 propagatedBuildInputs = [
595 propagatedBuildInputs = [
579 self."configparser"
596 self."configparser"
580 ];
597 ];
581 src = fetchurl {
598 src = fetchurl {
582 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
583 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
584 };
601 };
585 meta = {
602 meta = {
586 license = [ pkgs.lib.licenses.mit ];
603 license = [ pkgs.lib.licenses.mit ];
587 };
604 };
588 };
605 };
589 "enum34" = super.buildPythonPackage {
606 "enum34" = super.buildPythonPackage {
590 name = "enum34-1.1.10";
607 name = "enum34-1.1.10";
591 doCheck = false;
608 doCheck = false;
592 src = fetchurl {
609 src = fetchurl {
593 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
594 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
595 };
612 };
596 meta = {
613 meta = {
597 license = [ pkgs.lib.licenses.bsdOriginal ];
614 license = [ pkgs.lib.licenses.bsdOriginal ];
598 };
615 };
599 };
616 };
600 "formencode" = super.buildPythonPackage {
617 "formencode" = super.buildPythonPackage {
601 name = "formencode-1.2.4";
618 name = "formencode-1.2.4";
602 doCheck = false;
619 doCheck = false;
603 src = fetchurl {
620 src = fetchurl {
604 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
605 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
606 };
623 };
607 meta = {
624 meta = {
608 license = [ pkgs.lib.licenses.psfl ];
625 license = [ pkgs.lib.licenses.psfl ];
609 };
626 };
610 };
627 };
611 "funcsigs" = super.buildPythonPackage {
628 "funcsigs" = super.buildPythonPackage {
612 name = "funcsigs-1.0.2";
629 name = "funcsigs-1.0.2";
613 doCheck = false;
630 doCheck = false;
614 src = fetchurl {
631 src = fetchurl {
615 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
616 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
617 };
634 };
618 meta = {
635 meta = {
619 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
620 };
637 };
621 };
638 };
622 "functools32" = super.buildPythonPackage {
639 "functools32" = super.buildPythonPackage {
623 name = "functools32-3.2.3.post2";
640 name = "functools32-3.2.3.post2";
624 doCheck = false;
641 doCheck = false;
625 src = fetchurl {
642 src = fetchurl {
626 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
627 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
628 };
645 };
629 meta = {
646 meta = {
630 license = [ pkgs.lib.licenses.psfl ];
647 license = [ pkgs.lib.licenses.psfl ];
631 };
648 };
632 };
649 };
633 "future" = super.buildPythonPackage {
650 "future" = super.buildPythonPackage {
634 name = "future-0.14.3";
651 name = "future-0.14.3";
635 doCheck = false;
652 doCheck = false;
636 src = fetchurl {
653 src = fetchurl {
637 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
638 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
639 };
656 };
640 meta = {
657 meta = {
641 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
642 };
659 };
643 };
660 };
644 "futures" = super.buildPythonPackage {
661 "futures" = super.buildPythonPackage {
645 name = "futures-3.0.2";
662 name = "futures-3.0.2";
646 doCheck = false;
663 doCheck = false;
647 src = fetchurl {
664 src = fetchurl {
648 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
649 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
650 };
667 };
651 meta = {
668 meta = {
652 license = [ pkgs.lib.licenses.bsdOriginal ];
669 license = [ pkgs.lib.licenses.bsdOriginal ];
653 };
670 };
654 };
671 };
655 "gevent" = super.buildPythonPackage {
672 "gevent" = super.buildPythonPackage {
656 name = "gevent-1.5.0";
673 name = "gevent-1.5.0";
657 doCheck = false;
674 doCheck = false;
658 propagatedBuildInputs = [
675 propagatedBuildInputs = [
659 self."greenlet"
676 self."greenlet"
660 ];
677 ];
661 src = fetchurl {
678 src = fetchurl {
662 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
663 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
664 };
681 };
665 meta = {
682 meta = {
666 license = [ pkgs.lib.licenses.mit ];
683 license = [ pkgs.lib.licenses.mit ];
667 };
684 };
668 };
685 };
669 "gnureadline" = super.buildPythonPackage {
686 "gnureadline" = super.buildPythonPackage {
670 name = "gnureadline-6.3.8";
687 name = "gnureadline-6.3.8";
671 doCheck = false;
688 doCheck = false;
672 src = fetchurl {
689 src = fetchurl {
673 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
674 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
675 };
692 };
676 meta = {
693 meta = {
677 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
678 };
695 };
679 };
696 };
680 "gprof2dot" = super.buildPythonPackage {
697 "gprof2dot" = super.buildPythonPackage {
681 name = "gprof2dot-2017.9.19";
698 name = "gprof2dot-2017.9.19";
682 doCheck = false;
699 doCheck = false;
683 src = fetchurl {
700 src = fetchurl {
684 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
685 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
686 };
703 };
687 meta = {
704 meta = {
688 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
689 };
706 };
690 };
707 };
691 "greenlet" = super.buildPythonPackage {
708 "greenlet" = super.buildPythonPackage {
692 name = "greenlet-0.4.15";
709 name = "greenlet-0.4.15";
693 doCheck = false;
710 doCheck = false;
694 src = fetchurl {
711 src = fetchurl {
695 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
696 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
697 };
714 };
698 meta = {
715 meta = {
699 license = [ pkgs.lib.licenses.mit ];
716 license = [ pkgs.lib.licenses.mit ];
700 };
717 };
701 };
718 };
702 "gunicorn" = super.buildPythonPackage {
719 "gunicorn" = super.buildPythonPackage {
703 name = "gunicorn-19.9.0";
720 name = "gunicorn-19.9.0";
704 doCheck = false;
721 doCheck = false;
705 src = fetchurl {
722 src = fetchurl {
706 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
707 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
708 };
725 };
709 meta = {
726 meta = {
710 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
711 };
728 };
712 };
729 };
713 "hupper" = super.buildPythonPackage {
730 "hupper" = super.buildPythonPackage {
714 name = "hupper-1.10.2";
731 name = "hupper-1.10.2";
715 doCheck = false;
732 doCheck = false;
716 src = fetchurl {
733 src = fetchurl {
717 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
718 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
719 };
736 };
720 meta = {
737 meta = {
721 license = [ pkgs.lib.licenses.mit ];
738 license = [ pkgs.lib.licenses.mit ];
722 };
739 };
723 };
740 };
724 "idna" = super.buildPythonPackage {
741 "idna" = super.buildPythonPackage {
725 name = "idna-2.8";
742 name = "idna-2.8";
726 doCheck = false;
743 doCheck = false;
727 src = fetchurl {
744 src = fetchurl {
728 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
729 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
730 };
747 };
731 meta = {
748 meta = {
732 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
733 };
750 };
734 };
751 };
735 "importlib-metadata" = super.buildPythonPackage {
752 "importlib-metadata" = super.buildPythonPackage {
736 name = "importlib-metadata-1.6.0";
753 name = "importlib-metadata-1.6.0";
737 doCheck = false;
754 doCheck = false;
738 propagatedBuildInputs = [
755 propagatedBuildInputs = [
739 self."zipp"
756 self."zipp"
740 self."pathlib2"
757 self."pathlib2"
741 self."contextlib2"
758 self."contextlib2"
742 self."configparser"
759 self."configparser"
743 ];
760 ];
744 src = fetchurl {
761 src = fetchurl {
745 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
746 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
747 };
764 };
748 meta = {
765 meta = {
749 license = [ pkgs.lib.licenses.asl20 ];
766 license = [ pkgs.lib.licenses.asl20 ];
750 };
767 };
751 };
768 };
752 "infrae.cache" = super.buildPythonPackage {
769 "infrae.cache" = super.buildPythonPackage {
753 name = "infrae.cache-1.0.1";
770 name = "infrae.cache-1.0.1";
754 doCheck = false;
771 doCheck = false;
755 propagatedBuildInputs = [
772 propagatedBuildInputs = [
756 self."beaker"
773 self."beaker"
757 self."repoze.lru"
774 self."repoze.lru"
758 ];
775 ];
759 src = fetchurl {
776 src = fetchurl {
760 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
761 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
762 };
779 };
763 meta = {
780 meta = {
764 license = [ pkgs.lib.licenses.zpl21 ];
781 license = [ pkgs.lib.licenses.zpl21 ];
765 };
782 };
766 };
783 };
767 "invoke" = super.buildPythonPackage {
784 "invoke" = super.buildPythonPackage {
768 name = "invoke-0.13.0";
785 name = "invoke-0.13.0";
769 doCheck = false;
786 doCheck = false;
770 src = fetchurl {
787 src = fetchurl {
771 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
772 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
773 };
790 };
774 meta = {
791 meta = {
775 license = [ pkgs.lib.licenses.bsdOriginal ];
792 license = [ pkgs.lib.licenses.bsdOriginal ];
776 };
793 };
777 };
794 };
778 "ipaddress" = super.buildPythonPackage {
795 "ipaddress" = super.buildPythonPackage {
779 name = "ipaddress-1.0.23";
796 name = "ipaddress-1.0.23";
780 doCheck = false;
797 doCheck = false;
781 src = fetchurl {
798 src = fetchurl {
782 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
783 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
784 };
801 };
785 meta = {
802 meta = {
786 license = [ pkgs.lib.licenses.psfl ];
803 license = [ pkgs.lib.licenses.psfl ];
787 };
804 };
788 };
805 };
789 "ipdb" = super.buildPythonPackage {
806 "ipdb" = super.buildPythonPackage {
790 name = "ipdb-0.13.2";
807 name = "ipdb-0.13.2";
791 doCheck = false;
808 doCheck = false;
792 propagatedBuildInputs = [
809 propagatedBuildInputs = [
793 self."setuptools"
810 self."setuptools"
794 self."ipython"
811 self."ipython"
795 ];
812 ];
796 src = fetchurl {
813 src = fetchurl {
797 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
798 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
799 };
816 };
800 meta = {
817 meta = {
801 license = [ pkgs.lib.licenses.bsdOriginal ];
818 license = [ pkgs.lib.licenses.bsdOriginal ];
802 };
819 };
803 };
820 };
804 "ipython" = super.buildPythonPackage {
821 "ipython" = super.buildPythonPackage {
805 name = "ipython-5.1.0";
822 name = "ipython-5.1.0";
806 doCheck = false;
823 doCheck = false;
807 propagatedBuildInputs = [
824 propagatedBuildInputs = [
808 self."setuptools"
825 self."setuptools"
809 self."decorator"
826 self."decorator"
810 self."pickleshare"
827 self."pickleshare"
811 self."simplegeneric"
828 self."simplegeneric"
812 self."traitlets"
829 self."traitlets"
813 self."prompt-toolkit"
830 self."prompt-toolkit"
814 self."pygments"
831 self."pygments"
815 self."pexpect"
832 self."pexpect"
816 self."backports.shutil-get-terminal-size"
833 self."backports.shutil-get-terminal-size"
817 self."pathlib2"
834 self."pathlib2"
818 self."pexpect"
835 self."pexpect"
819 ];
836 ];
820 src = fetchurl {
837 src = fetchurl {
821 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
822 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
823 };
840 };
824 meta = {
841 meta = {
825 license = [ pkgs.lib.licenses.bsdOriginal ];
842 license = [ pkgs.lib.licenses.bsdOriginal ];
826 };
843 };
827 };
844 };
828 "ipython-genutils" = super.buildPythonPackage {
845 "ipython-genutils" = super.buildPythonPackage {
829 name = "ipython-genutils-0.2.0";
846 name = "ipython-genutils-0.2.0";
830 doCheck = false;
847 doCheck = false;
831 src = fetchurl {
848 src = fetchurl {
832 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
833 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
834 };
851 };
835 meta = {
852 meta = {
836 license = [ pkgs.lib.licenses.bsdOriginal ];
853 license = [ pkgs.lib.licenses.bsdOriginal ];
837 };
854 };
838 };
855 };
839 "iso8601" = super.buildPythonPackage {
856 "iso8601" = super.buildPythonPackage {
840 name = "iso8601-0.1.12";
857 name = "iso8601-0.1.12";
841 doCheck = false;
858 doCheck = false;
842 src = fetchurl {
859 src = fetchurl {
843 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
844 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
845 };
862 };
846 meta = {
863 meta = {
847 license = [ pkgs.lib.licenses.mit ];
864 license = [ pkgs.lib.licenses.mit ];
848 };
865 };
849 };
866 };
850 "isodate" = super.buildPythonPackage {
867 "isodate" = super.buildPythonPackage {
851 name = "isodate-0.6.0";
868 name = "isodate-0.6.0";
852 doCheck = false;
869 doCheck = false;
853 propagatedBuildInputs = [
870 propagatedBuildInputs = [
854 self."six"
871 self."six"
855 ];
872 ];
856 src = fetchurl {
873 src = fetchurl {
857 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
858 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
859 };
876 };
860 meta = {
877 meta = {
861 license = [ pkgs.lib.licenses.bsdOriginal ];
878 license = [ pkgs.lib.licenses.bsdOriginal ];
862 };
879 };
863 };
880 };
864 "itsdangerous" = super.buildPythonPackage {
881 "itsdangerous" = super.buildPythonPackage {
865 name = "itsdangerous-0.24";
882 name = "itsdangerous-1.1.0";
866 doCheck = false;
883 doCheck = false;
867 src = fetchurl {
884 src = fetchurl {
868 url = "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
869 sha256 = "06856q6x675ly542ig0plbqcyab6ksfzijlyf1hzhgg3sgwgrcyb";
886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
870 };
887 };
871 meta = {
888 meta = {
872 license = [ pkgs.lib.licenses.bsdOriginal ];
889 license = [ pkgs.lib.licenses.bsdOriginal ];
873 };
890 };
874 };
891 };
875 "jinja2" = super.buildPythonPackage {
892 "jinja2" = super.buildPythonPackage {
876 name = "jinja2-2.9.6";
893 name = "jinja2-2.9.6";
877 doCheck = false;
894 doCheck = false;
878 propagatedBuildInputs = [
895 propagatedBuildInputs = [
879 self."markupsafe"
896 self."markupsafe"
880 ];
897 ];
881 src = fetchurl {
898 src = fetchurl {
882 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
883 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
884 };
901 };
885 meta = {
902 meta = {
886 license = [ pkgs.lib.licenses.bsdOriginal ];
903 license = [ pkgs.lib.licenses.bsdOriginal ];
887 };
904 };
888 };
905 };
889 "jsonschema" = super.buildPythonPackage {
906 "jsonschema" = super.buildPythonPackage {
890 name = "jsonschema-2.6.0";
907 name = "jsonschema-2.6.0";
891 doCheck = false;
908 doCheck = false;
892 propagatedBuildInputs = [
909 propagatedBuildInputs = [
893 self."functools32"
910 self."functools32"
894 ];
911 ];
895 src = fetchurl {
912 src = fetchurl {
896 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
897 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
898 };
915 };
899 meta = {
916 meta = {
900 license = [ pkgs.lib.licenses.mit ];
917 license = [ pkgs.lib.licenses.mit ];
901 };
918 };
902 };
919 };
903 "jupyter-client" = super.buildPythonPackage {
920 "jupyter-client" = super.buildPythonPackage {
904 name = "jupyter-client-5.0.0";
921 name = "jupyter-client-5.0.0";
905 doCheck = false;
922 doCheck = false;
906 propagatedBuildInputs = [
923 propagatedBuildInputs = [
907 self."traitlets"
924 self."traitlets"
908 self."jupyter-core"
925 self."jupyter-core"
909 self."pyzmq"
926 self."pyzmq"
910 self."python-dateutil"
927 self."python-dateutil"
911 ];
928 ];
912 src = fetchurl {
929 src = fetchurl {
913 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
914 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
915 };
932 };
916 meta = {
933 meta = {
917 license = [ pkgs.lib.licenses.bsdOriginal ];
934 license = [ pkgs.lib.licenses.bsdOriginal ];
918 };
935 };
919 };
936 };
920 "jupyter-core" = super.buildPythonPackage {
937 "jupyter-core" = super.buildPythonPackage {
921 name = "jupyter-core-4.5.0";
938 name = "jupyter-core-4.5.0";
922 doCheck = false;
939 doCheck = false;
923 propagatedBuildInputs = [
940 propagatedBuildInputs = [
924 self."traitlets"
941 self."traitlets"
925 ];
942 ];
926 src = fetchurl {
943 src = fetchurl {
927 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
928 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
929 };
946 };
930 meta = {
947 meta = {
931 license = [ pkgs.lib.licenses.bsdOriginal ];
948 license = [ pkgs.lib.licenses.bsdOriginal ];
932 };
949 };
933 };
950 };
934 "kombu" = super.buildPythonPackage {
951 "kombu" = super.buildPythonPackage {
935 name = "kombu-4.6.6";
952 name = "kombu-4.6.6";
936 doCheck = false;
953 doCheck = false;
937 propagatedBuildInputs = [
954 propagatedBuildInputs = [
938 self."amqp"
955 self."amqp"
939 self."importlib-metadata"
956 self."importlib-metadata"
940 ];
957 ];
941 src = fetchurl {
958 src = fetchurl {
942 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
943 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
944 };
961 };
945 meta = {
962 meta = {
946 license = [ pkgs.lib.licenses.bsdOriginal ];
963 license = [ pkgs.lib.licenses.bsdOriginal ];
947 };
964 };
948 };
965 };
949 "lxml" = super.buildPythonPackage {
966 "lxml" = super.buildPythonPackage {
950 name = "lxml-4.2.5";
967 name = "lxml-4.2.5";
951 doCheck = false;
968 doCheck = false;
952 src = fetchurl {
969 src = fetchurl {
953 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
954 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
955 };
972 };
956 meta = {
973 meta = {
957 license = [ pkgs.lib.licenses.bsdOriginal ];
974 license = [ pkgs.lib.licenses.bsdOriginal ];
958 };
975 };
959 };
976 };
960 "mako" = super.buildPythonPackage {
977 "mako" = super.buildPythonPackage {
961 name = "mako-1.1.0";
978 name = "mako-1.1.0";
962 doCheck = false;
979 doCheck = false;
963 propagatedBuildInputs = [
980 propagatedBuildInputs = [
964 self."markupsafe"
981 self."markupsafe"
965 ];
982 ];
966 src = fetchurl {
983 src = fetchurl {
967 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
968 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
969 };
986 };
970 meta = {
987 meta = {
971 license = [ pkgs.lib.licenses.mit ];
988 license = [ pkgs.lib.licenses.mit ];
972 };
989 };
973 };
990 };
974 "markdown" = super.buildPythonPackage {
991 "markdown" = super.buildPythonPackage {
975 name = "markdown-2.6.11";
992 name = "markdown-2.6.11";
976 doCheck = false;
993 doCheck = false;
977 src = fetchurl {
994 src = fetchurl {
978 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
979 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
980 };
997 };
981 meta = {
998 meta = {
982 license = [ pkgs.lib.licenses.bsdOriginal ];
999 license = [ pkgs.lib.licenses.bsdOriginal ];
983 };
1000 };
984 };
1001 };
985 "markupsafe" = super.buildPythonPackage {
1002 "markupsafe" = super.buildPythonPackage {
986 name = "markupsafe-1.1.1";
1003 name = "markupsafe-1.1.1";
987 doCheck = false;
1004 doCheck = false;
988 src = fetchurl {
1005 src = fetchurl {
989 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
990 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
991 };
1008 };
992 meta = {
1009 meta = {
993 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
994 };
1011 };
995 };
1012 };
1013 "marshmallow" = super.buildPythonPackage {
1014 name = "marshmallow-2.18.0";
1015 doCheck = false;
1016 src = fetchurl {
1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1019 };
1020 meta = {
1021 license = [ pkgs.lib.licenses.mit ];
1022 };
1023 };
996 "mistune" = super.buildPythonPackage {
1024 "mistune" = super.buildPythonPackage {
997 name = "mistune-0.8.4";
1025 name = "mistune-0.8.4";
998 doCheck = false;
1026 doCheck = false;
999 src = fetchurl {
1027 src = fetchurl {
1000 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1001 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1002 };
1030 };
1003 meta = {
1031 meta = {
1004 license = [ pkgs.lib.licenses.bsdOriginal ];
1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1005 };
1033 };
1006 };
1034 };
1007 "mock" = super.buildPythonPackage {
1035 "mock" = super.buildPythonPackage {
1008 name = "mock-3.0.5";
1036 name = "mock-3.0.5";
1009 doCheck = false;
1037 doCheck = false;
1010 propagatedBuildInputs = [
1038 propagatedBuildInputs = [
1011 self."six"
1039 self."six"
1012 self."funcsigs"
1040 self."funcsigs"
1013 ];
1041 ];
1014 src = fetchurl {
1042 src = fetchurl {
1015 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1016 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1017 };
1045 };
1018 meta = {
1046 meta = {
1019 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1020 };
1048 };
1021 };
1049 };
1022 "more-itertools" = super.buildPythonPackage {
1050 "more-itertools" = super.buildPythonPackage {
1023 name = "more-itertools-5.0.0";
1051 name = "more-itertools-5.0.0";
1024 doCheck = false;
1052 doCheck = false;
1025 propagatedBuildInputs = [
1053 propagatedBuildInputs = [
1026 self."six"
1054 self."six"
1027 ];
1055 ];
1028 src = fetchurl {
1056 src = fetchurl {
1029 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1030 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1031 };
1059 };
1032 meta = {
1060 meta = {
1033 license = [ pkgs.lib.licenses.mit ];
1061 license = [ pkgs.lib.licenses.mit ];
1034 };
1062 };
1035 };
1063 };
1036 "msgpack-python" = super.buildPythonPackage {
1064 "msgpack-python" = super.buildPythonPackage {
1037 name = "msgpack-python-0.5.6";
1065 name = "msgpack-python-0.5.6";
1038 doCheck = false;
1066 doCheck = false;
1039 src = fetchurl {
1067 src = fetchurl {
1040 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1041 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1042 };
1070 };
1043 meta = {
1071 meta = {
1044 license = [ pkgs.lib.licenses.asl20 ];
1072 license = [ pkgs.lib.licenses.asl20 ];
1045 };
1073 };
1046 };
1074 };
1047 "mysql-python" = super.buildPythonPackage {
1075 "mysql-python" = super.buildPythonPackage {
1048 name = "mysql-python-1.2.5";
1076 name = "mysql-python-1.2.5";
1049 doCheck = false;
1077 doCheck = false;
1050 src = fetchurl {
1078 src = fetchurl {
1051 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1052 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1053 };
1081 };
1054 meta = {
1082 meta = {
1055 license = [ pkgs.lib.licenses.gpl1 ];
1083 license = [ pkgs.lib.licenses.gpl1 ];
1056 };
1084 };
1057 };
1085 };
1058 "nbconvert" = super.buildPythonPackage {
1086 "nbconvert" = super.buildPythonPackage {
1059 name = "nbconvert-5.3.1";
1087 name = "nbconvert-5.3.1";
1060 doCheck = false;
1088 doCheck = false;
1061 propagatedBuildInputs = [
1089 propagatedBuildInputs = [
1062 self."mistune"
1090 self."mistune"
1063 self."jinja2"
1091 self."jinja2"
1064 self."pygments"
1092 self."pygments"
1065 self."traitlets"
1093 self."traitlets"
1066 self."jupyter-core"
1094 self."jupyter-core"
1067 self."nbformat"
1095 self."nbformat"
1068 self."entrypoints"
1096 self."entrypoints"
1069 self."bleach"
1097 self."bleach"
1070 self."pandocfilters"
1098 self."pandocfilters"
1071 self."testpath"
1099 self."testpath"
1072 ];
1100 ];
1073 src = fetchurl {
1101 src = fetchurl {
1074 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1075 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1076 };
1104 };
1077 meta = {
1105 meta = {
1078 license = [ pkgs.lib.licenses.bsdOriginal ];
1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1079 };
1107 };
1080 };
1108 };
1081 "nbformat" = super.buildPythonPackage {
1109 "nbformat" = super.buildPythonPackage {
1082 name = "nbformat-4.4.0";
1110 name = "nbformat-4.4.0";
1083 doCheck = false;
1111 doCheck = false;
1084 propagatedBuildInputs = [
1112 propagatedBuildInputs = [
1085 self."ipython-genutils"
1113 self."ipython-genutils"
1086 self."traitlets"
1114 self."traitlets"
1087 self."jsonschema"
1115 self."jsonschema"
1088 self."jupyter-core"
1116 self."jupyter-core"
1089 ];
1117 ];
1090 src = fetchurl {
1118 src = fetchurl {
1091 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1092 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1093 };
1121 };
1094 meta = {
1122 meta = {
1095 license = [ pkgs.lib.licenses.bsdOriginal ];
1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1096 };
1124 };
1097 };
1125 };
1098 "packaging" = super.buildPythonPackage {
1126 "packaging" = super.buildPythonPackage {
1099 name = "packaging-20.3";
1127 name = "packaging-20.3";
1100 doCheck = false;
1128 doCheck = false;
1101 propagatedBuildInputs = [
1129 propagatedBuildInputs = [
1102 self."pyparsing"
1130 self."pyparsing"
1103 self."six"
1131 self."six"
1104 ];
1132 ];
1105 src = fetchurl {
1133 src = fetchurl {
1106 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1107 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1108 };
1136 };
1109 meta = {
1137 meta = {
1110 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1111 };
1139 };
1112 };
1140 };
1113 "pandocfilters" = super.buildPythonPackage {
1141 "pandocfilters" = super.buildPythonPackage {
1114 name = "pandocfilters-1.4.2";
1142 name = "pandocfilters-1.4.2";
1115 doCheck = false;
1143 doCheck = false;
1116 src = fetchurl {
1144 src = fetchurl {
1117 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1118 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1119 };
1147 };
1120 meta = {
1148 meta = {
1121 license = [ pkgs.lib.licenses.bsdOriginal ];
1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1122 };
1150 };
1123 };
1151 };
1124 "paste" = super.buildPythonPackage {
1152 "paste" = super.buildPythonPackage {
1125 name = "paste-3.4.0";
1153 name = "paste-3.4.0";
1126 doCheck = false;
1154 doCheck = false;
1127 propagatedBuildInputs = [
1155 propagatedBuildInputs = [
1128 self."six"
1156 self."six"
1129 ];
1157 ];
1130 src = fetchurl {
1158 src = fetchurl {
1131 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1132 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1133 };
1161 };
1134 meta = {
1162 meta = {
1135 license = [ pkgs.lib.licenses.mit ];
1163 license = [ pkgs.lib.licenses.mit ];
1136 };
1164 };
1137 };
1165 };
1138 "pastedeploy" = super.buildPythonPackage {
1166 "pastedeploy" = super.buildPythonPackage {
1139 name = "pastedeploy-2.1.0";
1167 name = "pastedeploy-2.1.0";
1140 doCheck = false;
1168 doCheck = false;
1141 src = fetchurl {
1169 src = fetchurl {
1142 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1143 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1144 };
1172 };
1145 meta = {
1173 meta = {
1146 license = [ pkgs.lib.licenses.mit ];
1174 license = [ pkgs.lib.licenses.mit ];
1147 };
1175 };
1148 };
1176 };
1149 "pastescript" = super.buildPythonPackage {
1177 "pastescript" = super.buildPythonPackage {
1150 name = "pastescript-3.2.0";
1178 name = "pastescript-3.2.0";
1151 doCheck = false;
1179 doCheck = false;
1152 propagatedBuildInputs = [
1180 propagatedBuildInputs = [
1153 self."paste"
1181 self."paste"
1154 self."pastedeploy"
1182 self."pastedeploy"
1155 self."six"
1183 self."six"
1156 ];
1184 ];
1157 src = fetchurl {
1185 src = fetchurl {
1158 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1159 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1160 };
1188 };
1161 meta = {
1189 meta = {
1162 license = [ pkgs.lib.licenses.mit ];
1190 license = [ pkgs.lib.licenses.mit ];
1163 };
1191 };
1164 };
1192 };
1165 "pathlib2" = super.buildPythonPackage {
1193 "pathlib2" = super.buildPythonPackage {
1166 name = "pathlib2-2.3.5";
1194 name = "pathlib2-2.3.5";
1167 doCheck = false;
1195 doCheck = false;
1168 propagatedBuildInputs = [
1196 propagatedBuildInputs = [
1169 self."six"
1197 self."six"
1170 self."scandir"
1198 self."scandir"
1171 ];
1199 ];
1172 src = fetchurl {
1200 src = fetchurl {
1173 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1174 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1175 };
1203 };
1176 meta = {
1204 meta = {
1177 license = [ pkgs.lib.licenses.mit ];
1205 license = [ pkgs.lib.licenses.mit ];
1178 };
1206 };
1179 };
1207 };
1180 "peppercorn" = super.buildPythonPackage {
1208 "peppercorn" = super.buildPythonPackage {
1181 name = "peppercorn-0.6";
1209 name = "peppercorn-0.6";
1182 doCheck = false;
1210 doCheck = false;
1183 src = fetchurl {
1211 src = fetchurl {
1184 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1185 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1186 };
1214 };
1187 meta = {
1215 meta = {
1188 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1189 };
1217 };
1190 };
1218 };
1191 "pexpect" = super.buildPythonPackage {
1219 "pexpect" = super.buildPythonPackage {
1192 name = "pexpect-4.8.0";
1220 name = "pexpect-4.8.0";
1193 doCheck = false;
1221 doCheck = false;
1194 propagatedBuildInputs = [
1222 propagatedBuildInputs = [
1195 self."ptyprocess"
1223 self."ptyprocess"
1196 ];
1224 ];
1197 src = fetchurl {
1225 src = fetchurl {
1198 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1199 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1200 };
1228 };
1201 meta = {
1229 meta = {
1202 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1203 };
1231 };
1204 };
1232 };
1205 "pickleshare" = super.buildPythonPackage {
1233 "pickleshare" = super.buildPythonPackage {
1206 name = "pickleshare-0.7.5";
1234 name = "pickleshare-0.7.5";
1207 doCheck = false;
1235 doCheck = false;
1208 propagatedBuildInputs = [
1236 propagatedBuildInputs = [
1209 self."pathlib2"
1237 self."pathlib2"
1210 ];
1238 ];
1211 src = fetchurl {
1239 src = fetchurl {
1212 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1213 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1214 };
1242 };
1215 meta = {
1243 meta = {
1216 license = [ pkgs.lib.licenses.mit ];
1244 license = [ pkgs.lib.licenses.mit ];
1217 };
1245 };
1218 };
1246 };
1219 "plaster" = super.buildPythonPackage {
1247 "plaster" = super.buildPythonPackage {
1220 name = "plaster-1.0";
1248 name = "plaster-1.0";
1221 doCheck = false;
1249 doCheck = false;
1222 propagatedBuildInputs = [
1250 propagatedBuildInputs = [
1223 self."setuptools"
1251 self."setuptools"
1224 ];
1252 ];
1225 src = fetchurl {
1253 src = fetchurl {
1226 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1227 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1228 };
1256 };
1229 meta = {
1257 meta = {
1230 license = [ pkgs.lib.licenses.mit ];
1258 license = [ pkgs.lib.licenses.mit ];
1231 };
1259 };
1232 };
1260 };
1233 "plaster-pastedeploy" = super.buildPythonPackage {
1261 "plaster-pastedeploy" = super.buildPythonPackage {
1234 name = "plaster-pastedeploy-0.7";
1262 name = "plaster-pastedeploy-0.7";
1235 doCheck = false;
1263 doCheck = false;
1236 propagatedBuildInputs = [
1264 propagatedBuildInputs = [
1237 self."pastedeploy"
1265 self."pastedeploy"
1238 self."plaster"
1266 self."plaster"
1239 ];
1267 ];
1240 src = fetchurl {
1268 src = fetchurl {
1241 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1242 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1243 };
1271 };
1244 meta = {
1272 meta = {
1245 license = [ pkgs.lib.licenses.mit ];
1273 license = [ pkgs.lib.licenses.mit ];
1246 };
1274 };
1247 };
1275 };
1248 "pluggy" = super.buildPythonPackage {
1276 "pluggy" = super.buildPythonPackage {
1249 name = "pluggy-0.13.1";
1277 name = "pluggy-0.13.1";
1250 doCheck = false;
1278 doCheck = false;
1251 propagatedBuildInputs = [
1279 propagatedBuildInputs = [
1252 self."importlib-metadata"
1280 self."importlib-metadata"
1253 ];
1281 ];
1254 src = fetchurl {
1282 src = fetchurl {
1255 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1256 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1257 };
1285 };
1258 meta = {
1286 meta = {
1259 license = [ pkgs.lib.licenses.mit ];
1287 license = [ pkgs.lib.licenses.mit ];
1260 };
1288 };
1261 };
1289 };
1262 "premailer" = super.buildPythonPackage {
1290 "premailer" = super.buildPythonPackage {
1263 name = "premailer-3.6.1";
1291 name = "premailer-3.6.1";
1264 doCheck = false;
1292 doCheck = false;
1265 propagatedBuildInputs = [
1293 propagatedBuildInputs = [
1266 self."lxml"
1294 self."lxml"
1267 self."cssselect"
1295 self."cssselect"
1268 self."cssutils"
1296 self."cssutils"
1269 self."requests"
1297 self."requests"
1270 self."cachetools"
1298 self."cachetools"
1271 ];
1299 ];
1272 src = fetchurl {
1300 src = fetchurl {
1273 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1274 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1275 };
1303 };
1276 meta = {
1304 meta = {
1277 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1278 };
1306 };
1279 };
1307 };
1280 "prompt-toolkit" = super.buildPythonPackage {
1308 "prompt-toolkit" = super.buildPythonPackage {
1281 name = "prompt-toolkit-1.0.18";
1309 name = "prompt-toolkit-1.0.18";
1282 doCheck = false;
1310 doCheck = false;
1283 propagatedBuildInputs = [
1311 propagatedBuildInputs = [
1284 self."six"
1312 self."six"
1285 self."wcwidth"
1313 self."wcwidth"
1286 ];
1314 ];
1287 src = fetchurl {
1315 src = fetchurl {
1288 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1289 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1290 };
1318 };
1291 meta = {
1319 meta = {
1292 license = [ pkgs.lib.licenses.bsdOriginal ];
1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1293 };
1321 };
1294 };
1322 };
1295 "psutil" = super.buildPythonPackage {
1323 "psutil" = super.buildPythonPackage {
1296 name = "psutil-5.7.0";
1324 name = "psutil-5.7.0";
1297 doCheck = false;
1325 doCheck = false;
1298 src = fetchurl {
1326 src = fetchurl {
1299 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1300 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1301 };
1329 };
1302 meta = {
1330 meta = {
1303 license = [ pkgs.lib.licenses.bsdOriginal ];
1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1304 };
1332 };
1305 };
1333 };
1306 "psycopg2" = super.buildPythonPackage {
1334 "psycopg2" = super.buildPythonPackage {
1307 name = "psycopg2-2.8.4";
1335 name = "psycopg2-2.8.4";
1308 doCheck = false;
1336 doCheck = false;
1309 src = fetchurl {
1337 src = fetchurl {
1310 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1311 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1312 };
1340 };
1313 meta = {
1341 meta = {
1314 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1315 };
1343 };
1316 };
1344 };
1317 "ptyprocess" = super.buildPythonPackage {
1345 "ptyprocess" = super.buildPythonPackage {
1318 name = "ptyprocess-0.6.0";
1346 name = "ptyprocess-0.6.0";
1319 doCheck = false;
1347 doCheck = false;
1320 src = fetchurl {
1348 src = fetchurl {
1321 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1322 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1323 };
1351 };
1324 meta = {
1352 meta = {
1325 license = [ ];
1353 license = [ ];
1326 };
1354 };
1327 };
1355 };
1328 "py" = super.buildPythonPackage {
1356 "py" = super.buildPythonPackage {
1329 name = "py-1.8.0";
1357 name = "py-1.8.0";
1330 doCheck = false;
1358 doCheck = false;
1331 src = fetchurl {
1359 src = fetchurl {
1332 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1333 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1334 };
1362 };
1335 meta = {
1363 meta = {
1336 license = [ pkgs.lib.licenses.mit ];
1364 license = [ pkgs.lib.licenses.mit ];
1337 };
1365 };
1338 };
1366 };
1339 "py-bcrypt" = super.buildPythonPackage {
1367 "py-bcrypt" = super.buildPythonPackage {
1340 name = "py-bcrypt-0.4";
1368 name = "py-bcrypt-0.4";
1341 doCheck = false;
1369 doCheck = false;
1342 src = fetchurl {
1370 src = fetchurl {
1343 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1344 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1345 };
1373 };
1346 meta = {
1374 meta = {
1347 license = [ pkgs.lib.licenses.bsdOriginal ];
1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1348 };
1376 };
1349 };
1377 };
1350 "py-gfm" = super.buildPythonPackage {
1378 "py-gfm" = super.buildPythonPackage {
1351 name = "py-gfm-0.1.4";
1379 name = "py-gfm-0.1.4";
1352 doCheck = false;
1380 doCheck = false;
1353 propagatedBuildInputs = [
1381 propagatedBuildInputs = [
1354 self."setuptools"
1382 self."setuptools"
1355 self."markdown"
1383 self."markdown"
1356 ];
1384 ];
1357 src = fetchurl {
1385 src = fetchurl {
1358 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1359 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1360 };
1388 };
1361 meta = {
1389 meta = {
1362 license = [ pkgs.lib.licenses.bsdOriginal ];
1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1363 };
1391 };
1364 };
1392 };
1365 "pyasn1" = super.buildPythonPackage {
1393 "pyasn1" = super.buildPythonPackage {
1366 name = "pyasn1-0.4.8";
1394 name = "pyasn1-0.4.8";
1367 doCheck = false;
1395 doCheck = false;
1368 src = fetchurl {
1396 src = fetchurl {
1369 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1370 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1371 };
1399 };
1372 meta = {
1400 meta = {
1373 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1374 };
1402 };
1375 };
1403 };
1376 "pyasn1-modules" = super.buildPythonPackage {
1404 "pyasn1-modules" = super.buildPythonPackage {
1377 name = "pyasn1-modules-0.2.6";
1405 name = "pyasn1-modules-0.2.6";
1378 doCheck = false;
1406 doCheck = false;
1379 propagatedBuildInputs = [
1407 propagatedBuildInputs = [
1380 self."pyasn1"
1408 self."pyasn1"
1381 ];
1409 ];
1382 src = fetchurl {
1410 src = fetchurl {
1383 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1384 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1385 };
1413 };
1386 meta = {
1414 meta = {
1387 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1388 };
1416 };
1389 };
1417 };
1390 "pycparser" = super.buildPythonPackage {
1418 "pycparser" = super.buildPythonPackage {
1391 name = "pycparser-2.20";
1419 name = "pycparser-2.20";
1392 doCheck = false;
1420 doCheck = false;
1393 src = fetchurl {
1421 src = fetchurl {
1394 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1395 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1396 };
1424 };
1397 meta = {
1425 meta = {
1398 license = [ pkgs.lib.licenses.bsdOriginal ];
1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1399 };
1427 };
1400 };
1428 };
1401 "pycrypto" = super.buildPythonPackage {
1429 "pycrypto" = super.buildPythonPackage {
1402 name = "pycrypto-2.6.1";
1430 name = "pycrypto-2.6.1";
1403 doCheck = false;
1431 doCheck = false;
1404 src = fetchurl {
1432 src = fetchurl {
1405 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1406 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1407 };
1435 };
1408 meta = {
1436 meta = {
1409 license = [ pkgs.lib.licenses.publicDomain ];
1437 license = [ pkgs.lib.licenses.publicDomain ];
1410 };
1438 };
1411 };
1439 };
1412 "pycurl" = super.buildPythonPackage {
1440 "pycurl" = super.buildPythonPackage {
1413 name = "pycurl-7.43.0.3";
1441 name = "pycurl-7.43.0.3";
1414 doCheck = false;
1442 doCheck = false;
1415 src = fetchurl {
1443 src = fetchurl {
1416 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1417 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1418 };
1446 };
1419 meta = {
1447 meta = {
1420 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1421 };
1449 };
1422 };
1450 };
1423 "pygments" = super.buildPythonPackage {
1451 "pygments" = super.buildPythonPackage {
1424 name = "pygments-2.4.2";
1452 name = "pygments-2.4.2";
1425 doCheck = false;
1453 doCheck = false;
1426 src = fetchurl {
1454 src = fetchurl {
1427 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1428 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1429 };
1457 };
1430 meta = {
1458 meta = {
1431 license = [ pkgs.lib.licenses.bsdOriginal ];
1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1432 };
1460 };
1433 };
1461 };
1434 "pymysql" = super.buildPythonPackage {
1462 "pymysql" = super.buildPythonPackage {
1435 name = "pymysql-0.8.1";
1463 name = "pymysql-0.8.1";
1436 doCheck = false;
1464 doCheck = false;
1437 src = fetchurl {
1465 src = fetchurl {
1438 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1439 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1440 };
1468 };
1441 meta = {
1469 meta = {
1442 license = [ pkgs.lib.licenses.mit ];
1470 license = [ pkgs.lib.licenses.mit ];
1443 };
1471 };
1444 };
1472 };
1445 "pyotp" = super.buildPythonPackage {
1473 "pyotp" = super.buildPythonPackage {
1446 name = "pyotp-2.3.0";
1474 name = "pyotp-2.3.0";
1447 doCheck = false;
1475 doCheck = false;
1448 src = fetchurl {
1476 src = fetchurl {
1449 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1450 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1451 };
1479 };
1452 meta = {
1480 meta = {
1453 license = [ pkgs.lib.licenses.mit ];
1481 license = [ pkgs.lib.licenses.mit ];
1454 };
1482 };
1455 };
1483 };
1456 "pyparsing" = super.buildPythonPackage {
1484 "pyparsing" = super.buildPythonPackage {
1457 name = "pyparsing-2.4.7";
1485 name = "pyparsing-2.4.7";
1458 doCheck = false;
1486 doCheck = false;
1459 src = fetchurl {
1487 src = fetchurl {
1460 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1461 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1462 };
1490 };
1463 meta = {
1491 meta = {
1464 license = [ pkgs.lib.licenses.mit ];
1492 license = [ pkgs.lib.licenses.mit ];
1465 };
1493 };
1466 };
1494 };
1467 "pyramid" = super.buildPythonPackage {
1495 "pyramid" = super.buildPythonPackage {
1468 name = "pyramid-1.10.4";
1496 name = "pyramid-1.10.4";
1469 doCheck = false;
1497 doCheck = false;
1470 propagatedBuildInputs = [
1498 propagatedBuildInputs = [
1471 self."hupper"
1499 self."hupper"
1472 self."plaster"
1500 self."plaster"
1473 self."plaster-pastedeploy"
1501 self."plaster-pastedeploy"
1474 self."setuptools"
1502 self."setuptools"
1475 self."translationstring"
1503 self."translationstring"
1476 self."venusian"
1504 self."venusian"
1477 self."webob"
1505 self."webob"
1478 self."zope.deprecation"
1506 self."zope.deprecation"
1479 self."zope.interface"
1507 self."zope.interface"
1480 self."repoze.lru"
1508 self."repoze.lru"
1481 ];
1509 ];
1482 src = fetchurl {
1510 src = fetchurl {
1483 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1484 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1485 };
1513 };
1486 meta = {
1514 meta = {
1487 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1488 };
1516 };
1489 };
1517 };
1490 "pyramid-debugtoolbar" = super.buildPythonPackage {
1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1491 name = "pyramid-debugtoolbar-4.6.1";
1519 name = "pyramid-debugtoolbar-4.6.1";
1492 doCheck = false;
1520 doCheck = false;
1493 propagatedBuildInputs = [
1521 propagatedBuildInputs = [
1494 self."pyramid"
1522 self."pyramid"
1495 self."pyramid-mako"
1523 self."pyramid-mako"
1496 self."repoze.lru"
1524 self."repoze.lru"
1497 self."pygments"
1525 self."pygments"
1498 self."ipaddress"
1526 self."ipaddress"
1499 ];
1527 ];
1500 src = fetchurl {
1528 src = fetchurl {
1501 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1502 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1503 };
1531 };
1504 meta = {
1532 meta = {
1505 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1506 };
1534 };
1507 };
1535 };
1508 "pyramid-jinja2" = super.buildPythonPackage {
1536 "pyramid-jinja2" = super.buildPythonPackage {
1509 name = "pyramid-jinja2-2.7";
1537 name = "pyramid-jinja2-2.7";
1510 doCheck = false;
1538 doCheck = false;
1511 propagatedBuildInputs = [
1539 propagatedBuildInputs = [
1512 self."pyramid"
1540 self."pyramid"
1513 self."zope.deprecation"
1541 self."zope.deprecation"
1514 self."jinja2"
1542 self."jinja2"
1515 self."markupsafe"
1543 self."markupsafe"
1516 ];
1544 ];
1517 src = fetchurl {
1545 src = fetchurl {
1518 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1519 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1520 };
1548 };
1521 meta = {
1549 meta = {
1522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1523 };
1551 };
1524 };
1552 };
1553 "pyramid-apispec" = super.buildPythonPackage {
1554 name = "pyramid-apispec-0.3.2";
1555 doCheck = false;
1556 propagatedBuildInputs = [
1557 self."apispec"
1558 ];
1559 src = fetchurl {
1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1562 };
1563 meta = {
1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1565 };
1566 };
1525 "pyramid-mailer" = super.buildPythonPackage {
1567 "pyramid-mailer" = super.buildPythonPackage {
1526 name = "pyramid-mailer-0.15.1";
1568 name = "pyramid-mailer-0.15.1";
1527 doCheck = false;
1569 doCheck = false;
1528 propagatedBuildInputs = [
1570 propagatedBuildInputs = [
1529 self."pyramid"
1571 self."pyramid"
1530 self."repoze.sendmail"
1572 self."repoze.sendmail"
1531 self."transaction"
1573 self."transaction"
1532 ];
1574 ];
1533 src = fetchurl {
1575 src = fetchurl {
1534 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1535 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1536 };
1578 };
1537 meta = {
1579 meta = {
1538 license = [ pkgs.lib.licenses.bsdOriginal ];
1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1539 };
1581 };
1540 };
1582 };
1541 "pyramid-mako" = super.buildPythonPackage {
1583 "pyramid-mako" = super.buildPythonPackage {
1542 name = "pyramid-mako-1.1.0";
1584 name = "pyramid-mako-1.1.0";
1543 doCheck = false;
1585 doCheck = false;
1544 propagatedBuildInputs = [
1586 propagatedBuildInputs = [
1545 self."pyramid"
1587 self."pyramid"
1546 self."mako"
1588 self."mako"
1547 ];
1589 ];
1548 src = fetchurl {
1590 src = fetchurl {
1549 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1550 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1551 };
1593 };
1552 meta = {
1594 meta = {
1553 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1554 };
1596 };
1555 };
1597 };
1556 "pysqlite" = super.buildPythonPackage {
1598 "pysqlite" = super.buildPythonPackage {
1557 name = "pysqlite-2.8.3";
1599 name = "pysqlite-2.8.3";
1558 doCheck = false;
1600 doCheck = false;
1559 src = fetchurl {
1601 src = fetchurl {
1560 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1561 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1562 };
1604 };
1563 meta = {
1605 meta = {
1564 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1565 };
1607 };
1566 };
1608 };
1567 "pytest" = super.buildPythonPackage {
1609 "pytest" = super.buildPythonPackage {
1568 name = "pytest-4.6.5";
1610 name = "pytest-4.6.5";
1569 doCheck = false;
1611 doCheck = false;
1570 propagatedBuildInputs = [
1612 propagatedBuildInputs = [
1571 self."py"
1613 self."py"
1572 self."six"
1614 self."six"
1573 self."packaging"
1615 self."packaging"
1574 self."attrs"
1616 self."attrs"
1575 self."atomicwrites"
1617 self."atomicwrites"
1576 self."pluggy"
1618 self."pluggy"
1577 self."importlib-metadata"
1619 self."importlib-metadata"
1578 self."wcwidth"
1620 self."wcwidth"
1579 self."funcsigs"
1621 self."funcsigs"
1580 self."pathlib2"
1622 self."pathlib2"
1581 self."more-itertools"
1623 self."more-itertools"
1582 ];
1624 ];
1583 src = fetchurl {
1625 src = fetchurl {
1584 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1585 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1586 };
1628 };
1587 meta = {
1629 meta = {
1588 license = [ pkgs.lib.licenses.mit ];
1630 license = [ pkgs.lib.licenses.mit ];
1589 };
1631 };
1590 };
1632 };
1591 "pytest-cov" = super.buildPythonPackage {
1633 "pytest-cov" = super.buildPythonPackage {
1592 name = "pytest-cov-2.7.1";
1634 name = "pytest-cov-2.7.1";
1593 doCheck = false;
1635 doCheck = false;
1594 propagatedBuildInputs = [
1636 propagatedBuildInputs = [
1595 self."pytest"
1637 self."pytest"
1596 self."coverage"
1638 self."coverage"
1597 ];
1639 ];
1598 src = fetchurl {
1640 src = fetchurl {
1599 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1600 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1601 };
1643 };
1602 meta = {
1644 meta = {
1603 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1604 };
1646 };
1605 };
1647 };
1606 "pytest-profiling" = super.buildPythonPackage {
1648 "pytest-profiling" = super.buildPythonPackage {
1607 name = "pytest-profiling-1.7.0";
1649 name = "pytest-profiling-1.7.0";
1608 doCheck = false;
1650 doCheck = false;
1609 propagatedBuildInputs = [
1651 propagatedBuildInputs = [
1610 self."six"
1652 self."six"
1611 self."pytest"
1653 self."pytest"
1612 self."gprof2dot"
1654 self."gprof2dot"
1613 ];
1655 ];
1614 src = fetchurl {
1656 src = fetchurl {
1615 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1616 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1617 };
1659 };
1618 meta = {
1660 meta = {
1619 license = [ pkgs.lib.licenses.mit ];
1661 license = [ pkgs.lib.licenses.mit ];
1620 };
1662 };
1621 };
1663 };
1622 "pytest-runner" = super.buildPythonPackage {
1664 "pytest-runner" = super.buildPythonPackage {
1623 name = "pytest-runner-5.1";
1665 name = "pytest-runner-5.1";
1624 doCheck = false;
1666 doCheck = false;
1625 src = fetchurl {
1667 src = fetchurl {
1626 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1627 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1628 };
1670 };
1629 meta = {
1671 meta = {
1630 license = [ pkgs.lib.licenses.mit ];
1672 license = [ pkgs.lib.licenses.mit ];
1631 };
1673 };
1632 };
1674 };
1633 "pytest-sugar" = super.buildPythonPackage {
1675 "pytest-sugar" = super.buildPythonPackage {
1634 name = "pytest-sugar-0.9.2";
1676 name = "pytest-sugar-0.9.2";
1635 doCheck = false;
1677 doCheck = false;
1636 propagatedBuildInputs = [
1678 propagatedBuildInputs = [
1637 self."pytest"
1679 self."pytest"
1638 self."termcolor"
1680 self."termcolor"
1639 self."packaging"
1681 self."packaging"
1640 ];
1682 ];
1641 src = fetchurl {
1683 src = fetchurl {
1642 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1643 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1644 };
1686 };
1645 meta = {
1687 meta = {
1646 license = [ pkgs.lib.licenses.bsdOriginal ];
1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1647 };
1689 };
1648 };
1690 };
1649 "pytest-timeout" = super.buildPythonPackage {
1691 "pytest-timeout" = super.buildPythonPackage {
1650 name = "pytest-timeout-1.3.3";
1692 name = "pytest-timeout-1.3.3";
1651 doCheck = false;
1693 doCheck = false;
1652 propagatedBuildInputs = [
1694 propagatedBuildInputs = [
1653 self."pytest"
1695 self."pytest"
1654 ];
1696 ];
1655 src = fetchurl {
1697 src = fetchurl {
1656 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1657 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1658 };
1700 };
1659 meta = {
1701 meta = {
1660 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1661 };
1703 };
1662 };
1704 };
1663 "python-dateutil" = super.buildPythonPackage {
1705 "python-dateutil" = super.buildPythonPackage {
1664 name = "python-dateutil-2.8.1";
1706 name = "python-dateutil-2.8.1";
1665 doCheck = false;
1707 doCheck = false;
1666 propagatedBuildInputs = [
1708 propagatedBuildInputs = [
1667 self."six"
1709 self."six"
1668 ];
1710 ];
1669 src = fetchurl {
1711 src = fetchurl {
1670 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1671 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1672 };
1714 };
1673 meta = {
1715 meta = {
1674 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1675 };
1717 };
1676 };
1718 };
1677 "python-editor" = super.buildPythonPackage {
1719 "python-editor" = super.buildPythonPackage {
1678 name = "python-editor-1.0.4";
1720 name = "python-editor-1.0.4";
1679 doCheck = false;
1721 doCheck = false;
1680 src = fetchurl {
1722 src = fetchurl {
1681 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1682 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1683 };
1725 };
1684 meta = {
1726 meta = {
1685 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1686 };
1728 };
1687 };
1729 };
1688 "python-ldap" = super.buildPythonPackage {
1730 "python-ldap" = super.buildPythonPackage {
1689 name = "python-ldap-3.2.0";
1731 name = "python-ldap-3.2.0";
1690 doCheck = false;
1732 doCheck = false;
1691 propagatedBuildInputs = [
1733 propagatedBuildInputs = [
1692 self."pyasn1"
1734 self."pyasn1"
1693 self."pyasn1-modules"
1735 self."pyasn1-modules"
1694 ];
1736 ];
1695 src = fetchurl {
1737 src = fetchurl {
1696 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1697 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1698 };
1740 };
1699 meta = {
1741 meta = {
1700 license = [ pkgs.lib.licenses.psfl ];
1742 license = [ pkgs.lib.licenses.psfl ];
1701 };
1743 };
1702 };
1744 };
1703 "python-memcached" = super.buildPythonPackage {
1745 "python-memcached" = super.buildPythonPackage {
1704 name = "python-memcached-1.59";
1746 name = "python-memcached-1.59";
1705 doCheck = false;
1747 doCheck = false;
1706 propagatedBuildInputs = [
1748 propagatedBuildInputs = [
1707 self."six"
1749 self."six"
1708 ];
1750 ];
1709 src = fetchurl {
1751 src = fetchurl {
1710 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1711 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1712 };
1754 };
1713 meta = {
1755 meta = {
1714 license = [ pkgs.lib.licenses.psfl ];
1756 license = [ pkgs.lib.licenses.psfl ];
1715 };
1757 };
1716 };
1758 };
1717 "python-pam" = super.buildPythonPackage {
1759 "python-pam" = super.buildPythonPackage {
1718 name = "python-pam-1.8.4";
1760 name = "python-pam-1.8.4";
1719 doCheck = false;
1761 doCheck = false;
1720 src = fetchurl {
1762 src = fetchurl {
1721 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1722 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1723 };
1765 };
1724 meta = {
1766 meta = {
1725 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1726 };
1768 };
1727 };
1769 };
1728 "python-saml" = super.buildPythonPackage {
1770 "python-saml" = super.buildPythonPackage {
1729 name = "python-saml-2.4.2";
1771 name = "python-saml-2.4.2";
1730 doCheck = false;
1772 doCheck = false;
1731 propagatedBuildInputs = [
1773 propagatedBuildInputs = [
1732 self."dm.xmlsec.binding"
1774 self."dm.xmlsec.binding"
1733 self."isodate"
1775 self."isodate"
1734 self."defusedxml"
1776 self."defusedxml"
1735 ];
1777 ];
1736 src = fetchurl {
1778 src = fetchurl {
1737 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1738 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1739 };
1781 };
1740 meta = {
1782 meta = {
1741 license = [ pkgs.lib.licenses.mit ];
1783 license = [ pkgs.lib.licenses.mit ];
1742 };
1784 };
1743 };
1785 };
1744 "pytz" = super.buildPythonPackage {
1786 "pytz" = super.buildPythonPackage {
1745 name = "pytz-2019.3";
1787 name = "pytz-2019.3";
1746 doCheck = false;
1788 doCheck = false;
1747 src = fetchurl {
1789 src = fetchurl {
1748 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1749 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1750 };
1792 };
1751 meta = {
1793 meta = {
1752 license = [ pkgs.lib.licenses.mit ];
1794 license = [ pkgs.lib.licenses.mit ];
1753 };
1795 };
1754 };
1796 };
1755 "pyzmq" = super.buildPythonPackage {
1797 "pyzmq" = super.buildPythonPackage {
1756 name = "pyzmq-14.6.0";
1798 name = "pyzmq-14.6.0";
1757 doCheck = false;
1799 doCheck = false;
1758 src = fetchurl {
1800 src = fetchurl {
1759 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1760 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1761 };
1803 };
1762 meta = {
1804 meta = {
1763 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1764 };
1806 };
1765 };
1807 };
1808 "PyYAML" = super.buildPythonPackage {
1809 name = "PyYAML-5.3.1";
1810 doCheck = false;
1811 src = fetchurl {
1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1814 };
1815 meta = {
1816 license = [ pkgs.lib.licenses.mit ];
1817 };
1818 };
1766 "redis" = super.buildPythonPackage {
1819 "redis" = super.buildPythonPackage {
1767 name = "redis-3.4.1";
1820 name = "redis-3.4.1";
1768 doCheck = false;
1821 doCheck = false;
1769 src = fetchurl {
1822 src = fetchurl {
1770 url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz";
1823 url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz";
1771 sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd";
1824 sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd";
1772 };
1825 };
1773 meta = {
1826 meta = {
1774 license = [ pkgs.lib.licenses.mit ];
1827 license = [ pkgs.lib.licenses.mit ];
1775 };
1828 };
1776 };
1829 };
1777 "repoze.lru" = super.buildPythonPackage {
1830 "repoze.lru" = super.buildPythonPackage {
1778 name = "repoze.lru-0.7";
1831 name = "repoze.lru-0.7";
1779 doCheck = false;
1832 doCheck = false;
1780 src = fetchurl {
1833 src = fetchurl {
1781 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1834 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1782 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1835 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1783 };
1836 };
1784 meta = {
1837 meta = {
1785 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1838 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1786 };
1839 };
1787 };
1840 };
1788 "repoze.sendmail" = super.buildPythonPackage {
1841 "repoze.sendmail" = super.buildPythonPackage {
1789 name = "repoze.sendmail-4.4.1";
1842 name = "repoze.sendmail-4.4.1";
1790 doCheck = false;
1843 doCheck = false;
1791 propagatedBuildInputs = [
1844 propagatedBuildInputs = [
1792 self."setuptools"
1845 self."setuptools"
1793 self."zope.interface"
1846 self."zope.interface"
1794 self."transaction"
1847 self."transaction"
1795 ];
1848 ];
1796 src = fetchurl {
1849 src = fetchurl {
1797 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1850 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1798 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1851 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1799 };
1852 };
1800 meta = {
1853 meta = {
1801 license = [ pkgs.lib.licenses.zpl21 ];
1854 license = [ pkgs.lib.licenses.zpl21 ];
1802 };
1855 };
1803 };
1856 };
1804 "requests" = super.buildPythonPackage {
1857 "requests" = super.buildPythonPackage {
1805 name = "requests-2.22.0";
1858 name = "requests-2.22.0";
1806 doCheck = false;
1859 doCheck = false;
1807 propagatedBuildInputs = [
1860 propagatedBuildInputs = [
1808 self."chardet"
1861 self."chardet"
1809 self."idna"
1862 self."idna"
1810 self."urllib3"
1863 self."urllib3"
1811 self."certifi"
1864 self."certifi"
1812 ];
1865 ];
1813 src = fetchurl {
1866 src = fetchurl {
1814 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1867 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1815 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1868 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1816 };
1869 };
1817 meta = {
1870 meta = {
1818 license = [ pkgs.lib.licenses.asl20 ];
1871 license = [ pkgs.lib.licenses.asl20 ];
1819 };
1872 };
1820 };
1873 };
1821 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1874 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1822 name = "rhodecode-enterprise-ce-4.20.1";
1875 name = "rhodecode-enterprise-ce-4.20.0";
1823 buildInputs = [
1876 buildInputs = [
1824 self."pytest"
1877 self."pytest"
1825 self."py"
1878 self."py"
1826 self."pytest-cov"
1879 self."pytest-cov"
1827 self."pytest-sugar"
1880 self."pytest-sugar"
1828 self."pytest-runner"
1881 self."pytest-runner"
1829 self."pytest-profiling"
1882 self."pytest-profiling"
1830 self."pytest-timeout"
1883 self."pytest-timeout"
1831 self."gprof2dot"
1884 self."gprof2dot"
1832 self."mock"
1885 self."mock"
1833 self."cov-core"
1886 self."cov-core"
1834 self."coverage"
1887 self."coverage"
1835 self."webtest"
1888 self."webtest"
1836 self."beautifulsoup4"
1889 self."beautifulsoup4"
1837 self."configobj"
1890 self."configobj"
1838 ];
1891 ];
1839 doCheck = true;
1892 doCheck = true;
1840 propagatedBuildInputs = [
1893 propagatedBuildInputs = [
1841 self."amqp"
1894 self."amqp"
1842 self."babel"
1895 self."babel"
1843 self."beaker"
1896 self."beaker"
1844 self."bleach"
1897 self."bleach"
1845 self."celery"
1898 self."celery"
1846 self."channelstream"
1899 self."channelstream"
1847 self."click"
1900 self."click"
1848 self."colander"
1901 self."colander"
1849 self."configobj"
1902 self."configobj"
1850 self."cssselect"
1903 self."cssselect"
1851 self."cryptography"
1904 self."cryptography"
1852 self."decorator"
1905 self."decorator"
1853 self."deform"
1906 self."deform"
1854 self."docutils"
1907 self."docutils"
1855 self."dogpile.cache"
1908 self."dogpile.cache"
1856 self."dogpile.core"
1909 self."dogpile.core"
1857 self."formencode"
1910 self."formencode"
1858 self."future"
1911 self."future"
1859 self."futures"
1912 self."futures"
1860 self."infrae.cache"
1913 self."infrae.cache"
1861 self."iso8601"
1914 self."iso8601"
1862 self."itsdangerous"
1915 self."itsdangerous"
1863 self."kombu"
1916 self."kombu"
1864 self."lxml"
1917 self."lxml"
1865 self."mako"
1918 self."mako"
1866 self."markdown"
1919 self."markdown"
1867 self."markupsafe"
1920 self."markupsafe"
1868 self."msgpack-python"
1921 self."msgpack-python"
1869 self."pyotp"
1922 self."pyotp"
1870 self."packaging"
1923 self."packaging"
1871 self."pathlib2"
1924 self."pathlib2"
1872 self."paste"
1925 self."paste"
1873 self."pastedeploy"
1926 self."pastedeploy"
1874 self."pastescript"
1927 self."pastescript"
1875 self."peppercorn"
1928 self."peppercorn"
1876 self."premailer"
1929 self."premailer"
1877 self."psutil"
1930 self."psutil"
1878 self."py-bcrypt"
1931 self."py-bcrypt"
1879 self."pycurl"
1932 self."pycurl"
1880 self."pycrypto"
1933 self."pycrypto"
1881 self."pygments"
1934 self."pygments"
1882 self."pyparsing"
1935 self."pyparsing"
1883 self."pyramid-debugtoolbar"
1936 self."pyramid-debugtoolbar"
1884 self."pyramid-mako"
1937 self."pyramid-mako"
1885 self."pyramid"
1938 self."pyramid"
1886 self."pyramid-mailer"
1939 self."pyramid-mailer"
1887 self."python-dateutil"
1940 self."python-dateutil"
1888 self."python-ldap"
1941 self."python-ldap"
1889 self."python-memcached"
1942 self."python-memcached"
1890 self."python-pam"
1943 self."python-pam"
1891 self."python-saml"
1944 self."python-saml"
1892 self."pytz"
1945 self."pytz"
1893 self."tzlocal"
1946 self."tzlocal"
1894 self."pyzmq"
1947 self."pyzmq"
1895 self."py-gfm"
1948 self."py-gfm"
1896 self."redis"
1949 self."redis"
1897 self."repoze.lru"
1950 self."repoze.lru"
1898 self."requests"
1951 self."requests"
1899 self."routes"
1952 self."routes"
1900 self."simplejson"
1953 self."simplejson"
1901 self."six"
1954 self."six"
1902 self."sqlalchemy"
1955 self."sqlalchemy"
1903 self."sshpubkeys"
1956 self."sshpubkeys"
1904 self."subprocess32"
1957 self."subprocess32"
1905 self."supervisor"
1958 self."supervisor"
1906 self."translationstring"
1959 self."translationstring"
1907 self."urllib3"
1960 self."urllib3"
1908 self."urlobject"
1961 self."urlobject"
1909 self."venusian"
1962 self."venusian"
1910 self."weberror"
1963 self."weberror"
1911 self."webhelpers2"
1964 self."webhelpers2"
1912 self."webob"
1965 self."webob"
1913 self."whoosh"
1966 self."whoosh"
1914 self."wsgiref"
1967 self."wsgiref"
1915 self."zope.cachedescriptors"
1968 self."zope.cachedescriptors"
1916 self."zope.deprecation"
1969 self."zope.deprecation"
1917 self."zope.event"
1970 self."zope.event"
1918 self."zope.interface"
1971 self."zope.interface"
1919 self."mysql-python"
1972 self."mysql-python"
1920 self."pymysql"
1973 self."pymysql"
1921 self."pysqlite"
1974 self."pysqlite"
1922 self."psycopg2"
1975 self."psycopg2"
1923 self."nbconvert"
1976 self."nbconvert"
1924 self."nbformat"
1977 self."nbformat"
1925 self."jupyter-client"
1978 self."jupyter-client"
1926 self."jupyter-core"
1979 self."jupyter-core"
1927 self."alembic"
1980 self."alembic"
1928 self."invoke"
1981 self."invoke"
1929 self."bumpversion"
1982 self."bumpversion"
1930 self."gevent"
1983 self."gevent"
1931 self."greenlet"
1984 self."greenlet"
1932 self."gunicorn"
1985 self."gunicorn"
1933 self."waitress"
1986 self."waitress"
1934 self."ipdb"
1987 self."ipdb"
1935 self."ipython"
1988 self."ipython"
1936 self."rhodecode-tools"
1989 self."rhodecode-tools"
1937 self."appenlight-client"
1990 self."appenlight-client"
1938 self."pytest"
1991 self."pytest"
1939 self."py"
1992 self."py"
1940 self."pytest-cov"
1993 self."pytest-cov"
1941 self."pytest-sugar"
1994 self."pytest-sugar"
1942 self."pytest-runner"
1995 self."pytest-runner"
1943 self."pytest-profiling"
1996 self."pytest-profiling"
1944 self."pytest-timeout"
1997 self."pytest-timeout"
1945 self."gprof2dot"
1998 self."gprof2dot"
1946 self."mock"
1999 self."mock"
1947 self."cov-core"
2000 self."cov-core"
1948 self."coverage"
2001 self."coverage"
1949 self."webtest"
2002 self."webtest"
1950 self."beautifulsoup4"
2003 self."beautifulsoup4"
1951 ];
2004 ];
1952 src = ./.;
2005 src = ./.;
1953 meta = {
2006 meta = {
1954 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2007 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1955 };
2008 };
1956 };
2009 };
1957 "rhodecode-tools" = super.buildPythonPackage {
2010 "rhodecode-tools" = super.buildPythonPackage {
1958 name = "rhodecode-tools-1.4.0";
2011 name = "rhodecode-tools-1.4.0";
1959 doCheck = false;
2012 doCheck = false;
1960 propagatedBuildInputs = [
2013 propagatedBuildInputs = [
1961 self."click"
2014 self."click"
1962 self."future"
2015 self."future"
1963 self."six"
2016 self."six"
1964 self."mako"
2017 self."mako"
1965 self."markupsafe"
2018 self."markupsafe"
1966 self."requests"
2019 self."requests"
1967 self."urllib3"
2020 self."urllib3"
1968 self."whoosh"
2021 self."whoosh"
1969 self."elasticsearch"
2022 self."elasticsearch"
1970 self."elasticsearch-dsl"
2023 self."elasticsearch-dsl"
1971 self."elasticsearch2"
2024 self."elasticsearch2"
1972 self."elasticsearch1-dsl"
2025 self."elasticsearch1-dsl"
1973 ];
2026 ];
1974 src = fetchurl {
2027 src = fetchurl {
1975 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2028 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
1976 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2029 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
1977 };
2030 };
1978 meta = {
2031 meta = {
1979 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2032 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
1980 };
2033 };
1981 };
2034 };
1982 "routes" = super.buildPythonPackage {
2035 "routes" = super.buildPythonPackage {
1983 name = "routes-2.4.1";
2036 name = "routes-2.4.1";
1984 doCheck = false;
2037 doCheck = false;
1985 propagatedBuildInputs = [
2038 propagatedBuildInputs = [
1986 self."six"
2039 self."six"
1987 self."repoze.lru"
2040 self."repoze.lru"
1988 ];
2041 ];
1989 src = fetchurl {
2042 src = fetchurl {
1990 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2043 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
1991 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2044 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
1992 };
2045 };
1993 meta = {
2046 meta = {
1994 license = [ pkgs.lib.licenses.mit ];
2047 license = [ pkgs.lib.licenses.mit ];
1995 };
2048 };
1996 };
2049 };
1997 "scandir" = super.buildPythonPackage {
2050 "scandir" = super.buildPythonPackage {
1998 name = "scandir-1.10.0";
2051 name = "scandir-1.10.0";
1999 doCheck = false;
2052 doCheck = false;
2000 src = fetchurl {
2053 src = fetchurl {
2001 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2054 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2002 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2055 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2003 };
2056 };
2004 meta = {
2057 meta = {
2005 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2058 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2006 };
2059 };
2007 };
2060 };
2008 "setproctitle" = super.buildPythonPackage {
2061 "setproctitle" = super.buildPythonPackage {
2009 name = "setproctitle-1.1.10";
2062 name = "setproctitle-1.1.10";
2010 doCheck = false;
2063 doCheck = false;
2011 src = fetchurl {
2064 src = fetchurl {
2012 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2065 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2013 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2066 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2014 };
2067 };
2015 meta = {
2068 meta = {
2016 license = [ pkgs.lib.licenses.bsdOriginal ];
2069 license = [ pkgs.lib.licenses.bsdOriginal ];
2017 };
2070 };
2018 };
2071 };
2019 "setuptools" = super.buildPythonPackage {
2072 "setuptools" = super.buildPythonPackage {
2020 name = "setuptools-44.1.0";
2073 name = "setuptools-44.1.0";
2021 doCheck = false;
2074 doCheck = false;
2022 src = fetchurl {
2075 src = fetchurl {
2023 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2076 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2024 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2077 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2025 };
2078 };
2026 meta = {
2079 meta = {
2027 license = [ pkgs.lib.licenses.mit ];
2080 license = [ pkgs.lib.licenses.mit ];
2028 };
2081 };
2029 };
2082 };
2030 "simplegeneric" = super.buildPythonPackage {
2083 "simplegeneric" = super.buildPythonPackage {
2031 name = "simplegeneric-0.8.1";
2084 name = "simplegeneric-0.8.1";
2032 doCheck = false;
2085 doCheck = false;
2033 src = fetchurl {
2086 src = fetchurl {
2034 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2087 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2035 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2088 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2036 };
2089 };
2037 meta = {
2090 meta = {
2038 license = [ pkgs.lib.licenses.zpl21 ];
2091 license = [ pkgs.lib.licenses.zpl21 ];
2039 };
2092 };
2040 };
2093 };
2041 "simplejson" = super.buildPythonPackage {
2094 "simplejson" = super.buildPythonPackage {
2042 name = "simplejson-3.16.0";
2095 name = "simplejson-3.16.0";
2043 doCheck = false;
2096 doCheck = false;
2044 src = fetchurl {
2097 src = fetchurl {
2045 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2098 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2046 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2099 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2047 };
2100 };
2048 meta = {
2101 meta = {
2049 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2102 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2050 };
2103 };
2051 };
2104 };
2052 "six" = super.buildPythonPackage {
2105 "six" = super.buildPythonPackage {
2053 name = "six-1.11.0";
2106 name = "six-1.11.0";
2054 doCheck = false;
2107 doCheck = false;
2055 src = fetchurl {
2108 src = fetchurl {
2056 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2109 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2057 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2110 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2058 };
2111 };
2059 meta = {
2112 meta = {
2060 license = [ pkgs.lib.licenses.mit ];
2113 license = [ pkgs.lib.licenses.mit ];
2061 };
2114 };
2062 };
2115 };
2063 "sqlalchemy" = super.buildPythonPackage {
2116 "sqlalchemy" = super.buildPythonPackage {
2064 name = "sqlalchemy-1.3.15";
2117 name = "sqlalchemy-1.3.15";
2065 doCheck = false;
2118 doCheck = false;
2066 src = fetchurl {
2119 src = fetchurl {
2067 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2120 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2068 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2121 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2069 };
2122 };
2070 meta = {
2123 meta = {
2071 license = [ pkgs.lib.licenses.mit ];
2124 license = [ pkgs.lib.licenses.mit ];
2072 };
2125 };
2073 };
2126 };
2074 "sshpubkeys" = super.buildPythonPackage {
2127 "sshpubkeys" = super.buildPythonPackage {
2075 name = "sshpubkeys-3.1.0";
2128 name = "sshpubkeys-3.1.0";
2076 doCheck = false;
2129 doCheck = false;
2077 propagatedBuildInputs = [
2130 propagatedBuildInputs = [
2078 self."cryptography"
2131 self."cryptography"
2079 self."ecdsa"
2132 self."ecdsa"
2080 ];
2133 ];
2081 src = fetchurl {
2134 src = fetchurl {
2082 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2135 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2083 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2136 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2084 };
2137 };
2085 meta = {
2138 meta = {
2086 license = [ pkgs.lib.licenses.bsdOriginal ];
2139 license = [ pkgs.lib.licenses.bsdOriginal ];
2087 };
2140 };
2088 };
2141 };
2089 "subprocess32" = super.buildPythonPackage {
2142 "subprocess32" = super.buildPythonPackage {
2090 name = "subprocess32-3.5.4";
2143 name = "subprocess32-3.5.4";
2091 doCheck = false;
2144 doCheck = false;
2092 src = fetchurl {
2145 src = fetchurl {
2093 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2146 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2094 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2147 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2095 };
2148 };
2096 meta = {
2149 meta = {
2097 license = [ pkgs.lib.licenses.psfl ];
2150 license = [ pkgs.lib.licenses.psfl ];
2098 };
2151 };
2099 };
2152 };
2100 "supervisor" = super.buildPythonPackage {
2153 "supervisor" = super.buildPythonPackage {
2101 name = "supervisor-4.1.0";
2154 name = "supervisor-4.1.0";
2102 doCheck = false;
2155 doCheck = false;
2103 src = fetchurl {
2156 src = fetchurl {
2104 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2157 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2105 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2158 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2106 };
2159 };
2107 meta = {
2160 meta = {
2108 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2161 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2109 };
2162 };
2110 };
2163 };
2111 "tempita" = super.buildPythonPackage {
2164 "tempita" = super.buildPythonPackage {
2112 name = "tempita-0.5.2";
2165 name = "tempita-0.5.2";
2113 doCheck = false;
2166 doCheck = false;
2114 src = fetchurl {
2167 src = fetchurl {
2115 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2168 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2116 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2169 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2117 };
2170 };
2118 meta = {
2171 meta = {
2119 license = [ pkgs.lib.licenses.mit ];
2172 license = [ pkgs.lib.licenses.mit ];
2120 };
2173 };
2121 };
2174 };
2122 "termcolor" = super.buildPythonPackage {
2175 "termcolor" = super.buildPythonPackage {
2123 name = "termcolor-1.1.0";
2176 name = "termcolor-1.1.0";
2124 doCheck = false;
2177 doCheck = false;
2125 src = fetchurl {
2178 src = fetchurl {
2126 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2179 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2127 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2180 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2128 };
2181 };
2129 meta = {
2182 meta = {
2130 license = [ pkgs.lib.licenses.mit ];
2183 license = [ pkgs.lib.licenses.mit ];
2131 };
2184 };
2132 };
2185 };
2133 "testpath" = super.buildPythonPackage {
2186 "testpath" = super.buildPythonPackage {
2134 name = "testpath-0.4.4";
2187 name = "testpath-0.4.4";
2135 doCheck = false;
2188 doCheck = false;
2136 src = fetchurl {
2189 src = fetchurl {
2137 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2190 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2138 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2191 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2139 };
2192 };
2140 meta = {
2193 meta = {
2141 license = [ ];
2194 license = [ ];
2142 };
2195 };
2143 };
2196 };
2144 "traitlets" = super.buildPythonPackage {
2197 "traitlets" = super.buildPythonPackage {
2145 name = "traitlets-4.3.3";
2198 name = "traitlets-4.3.3";
2146 doCheck = false;
2199 doCheck = false;
2147 propagatedBuildInputs = [
2200 propagatedBuildInputs = [
2148 self."ipython-genutils"
2201 self."ipython-genutils"
2149 self."six"
2202 self."six"
2150 self."decorator"
2203 self."decorator"
2151 self."enum34"
2204 self."enum34"
2152 ];
2205 ];
2153 src = fetchurl {
2206 src = fetchurl {
2154 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2207 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2155 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2208 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2156 };
2209 };
2157 meta = {
2210 meta = {
2158 license = [ pkgs.lib.licenses.bsdOriginal ];
2211 license = [ pkgs.lib.licenses.bsdOriginal ];
2159 };
2212 };
2160 };
2213 };
2161 "transaction" = super.buildPythonPackage {
2214 "transaction" = super.buildPythonPackage {
2162 name = "transaction-2.4.0";
2215 name = "transaction-2.4.0";
2163 doCheck = false;
2216 doCheck = false;
2164 propagatedBuildInputs = [
2217 propagatedBuildInputs = [
2165 self."zope.interface"
2218 self."zope.interface"
2166 ];
2219 ];
2167 src = fetchurl {
2220 src = fetchurl {
2168 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2221 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2169 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2222 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2170 };
2223 };
2171 meta = {
2224 meta = {
2172 license = [ pkgs.lib.licenses.zpl21 ];
2225 license = [ pkgs.lib.licenses.zpl21 ];
2173 };
2226 };
2174 };
2227 };
2175 "translationstring" = super.buildPythonPackage {
2228 "translationstring" = super.buildPythonPackage {
2176 name = "translationstring-1.3";
2229 name = "translationstring-1.3";
2177 doCheck = false;
2230 doCheck = false;
2178 src = fetchurl {
2231 src = fetchurl {
2179 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2232 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2180 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2233 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2181 };
2234 };
2182 meta = {
2235 meta = {
2183 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2236 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2184 };
2237 };
2185 };
2238 };
2186 "tzlocal" = super.buildPythonPackage {
2239 "tzlocal" = super.buildPythonPackage {
2187 name = "tzlocal-1.5.1";
2240 name = "tzlocal-1.5.1";
2188 doCheck = false;
2241 doCheck = false;
2189 propagatedBuildInputs = [
2242 propagatedBuildInputs = [
2190 self."pytz"
2243 self."pytz"
2191 ];
2244 ];
2192 src = fetchurl {
2245 src = fetchurl {
2193 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2246 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2194 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2247 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2195 };
2248 };
2196 meta = {
2249 meta = {
2197 license = [ pkgs.lib.licenses.mit ];
2250 license = [ pkgs.lib.licenses.mit ];
2198 };
2251 };
2199 };
2252 };
2200 "urllib3" = super.buildPythonPackage {
2253 "urllib3" = super.buildPythonPackage {
2201 name = "urllib3-1.25.2";
2254 name = "urllib3-1.25.2";
2202 doCheck = false;
2255 doCheck = false;
2203 src = fetchurl {
2256 src = fetchurl {
2204 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2257 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2205 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2258 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2206 };
2259 };
2207 meta = {
2260 meta = {
2208 license = [ pkgs.lib.licenses.mit ];
2261 license = [ pkgs.lib.licenses.mit ];
2209 };
2262 };
2210 };
2263 };
2211 "urlobject" = super.buildPythonPackage {
2264 "urlobject" = super.buildPythonPackage {
2212 name = "urlobject-2.4.3";
2265 name = "urlobject-2.4.3";
2213 doCheck = false;
2266 doCheck = false;
2214 src = fetchurl {
2267 src = fetchurl {
2215 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2268 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2216 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2269 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2217 };
2270 };
2218 meta = {
2271 meta = {
2219 license = [ pkgs.lib.licenses.publicDomain ];
2272 license = [ pkgs.lib.licenses.publicDomain ];
2220 };
2273 };
2221 };
2274 };
2222 "venusian" = super.buildPythonPackage {
2275 "venusian" = super.buildPythonPackage {
2223 name = "venusian-1.2.0";
2276 name = "venusian-1.2.0";
2224 doCheck = false;
2277 doCheck = false;
2225 src = fetchurl {
2278 src = fetchurl {
2226 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2279 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2227 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2280 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2228 };
2281 };
2229 meta = {
2282 meta = {
2230 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2283 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2231 };
2284 };
2232 };
2285 };
2233 "vine" = super.buildPythonPackage {
2286 "vine" = super.buildPythonPackage {
2234 name = "vine-1.3.0";
2287 name = "vine-1.3.0";
2235 doCheck = false;
2288 doCheck = false;
2236 src = fetchurl {
2289 src = fetchurl {
2237 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2290 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2238 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2291 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2239 };
2292 };
2240 meta = {
2293 meta = {
2241 license = [ pkgs.lib.licenses.bsdOriginal ];
2294 license = [ pkgs.lib.licenses.bsdOriginal ];
2242 };
2295 };
2243 };
2296 };
2244 "waitress" = super.buildPythonPackage {
2297 "waitress" = super.buildPythonPackage {
2245 name = "waitress-1.3.1";
2298 name = "waitress-1.3.1";
2246 doCheck = false;
2299 doCheck = false;
2247 src = fetchurl {
2300 src = fetchurl {
2248 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2301 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2249 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2302 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2250 };
2303 };
2251 meta = {
2304 meta = {
2252 license = [ pkgs.lib.licenses.zpl21 ];
2305 license = [ pkgs.lib.licenses.zpl21 ];
2253 };
2306 };
2254 };
2307 };
2255 "wcwidth" = super.buildPythonPackage {
2308 "wcwidth" = super.buildPythonPackage {
2256 name = "wcwidth-0.1.9";
2309 name = "wcwidth-0.1.9";
2257 doCheck = false;
2310 doCheck = false;
2258 src = fetchurl {
2311 src = fetchurl {
2259 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2312 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2260 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2313 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2261 };
2314 };
2262 meta = {
2315 meta = {
2263 license = [ pkgs.lib.licenses.mit ];
2316 license = [ pkgs.lib.licenses.mit ];
2264 };
2317 };
2265 };
2318 };
2266 "webencodings" = super.buildPythonPackage {
2319 "webencodings" = super.buildPythonPackage {
2267 name = "webencodings-0.5.1";
2320 name = "webencodings-0.5.1";
2268 doCheck = false;
2321 doCheck = false;
2269 src = fetchurl {
2322 src = fetchurl {
2270 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2323 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2271 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2324 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2272 };
2325 };
2273 meta = {
2326 meta = {
2274 license = [ pkgs.lib.licenses.bsdOriginal ];
2327 license = [ pkgs.lib.licenses.bsdOriginal ];
2275 };
2328 };
2276 };
2329 };
2277 "weberror" = super.buildPythonPackage {
2330 "weberror" = super.buildPythonPackage {
2278 name = "weberror-0.13.1";
2331 name = "weberror-0.13.1";
2279 doCheck = false;
2332 doCheck = false;
2280 propagatedBuildInputs = [
2333 propagatedBuildInputs = [
2281 self."webob"
2334 self."webob"
2282 self."tempita"
2335 self."tempita"
2283 self."pygments"
2336 self."pygments"
2284 self."paste"
2337 self."paste"
2285 ];
2338 ];
2286 src = fetchurl {
2339 src = fetchurl {
2287 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2340 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2288 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2341 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2289 };
2342 };
2290 meta = {
2343 meta = {
2291 license = [ pkgs.lib.licenses.mit ];
2344 license = [ pkgs.lib.licenses.mit ];
2292 };
2345 };
2293 };
2346 };
2294 "webhelpers2" = super.buildPythonPackage {
2347 "webhelpers2" = super.buildPythonPackage {
2295 name = "webhelpers2-2.0";
2348 name = "webhelpers2-2.0";
2296 doCheck = false;
2349 doCheck = false;
2297 propagatedBuildInputs = [
2350 propagatedBuildInputs = [
2298 self."markupsafe"
2351 self."markupsafe"
2299 self."six"
2352 self."six"
2300 ];
2353 ];
2301 src = fetchurl {
2354 src = fetchurl {
2302 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2355 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2303 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2356 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2304 };
2357 };
2305 meta = {
2358 meta = {
2306 license = [ pkgs.lib.licenses.mit ];
2359 license = [ pkgs.lib.licenses.mit ];
2307 };
2360 };
2308 };
2361 };
2309 "webob" = super.buildPythonPackage {
2362 "webob" = super.buildPythonPackage {
2310 name = "webob-1.8.5";
2363 name = "webob-1.8.5";
2311 doCheck = false;
2364 doCheck = false;
2312 src = fetchurl {
2365 src = fetchurl {
2313 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2366 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2314 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2367 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2315 };
2368 };
2316 meta = {
2369 meta = {
2317 license = [ pkgs.lib.licenses.mit ];
2370 license = [ pkgs.lib.licenses.mit ];
2318 };
2371 };
2319 };
2372 };
2320 "webtest" = super.buildPythonPackage {
2373 "webtest" = super.buildPythonPackage {
2321 name = "webtest-2.0.34";
2374 name = "webtest-2.0.34";
2322 doCheck = false;
2375 doCheck = false;
2323 propagatedBuildInputs = [
2376 propagatedBuildInputs = [
2324 self."six"
2377 self."six"
2325 self."webob"
2378 self."webob"
2326 self."waitress"
2379 self."waitress"
2327 self."beautifulsoup4"
2380 self."beautifulsoup4"
2328 ];
2381 ];
2329 src = fetchurl {
2382 src = fetchurl {
2330 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2383 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2331 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2384 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2332 };
2385 };
2333 meta = {
2386 meta = {
2334 license = [ pkgs.lib.licenses.mit ];
2387 license = [ pkgs.lib.licenses.mit ];
2335 };
2388 };
2336 };
2389 };
2337 "whoosh" = super.buildPythonPackage {
2390 "whoosh" = super.buildPythonPackage {
2338 name = "whoosh-2.7.4";
2391 name = "whoosh-2.7.4";
2339 doCheck = false;
2392 doCheck = false;
2340 src = fetchurl {
2393 src = fetchurl {
2341 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2394 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2342 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2395 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2343 };
2396 };
2344 meta = {
2397 meta = {
2345 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2398 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2346 };
2399 };
2347 };
2400 };
2348 "ws4py" = super.buildPythonPackage {
2401 "ws4py" = super.buildPythonPackage {
2349 name = "ws4py-0.5.1";
2402 name = "ws4py-0.5.1";
2350 doCheck = false;
2403 doCheck = false;
2351 src = fetchurl {
2404 src = fetchurl {
2352 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2405 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2353 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2406 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2354 };
2407 };
2355 meta = {
2408 meta = {
2356 license = [ pkgs.lib.licenses.bsdOriginal ];
2409 license = [ pkgs.lib.licenses.bsdOriginal ];
2357 };
2410 };
2358 };
2411 };
2359 "wsgiref" = super.buildPythonPackage {
2412 "wsgiref" = super.buildPythonPackage {
2360 name = "wsgiref-0.1.2";
2413 name = "wsgiref-0.1.2";
2361 doCheck = false;
2414 doCheck = false;
2362 src = fetchurl {
2415 src = fetchurl {
2363 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2416 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2364 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2417 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2365 };
2418 };
2366 meta = {
2419 meta = {
2367 license = [ { fullName = "PSF or ZPL"; } ];
2420 license = [ { fullName = "PSF or ZPL"; } ];
2368 };
2421 };
2369 };
2422 };
2370 "zipp" = super.buildPythonPackage {
2423 "zipp" = super.buildPythonPackage {
2371 name = "zipp-1.2.0";
2424 name = "zipp-1.2.0";
2372 doCheck = false;
2425 doCheck = false;
2373 propagatedBuildInputs = [
2426 propagatedBuildInputs = [
2374 self."contextlib2"
2427 self."contextlib2"
2375 ];
2428 ];
2376 src = fetchurl {
2429 src = fetchurl {
2377 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2430 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2378 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2431 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2379 };
2432 };
2380 meta = {
2433 meta = {
2381 license = [ pkgs.lib.licenses.mit ];
2434 license = [ pkgs.lib.licenses.mit ];
2382 };
2435 };
2383 };
2436 };
2384 "zope.cachedescriptors" = super.buildPythonPackage {
2437 "zope.cachedescriptors" = super.buildPythonPackage {
2385 name = "zope.cachedescriptors-4.3.1";
2438 name = "zope.cachedescriptors-4.3.1";
2386 doCheck = false;
2439 doCheck = false;
2387 propagatedBuildInputs = [
2440 propagatedBuildInputs = [
2388 self."setuptools"
2441 self."setuptools"
2389 ];
2442 ];
2390 src = fetchurl {
2443 src = fetchurl {
2391 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2444 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2392 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2445 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2393 };
2446 };
2394 meta = {
2447 meta = {
2395 license = [ pkgs.lib.licenses.zpl21 ];
2448 license = [ pkgs.lib.licenses.zpl21 ];
2396 };
2449 };
2397 };
2450 };
2398 "zope.deprecation" = super.buildPythonPackage {
2451 "zope.deprecation" = super.buildPythonPackage {
2399 name = "zope.deprecation-4.4.0";
2452 name = "zope.deprecation-4.4.0";
2400 doCheck = false;
2453 doCheck = false;
2401 propagatedBuildInputs = [
2454 propagatedBuildInputs = [
2402 self."setuptools"
2455 self."setuptools"
2403 ];
2456 ];
2404 src = fetchurl {
2457 src = fetchurl {
2405 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2458 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2406 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2459 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2407 };
2460 };
2408 meta = {
2461 meta = {
2409 license = [ pkgs.lib.licenses.zpl21 ];
2462 license = [ pkgs.lib.licenses.zpl21 ];
2410 };
2463 };
2411 };
2464 };
2412 "zope.event" = super.buildPythonPackage {
2465 "zope.event" = super.buildPythonPackage {
2413 name = "zope.event-4.4";
2466 name = "zope.event-4.4";
2414 doCheck = false;
2467 doCheck = false;
2415 propagatedBuildInputs = [
2468 propagatedBuildInputs = [
2416 self."setuptools"
2469 self."setuptools"
2417 ];
2470 ];
2418 src = fetchurl {
2471 src = fetchurl {
2419 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2472 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2420 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2473 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2421 };
2474 };
2422 meta = {
2475 meta = {
2423 license = [ pkgs.lib.licenses.zpl21 ];
2476 license = [ pkgs.lib.licenses.zpl21 ];
2424 };
2477 };
2425 };
2478 };
2426 "zope.interface" = super.buildPythonPackage {
2479 "zope.interface" = super.buildPythonPackage {
2427 name = "zope.interface-4.6.0";
2480 name = "zope.interface-4.6.0";
2428 doCheck = false;
2481 doCheck = false;
2429 propagatedBuildInputs = [
2482 propagatedBuildInputs = [
2430 self."setuptools"
2483 self."setuptools"
2431 ];
2484 ];
2432 src = fetchurl {
2485 src = fetchurl {
2433 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2486 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2434 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2487 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2435 };
2488 };
2436 meta = {
2489 meta = {
2437 license = [ pkgs.lib.licenses.zpl21 ];
2490 license = [ pkgs.lib.licenses.zpl21 ];
2438 };
2491 };
2439 };
2492 };
2440
2493
2441 ### Test requirements
2494 ### Test requirements
2442
2495
2443
2496
2444 }
2497 }
@@ -1,123 +1,123 b''
1 ## dependencies
1 ## dependencies
2
2
3 amqp==2.5.2
3 amqp==2.5.2
4 babel==1.3
4 babel==1.3
5 beaker==1.9.1
5 beaker==1.9.1
6 bleach==3.1.3
6 bleach==3.1.3
7 celery==4.3.0
7 celery==4.3.0
8 channelstream==0.5.2
8 channelstream==0.6.14
9 click==7.0
9 click==7.0
10 colander==1.7.0
10 colander==1.7.0
11 # our custom configobj
11 # our custom configobj
12 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
12 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
13 cssselect==1.0.3
13 cssselect==1.0.3
14 cryptography==2.6.1
14 cryptography==2.6.1
15 decorator==4.1.2
15 decorator==4.1.2
16 deform==2.0.8
16 deform==2.0.8
17 docutils==0.16.0
17 docutils==0.16.0
18 dogpile.cache==0.9.0
18 dogpile.cache==0.9.0
19 dogpile.core==0.4.1
19 dogpile.core==0.4.1
20 formencode==1.2.4
20 formencode==1.2.4
21 future==0.14.3
21 future==0.14.3
22 futures==3.0.2
22 futures==3.0.2
23 infrae.cache==1.0.1
23 infrae.cache==1.0.1
24 iso8601==0.1.12
24 iso8601==0.1.12
25 itsdangerous==0.24
25 itsdangerous==1.1.0
26 kombu==4.6.6
26 kombu==4.6.6
27 lxml==4.2.5
27 lxml==4.2.5
28 mako==1.1.0
28 mako==1.1.0
29 markdown==2.6.11
29 markdown==2.6.11
30 markupsafe==1.1.1
30 markupsafe==1.1.1
31 msgpack-python==0.5.6
31 msgpack-python==0.5.6
32 pyotp==2.3.0
32 pyotp==2.3.0
33 packaging==20.3
33 packaging==20.3
34 pathlib2==2.3.5
34 pathlib2==2.3.5
35 paste==3.4.0
35 paste==3.4.0
36 pastedeploy==2.1.0
36 pastedeploy==2.1.0
37 pastescript==3.2.0
37 pastescript==3.2.0
38 peppercorn==0.6
38 peppercorn==0.6
39 premailer==3.6.1
39 premailer==3.6.1
40 psutil==5.7.0
40 psutil==5.7.0
41 py-bcrypt==0.4
41 py-bcrypt==0.4
42 pycurl==7.43.0.3
42 pycurl==7.43.0.3
43 pycrypto==2.6.1
43 pycrypto==2.6.1
44 pygments==2.4.2
44 pygments==2.4.2
45 pyparsing==2.4.7
45 pyparsing==2.4.7
46 pyramid-debugtoolbar==4.6.1
46 pyramid-debugtoolbar==4.6.1
47 pyramid-mako==1.1.0
47 pyramid-mako==1.1.0
48 pyramid==1.10.4
48 pyramid==1.10.4
49 pyramid_mailer==0.15.1
49 pyramid_mailer==0.15.1
50 python-dateutil==2.8.1
50 python-dateutil==2.8.1
51 python-ldap==3.2.0
51 python-ldap==3.2.0
52 python-memcached==1.59
52 python-memcached==1.59
53 python-pam==1.8.4
53 python-pam==1.8.4
54 python-saml==2.4.2
54 python-saml==2.4.2
55 pytz==2019.3
55 pytz==2019.3
56 tzlocal==1.5.1
56 tzlocal==1.5.1
57 pyzmq==14.6.0
57 pyzmq==14.6.0
58 py-gfm==0.1.4
58 py-gfm==0.1.4
59 redis==3.4.1
59 redis==3.4.1
60 repoze.lru==0.7
60 repoze.lru==0.7
61 requests==2.22.0
61 requests==2.22.0
62 routes==2.4.1
62 routes==2.4.1
63 simplejson==3.16.0
63 simplejson==3.16.0
64 six==1.11.0
64 six==1.11.0
65 sqlalchemy==1.3.15
65 sqlalchemy==1.3.15
66 sshpubkeys==3.1.0
66 sshpubkeys==3.1.0
67 subprocess32==3.5.4
67 subprocess32==3.5.4
68 supervisor==4.1.0
68 supervisor==4.1.0
69 translationstring==1.3
69 translationstring==1.3
70 urllib3==1.25.2
70 urllib3==1.25.2
71 urlobject==2.4.3
71 urlobject==2.4.3
72 venusian==1.2.0
72 venusian==1.2.0
73 weberror==0.13.1
73 weberror==0.13.1
74 webhelpers2==2.0
74 webhelpers2==2.0
75 webob==1.8.5
75 webob==1.8.5
76 whoosh==2.7.4
76 whoosh==2.7.4
77 wsgiref==0.1.2
77 wsgiref==0.1.2
78 zope.cachedescriptors==4.3.1
78 zope.cachedescriptors==4.3.1
79 zope.deprecation==4.4.0
79 zope.deprecation==4.4.0
80 zope.event==4.4.0
80 zope.event==4.4.0
81 zope.interface==4.6.0
81 zope.interface==4.6.0
82
82
83 # DB drivers
83 # DB drivers
84 mysql-python==1.2.5
84 mysql-python==1.2.5
85 pymysql==0.8.1
85 pymysql==0.8.1
86 pysqlite==2.8.3
86 pysqlite==2.8.3
87 psycopg2==2.8.4
87 psycopg2==2.8.4
88
88
89 # IPYTHON RENDERING
89 # IPYTHON RENDERING
90 # entrypoints backport, pypi version doesn't support egg installs
90 # entrypoints backport, pypi version doesn't support egg installs
91 https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d#egg=entrypoints==0.2.2.rhodecode-upstream1
91 https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d#egg=entrypoints==0.2.2.rhodecode-upstream1
92 nbconvert==5.3.1
92 nbconvert==5.3.1
93 nbformat==4.4.0
93 nbformat==4.4.0
94 jupyter-client==5.0.0
94 jupyter-client==5.0.0
95 jupyter-core==4.5.0
95 jupyter-core==4.5.0
96
96
97 ## cli tools
97 ## cli tools
98 alembic==1.4.2
98 alembic==1.4.2
99 invoke==0.13.0
99 invoke==0.13.0
100 bumpversion==0.5.3
100 bumpversion==0.5.3
101
101
102 ## http servers
102 ## http servers
103 gevent==1.5.0
103 gevent==1.5.0
104 greenlet==0.4.15
104 greenlet==0.4.15
105 gunicorn==19.9.0
105 gunicorn==19.9.0
106 waitress==1.3.1
106 waitress==1.3.1
107
107
108 ## debug
108 ## debug
109 ipdb==0.13.2
109 ipdb==0.13.2
110 ipython==5.1.0
110 ipython==5.1.0
111
111
112 ## rhodecode-tools, special case, use file://PATH.tar.gz#egg=rhodecode-tools==X.Y.Z, to test local version
112 ## rhodecode-tools, special case, use file://PATH.tar.gz#egg=rhodecode-tools==X.Y.Z, to test local version
113 https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a#egg=rhodecode-tools==1.4.0
113 https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a#egg=rhodecode-tools==1.4.0
114
114
115
115
116 ## appenlight
116 ## appenlight
117 appenlight-client==0.6.26
117 appenlight-client==0.6.26
118
118
119 ## test related requirements
119 ## test related requirements
120 -r requirements_test.txt
120 -r requirements_test.txt
121
121
122 ## uncomment to add the debug libraries
122 ## uncomment to add the debug libraries
123 #-r requirements_debug.txt
123 #-r requirements_debug.txt
@@ -1,27 +1,28 b''
1 # contains not directly required libraries we want to pin the version.
1 # contains not directly required libraries we want to pin the version.
2
2
3 atomicwrites==1.3.0
3 atomicwrites==1.3.0
4 attrs==19.3.0
4 attrs==19.3.0
5 asn1crypto==0.24.0
5 asn1crypto==0.24.0
6 billiard==3.6.1.0
6 billiard==3.6.1.0
7 cffi==1.12.3
7 cffi==1.12.3
8 chameleon==2.24
8 chameleon==2.24
9 configparser==4.0.2
9 configparser==4.0.2
10 contextlib2==0.6.0.post1
10 contextlib2==0.6.0.post1
11 ecdsa==0.13.2
11 ecdsa==0.13.2
12 gnureadline==6.3.8
12 gnureadline==6.3.8
13 hupper==1.10.2
13 hupper==1.10.2
14 ipaddress==1.0.23
14 ipaddress==1.0.23
15 importlib-metadata==1.6.0
15 importlib-metadata==1.6.0
16 jinja2==2.9.6
16 jinja2==2.9.6
17 jsonschema==2.6.0
17 jsonschema==2.6.0
18 pluggy==0.13.1
18 pluggy==0.13.1
19 pyasn1-modules==0.2.6
19 pyasn1-modules==0.2.6
20 pyramid-jinja2==2.7
20 pyramid-jinja2==2.7
21 pyramid-apispec==0.3.2
21 scandir==1.10.0
22 scandir==1.10.0
22 setproctitle==1.1.10
23 setproctitle==1.1.10
23 tempita==0.5.2
24 tempita==0.5.2
24 testpath==0.4.4
25 testpath==0.4.4
25 transaction==2.4.0
26 transaction==2.4.0
26 vine==1.3.0
27 vine==1.3.0
27 wcwidth==0.1.9
28 wcwidth==0.1.9 No newline at end of file
@@ -1,1 +1,1 b''
1 4.20.1 No newline at end of file
1 4.21.0 No newline at end of file
@@ -1,60 +1,60 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 from collections import OrderedDict
22 from collections import OrderedDict
23
23
24 import sys
24 import sys
25 import platform
25 import platform
26
26
27 VERSION = tuple(open(os.path.join(
27 VERSION = tuple(open(os.path.join(
28 os.path.dirname(__file__), 'VERSION')).read().split('.'))
28 os.path.dirname(__file__), 'VERSION')).read().split('.'))
29
29
30 BACKENDS = OrderedDict()
30 BACKENDS = OrderedDict()
31
31
32 BACKENDS['hg'] = 'Mercurial repository'
32 BACKENDS['hg'] = 'Mercurial repository'
33 BACKENDS['git'] = 'Git repository'
33 BACKENDS['git'] = 'Git repository'
34 BACKENDS['svn'] = 'Subversion repository'
34 BACKENDS['svn'] = 'Subversion repository'
35
35
36
36
37 CELERY_ENABLED = False
37 CELERY_ENABLED = False
38 CELERY_EAGER = False
38 CELERY_EAGER = False
39
39
40 # link to config for pyramid
40 # link to config for pyramid
41 CONFIG = {}
41 CONFIG = {}
42
42
43 # Populated with the settings dictionary from application init in
43 # Populated with the settings dictionary from application init in
44 # rhodecode.conf.environment.load_pyramid_environment
44 # rhodecode.conf.environment.load_pyramid_environment
45 PYRAMID_SETTINGS = {}
45 PYRAMID_SETTINGS = {}
46
46
47 # Linked module for extensions
47 # Linked module for extensions
48 EXTENSIONS = {}
48 EXTENSIONS = {}
49
49
50 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
50 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
51 __dbversion__ = 108 # defines current db version for migrations
51 __dbversion__ = 109 # defines current db version for migrations
52 __platform__ = platform.system()
52 __platform__ = platform.system()
53 __license__ = 'AGPLv3, and Commercial License'
53 __license__ = 'AGPLv3, and Commercial License'
54 __author__ = 'RhodeCode GmbH'
54 __author__ = 'RhodeCode GmbH'
55 __url__ = 'https://code.rhodecode.com'
55 __url__ = 'https://code.rhodecode.com'
56
56
57 is_windows = __platform__ in ['Windows']
57 is_windows = __platform__ in ['Windows']
58 is_unix = not is_windows
58 is_unix = not is_windows
59 is_test = False
59 is_test = False
60 disable_error_handler = False
60 disable_error_handler = False
@@ -1,453 +1,452 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 JSON RPC utils
22 JSON RPC utils
23 """
23 """
24
24
25 import collections
25 import collections
26 import logging
26 import logging
27
27
28 from rhodecode.api.exc import JSONRPCError
28 from rhodecode.api.exc import JSONRPCError
29 from rhodecode.lib.auth import (
29 from rhodecode.lib.auth import (
30 HasPermissionAnyApi, HasRepoPermissionAnyApi, HasRepoGroupPermissionAnyApi)
30 HasPermissionAnyApi, HasRepoPermissionAnyApi, HasRepoGroupPermissionAnyApi)
31 from rhodecode.lib.utils import safe_unicode
31 from rhodecode.lib.utils import safe_unicode
32 from rhodecode.lib.vcs.exceptions import RepositoryError
32 from rhodecode.lib.vcs.exceptions import RepositoryError
33 from rhodecode.lib.view_utils import get_commit_from_ref_name
33 from rhodecode.lib.view_utils import get_commit_from_ref_name
34 from rhodecode.lib.utils2 import str2bool
34 from rhodecode.lib.utils2 import str2bool
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 class OAttr(object):
39 class OAttr(object):
40 """
40 """
41 Special Option that defines other attribute, and can default to them
41 Special Option that defines other attribute, and can default to them
42
42
43 Example::
43 Example::
44
44
45 def test(apiuser, userid=Optional(OAttr('apiuser')):
45 def test(apiuser, userid=Optional(OAttr('apiuser')):
46 user = Optional.extract(userid, evaluate_locals=local())
46 user = Optional.extract(userid, evaluate_locals=local())
47 #if we pass in userid, we get it, else it will default to apiuser
47 #if we pass in userid, we get it, else it will default to apiuser
48 #attribute
48 #attribute
49 """
49 """
50
50
51 def __init__(self, attr_name):
51 def __init__(self, attr_name):
52 self.attr_name = attr_name
52 self.attr_name = attr_name
53
53
54 def __repr__(self):
54 def __repr__(self):
55 return '<OptionalAttr:%s>' % self.attr_name
55 return '<OptionalAttr:%s>' % self.attr_name
56
56
57 def __call__(self):
57 def __call__(self):
58 return self
58 return self
59
59
60
60
61 class Optional(object):
61 class Optional(object):
62 """
62 """
63 Defines an optional parameter::
63 Defines an optional parameter::
64
64
65 param = param.getval() if isinstance(param, Optional) else param
65 param = param.getval() if isinstance(param, Optional) else param
66 param = param() if isinstance(param, Optional) else param
66 param = param() if isinstance(param, Optional) else param
67
67
68 is equivalent of::
68 is equivalent of::
69
69
70 param = Optional.extract(param)
70 param = Optional.extract(param)
71
71
72 """
72 """
73
73
74 def __init__(self, type_):
74 def __init__(self, type_):
75 self.type_ = type_
75 self.type_ = type_
76
76
77 def __repr__(self):
77 def __repr__(self):
78 return '<Optional:%s>' % self.type_.__repr__()
78 return '<Optional:%s>' % self.type_.__repr__()
79
79
80 def __call__(self):
80 def __call__(self):
81 return self.getval()
81 return self.getval()
82
82
83 def getval(self, evaluate_locals=None):
83 def getval(self, evaluate_locals=None):
84 """
84 """
85 returns value from this Optional instance
85 returns value from this Optional instance
86 """
86 """
87 if isinstance(self.type_, OAttr):
87 if isinstance(self.type_, OAttr):
88 param_name = self.type_.attr_name
88 param_name = self.type_.attr_name
89 if evaluate_locals:
89 if evaluate_locals:
90 return evaluate_locals[param_name]
90 return evaluate_locals[param_name]
91 # use params name
91 # use params name
92 return param_name
92 return param_name
93 return self.type_
93 return self.type_
94
94
95 @classmethod
95 @classmethod
96 def extract(cls, val, evaluate_locals=None, binary=None):
96 def extract(cls, val, evaluate_locals=None, binary=None):
97 """
97 """
98 Extracts value from Optional() instance
98 Extracts value from Optional() instance
99
99
100 :param val:
100 :param val:
101 :return: original value if it's not Optional instance else
101 :return: original value if it's not Optional instance else
102 value of instance
102 value of instance
103 """
103 """
104 if isinstance(val, cls):
104 if isinstance(val, cls):
105 val = val.getval(evaluate_locals)
105 val = val.getval(evaluate_locals)
106
106
107 if binary:
107 if binary:
108 val = str2bool(val)
108 val = str2bool(val)
109
109
110 return val
110 return val
111
111
112
112
113 def parse_args(cli_args, key_prefix=''):
113 def parse_args(cli_args, key_prefix=''):
114 from rhodecode.lib.utils2 import (escape_split)
114 from rhodecode.lib.utils2 import (escape_split)
115 kwargs = collections.defaultdict(dict)
115 kwargs = collections.defaultdict(dict)
116 for el in escape_split(cli_args, ','):
116 for el in escape_split(cli_args, ','):
117 kv = escape_split(el, '=', 1)
117 kv = escape_split(el, '=', 1)
118 if len(kv) == 2:
118 if len(kv) == 2:
119 k, v = kv
119 k, v = kv
120 kwargs[key_prefix + k] = v
120 kwargs[key_prefix + k] = v
121 return kwargs
121 return kwargs
122
122
123
123
124 def get_origin(obj):
124 def get_origin(obj):
125 """
125 """
126 Get origin of permission from object.
126 Get origin of permission from object.
127
127
128 :param obj:
128 :param obj:
129 """
129 """
130 origin = 'permission'
130 origin = 'permission'
131
131
132 if getattr(obj, 'owner_row', '') and getattr(obj, 'admin_row', ''):
132 if getattr(obj, 'owner_row', '') and getattr(obj, 'admin_row', ''):
133 # admin and owner case, maybe we should use dual string ?
133 # admin and owner case, maybe we should use dual string ?
134 origin = 'owner'
134 origin = 'owner'
135 elif getattr(obj, 'owner_row', ''):
135 elif getattr(obj, 'owner_row', ''):
136 origin = 'owner'
136 origin = 'owner'
137 elif getattr(obj, 'admin_row', ''):
137 elif getattr(obj, 'admin_row', ''):
138 origin = 'super-admin'
138 origin = 'super-admin'
139 return origin
139 return origin
140
140
141
141
142 def store_update(updates, attr, name):
142 def store_update(updates, attr, name):
143 """
143 """
144 Stores param in updates dict if it's not instance of Optional
144 Stores param in updates dict if it's not instance of Optional
145 allows easy updates of passed in params
145 allows easy updates of passed in params
146 """
146 """
147 if not isinstance(attr, Optional):
147 if not isinstance(attr, Optional):
148 updates[name] = attr
148 updates[name] = attr
149
149
150
150
151 def has_superadmin_permission(apiuser):
151 def has_superadmin_permission(apiuser):
152 """
152 """
153 Return True if apiuser is admin or return False
153 Return True if apiuser is admin or return False
154
154
155 :param apiuser:
155 :param apiuser:
156 """
156 """
157 if HasPermissionAnyApi('hg.admin')(user=apiuser):
157 if HasPermissionAnyApi('hg.admin')(user=apiuser):
158 return True
158 return True
159 return False
159 return False
160
160
161
161
162 def validate_repo_permissions(apiuser, repoid, repo, perms):
162 def validate_repo_permissions(apiuser, repoid, repo, perms):
163 """
163 """
164 Raise JsonRPCError if apiuser is not authorized or return True
164 Raise JsonRPCError if apiuser is not authorized or return True
165
165
166 :param apiuser:
166 :param apiuser:
167 :param repoid:
167 :param repoid:
168 :param repo:
168 :param repo:
169 :param perms:
169 :param perms:
170 """
170 """
171 if not HasRepoPermissionAnyApi(*perms)(
171 if not HasRepoPermissionAnyApi(*perms)(
172 user=apiuser, repo_name=repo.repo_name):
172 user=apiuser, repo_name=repo.repo_name):
173 raise JSONRPCError(
173 raise JSONRPCError('repository `%s` does not exist' % repoid)
174 'repository `%s` does not exist' % repoid)
175
174
176 return True
175 return True
177
176
178
177
179 def validate_repo_group_permissions(apiuser, repogroupid, repo_group, perms):
178 def validate_repo_group_permissions(apiuser, repogroupid, repo_group, perms):
180 """
179 """
181 Raise JsonRPCError if apiuser is not authorized or return True
180 Raise JsonRPCError if apiuser is not authorized or return True
182
181
183 :param apiuser:
182 :param apiuser:
184 :param repogroupid: just the id of repository group
183 :param repogroupid: just the id of repository group
185 :param repo_group: instance of repo_group
184 :param repo_group: instance of repo_group
186 :param perms:
185 :param perms:
187 """
186 """
188 if not HasRepoGroupPermissionAnyApi(*perms)(
187 if not HasRepoGroupPermissionAnyApi(*perms)(
189 user=apiuser, group_name=repo_group.group_name):
188 user=apiuser, group_name=repo_group.group_name):
190 raise JSONRPCError(
189 raise JSONRPCError(
191 'repository group `%s` does not exist' % repogroupid)
190 'repository group `%s` does not exist' % repogroupid)
192
191
193 return True
192 return True
194
193
195
194
196 def validate_set_owner_permissions(apiuser, owner):
195 def validate_set_owner_permissions(apiuser, owner):
197 if isinstance(owner, Optional):
196 if isinstance(owner, Optional):
198 owner = get_user_or_error(apiuser.user_id)
197 owner = get_user_or_error(apiuser.user_id)
199 else:
198 else:
200 if has_superadmin_permission(apiuser):
199 if has_superadmin_permission(apiuser):
201 owner = get_user_or_error(owner)
200 owner = get_user_or_error(owner)
202 else:
201 else:
203 # forbid setting owner for non-admins
202 # forbid setting owner for non-admins
204 raise JSONRPCError(
203 raise JSONRPCError(
205 'Only RhodeCode super-admin can specify `owner` param')
204 'Only RhodeCode super-admin can specify `owner` param')
206 return owner
205 return owner
207
206
208
207
209 def get_user_or_error(userid):
208 def get_user_or_error(userid):
210 """
209 """
211 Get user by id or name or return JsonRPCError if not found
210 Get user by id or name or return JsonRPCError if not found
212
211
213 :param userid:
212 :param userid:
214 """
213 """
215 from rhodecode.model.user import UserModel
214 from rhodecode.model.user import UserModel
216 user_model = UserModel()
215 user_model = UserModel()
217
216
218 if isinstance(userid, (int, long)):
217 if isinstance(userid, (int, long)):
219 try:
218 try:
220 user = user_model.get_user(userid)
219 user = user_model.get_user(userid)
221 except ValueError:
220 except ValueError:
222 user = None
221 user = None
223 else:
222 else:
224 user = user_model.get_by_username(userid)
223 user = user_model.get_by_username(userid)
225
224
226 if user is None:
225 if user is None:
227 raise JSONRPCError(
226 raise JSONRPCError(
228 'user `%s` does not exist' % (userid,))
227 'user `%s` does not exist' % (userid,))
229 return user
228 return user
230
229
231
230
232 def get_repo_or_error(repoid):
231 def get_repo_or_error(repoid):
233 """
232 """
234 Get repo by id or name or return JsonRPCError if not found
233 Get repo by id or name or return JsonRPCError if not found
235
234
236 :param repoid:
235 :param repoid:
237 """
236 """
238 from rhodecode.model.repo import RepoModel
237 from rhodecode.model.repo import RepoModel
239 repo_model = RepoModel()
238 repo_model = RepoModel()
240
239
241 if isinstance(repoid, (int, long)):
240 if isinstance(repoid, (int, long)):
242 try:
241 try:
243 repo = repo_model.get_repo(repoid)
242 repo = repo_model.get_repo(repoid)
244 except ValueError:
243 except ValueError:
245 repo = None
244 repo = None
246 else:
245 else:
247 repo = repo_model.get_by_repo_name(repoid)
246 repo = repo_model.get_by_repo_name(repoid)
248
247
249 if repo is None:
248 if repo is None:
250 raise JSONRPCError(
249 raise JSONRPCError(
251 'repository `%s` does not exist' % (repoid,))
250 'repository `%s` does not exist' % (repoid,))
252 return repo
251 return repo
253
252
254
253
255 def get_repo_group_or_error(repogroupid):
254 def get_repo_group_or_error(repogroupid):
256 """
255 """
257 Get repo group by id or name or return JsonRPCError if not found
256 Get repo group by id or name or return JsonRPCError if not found
258
257
259 :param repogroupid:
258 :param repogroupid:
260 """
259 """
261 from rhodecode.model.repo_group import RepoGroupModel
260 from rhodecode.model.repo_group import RepoGroupModel
262 repo_group_model = RepoGroupModel()
261 repo_group_model = RepoGroupModel()
263
262
264 if isinstance(repogroupid, (int, long)):
263 if isinstance(repogroupid, (int, long)):
265 try:
264 try:
266 repo_group = repo_group_model._get_repo_group(repogroupid)
265 repo_group = repo_group_model._get_repo_group(repogroupid)
267 except ValueError:
266 except ValueError:
268 repo_group = None
267 repo_group = None
269 else:
268 else:
270 repo_group = repo_group_model.get_by_group_name(repogroupid)
269 repo_group = repo_group_model.get_by_group_name(repogroupid)
271
270
272 if repo_group is None:
271 if repo_group is None:
273 raise JSONRPCError(
272 raise JSONRPCError(
274 'repository group `%s` does not exist' % (repogroupid,))
273 'repository group `%s` does not exist' % (repogroupid,))
275 return repo_group
274 return repo_group
276
275
277
276
278 def get_user_group_or_error(usergroupid):
277 def get_user_group_or_error(usergroupid):
279 """
278 """
280 Get user group by id or name or return JsonRPCError if not found
279 Get user group by id or name or return JsonRPCError if not found
281
280
282 :param usergroupid:
281 :param usergroupid:
283 """
282 """
284 from rhodecode.model.user_group import UserGroupModel
283 from rhodecode.model.user_group import UserGroupModel
285 user_group_model = UserGroupModel()
284 user_group_model = UserGroupModel()
286
285
287 if isinstance(usergroupid, (int, long)):
286 if isinstance(usergroupid, (int, long)):
288 try:
287 try:
289 user_group = user_group_model.get_group(usergroupid)
288 user_group = user_group_model.get_group(usergroupid)
290 except ValueError:
289 except ValueError:
291 user_group = None
290 user_group = None
292 else:
291 else:
293 user_group = user_group_model.get_by_name(usergroupid)
292 user_group = user_group_model.get_by_name(usergroupid)
294
293
295 if user_group is None:
294 if user_group is None:
296 raise JSONRPCError(
295 raise JSONRPCError(
297 'user group `%s` does not exist' % (usergroupid,))
296 'user group `%s` does not exist' % (usergroupid,))
298 return user_group
297 return user_group
299
298
300
299
301 def get_perm_or_error(permid, prefix=None):
300 def get_perm_or_error(permid, prefix=None):
302 """
301 """
303 Get permission by id or name or return JsonRPCError if not found
302 Get permission by id or name or return JsonRPCError if not found
304
303
305 :param permid:
304 :param permid:
306 """
305 """
307 from rhodecode.model.permission import PermissionModel
306 from rhodecode.model.permission import PermissionModel
308
307
309 perm = PermissionModel.cls.get_by_key(permid)
308 perm = PermissionModel.cls.get_by_key(permid)
310 if perm is None:
309 if perm is None:
311 msg = 'permission `{}` does not exist.'.format(permid)
310 msg = 'permission `{}` does not exist.'.format(permid)
312 if prefix:
311 if prefix:
313 msg += ' Permission should start with prefix: `{}`'.format(prefix)
312 msg += ' Permission should start with prefix: `{}`'.format(prefix)
314 raise JSONRPCError(msg)
313 raise JSONRPCError(msg)
315
314
316 if prefix:
315 if prefix:
317 if not perm.permission_name.startswith(prefix):
316 if not perm.permission_name.startswith(prefix):
318 raise JSONRPCError('permission `%s` is invalid, '
317 raise JSONRPCError('permission `%s` is invalid, '
319 'should start with %s' % (permid, prefix))
318 'should start with %s' % (permid, prefix))
320 return perm
319 return perm
321
320
322
321
323 def get_gist_or_error(gistid):
322 def get_gist_or_error(gistid):
324 """
323 """
325 Get gist by id or gist_access_id or return JsonRPCError if not found
324 Get gist by id or gist_access_id or return JsonRPCError if not found
326
325
327 :param gistid:
326 :param gistid:
328 """
327 """
329 from rhodecode.model.gist import GistModel
328 from rhodecode.model.gist import GistModel
330
329
331 gist = GistModel.cls.get_by_access_id(gistid)
330 gist = GistModel.cls.get_by_access_id(gistid)
332 if gist is None:
331 if gist is None:
333 raise JSONRPCError('gist `%s` does not exist' % (gistid,))
332 raise JSONRPCError('gist `%s` does not exist' % (gistid,))
334 return gist
333 return gist
335
334
336
335
337 def get_pull_request_or_error(pullrequestid):
336 def get_pull_request_or_error(pullrequestid):
338 """
337 """
339 Get pull request by id or return JsonRPCError if not found
338 Get pull request by id or return JsonRPCError if not found
340
339
341 :param pullrequestid:
340 :param pullrequestid:
342 """
341 """
343 from rhodecode.model.pull_request import PullRequestModel
342 from rhodecode.model.pull_request import PullRequestModel
344
343
345 try:
344 try:
346 pull_request = PullRequestModel().get(int(pullrequestid))
345 pull_request = PullRequestModel().get(int(pullrequestid))
347 except ValueError:
346 except ValueError:
348 raise JSONRPCError('pullrequestid must be an integer')
347 raise JSONRPCError('pullrequestid must be an integer')
349 if not pull_request:
348 if not pull_request:
350 raise JSONRPCError('pull request `%s` does not exist' % (
349 raise JSONRPCError('pull request `%s` does not exist' % (
351 pullrequestid,))
350 pullrequestid,))
352 return pull_request
351 return pull_request
353
352
354
353
355 def build_commit_data(commit, detail_level):
354 def build_commit_data(commit, detail_level):
356 parsed_diff = []
355 parsed_diff = []
357 if detail_level == 'extended':
356 if detail_level == 'extended':
358 for f_path in commit.added_paths:
357 for f_path in commit.added_paths:
359 parsed_diff.append(_get_commit_dict(filename=f_path, op='A'))
358 parsed_diff.append(_get_commit_dict(filename=f_path, op='A'))
360 for f_path in commit.changed_paths:
359 for f_path in commit.changed_paths:
361 parsed_diff.append(_get_commit_dict(filename=f_path, op='M'))
360 parsed_diff.append(_get_commit_dict(filename=f_path, op='M'))
362 for f_path in commit.removed_paths:
361 for f_path in commit.removed_paths:
363 parsed_diff.append(_get_commit_dict(filename=f_path, op='D'))
362 parsed_diff.append(_get_commit_dict(filename=f_path, op='D'))
364
363
365 elif detail_level == 'full':
364 elif detail_level == 'full':
366 from rhodecode.lib.diffs import DiffProcessor
365 from rhodecode.lib.diffs import DiffProcessor
367 diff_processor = DiffProcessor(commit.diff())
366 diff_processor = DiffProcessor(commit.diff())
368 for dp in diff_processor.prepare():
367 for dp in diff_processor.prepare():
369 del dp['stats']['ops']
368 del dp['stats']['ops']
370 _stats = dp['stats']
369 _stats = dp['stats']
371 parsed_diff.append(_get_commit_dict(
370 parsed_diff.append(_get_commit_dict(
372 filename=dp['filename'], op=dp['operation'],
371 filename=dp['filename'], op=dp['operation'],
373 new_revision=dp['new_revision'],
372 new_revision=dp['new_revision'],
374 old_revision=dp['old_revision'],
373 old_revision=dp['old_revision'],
375 raw_diff=dp['raw_diff'], stats=_stats))
374 raw_diff=dp['raw_diff'], stats=_stats))
376
375
377 return parsed_diff
376 return parsed_diff
378
377
379
378
380 def get_commit_or_error(ref, repo):
379 def get_commit_or_error(ref, repo):
381 try:
380 try:
382 ref_type, _, ref_hash = ref.split(':')
381 ref_type, _, ref_hash = ref.split(':')
383 except ValueError:
382 except ValueError:
384 raise JSONRPCError(
383 raise JSONRPCError(
385 'Ref `{ref}` given in a wrong format. Please check the API'
384 'Ref `{ref}` given in a wrong format. Please check the API'
386 ' documentation for more details'.format(ref=ref))
385 ' documentation for more details'.format(ref=ref))
387 try:
386 try:
388 # TODO: dan: refactor this to use repo.scm_instance().get_commit()
387 # TODO: dan: refactor this to use repo.scm_instance().get_commit()
389 # once get_commit supports ref_types
388 # once get_commit supports ref_types
390 return get_commit_from_ref_name(repo, ref_hash)
389 return get_commit_from_ref_name(repo, ref_hash)
391 except RepositoryError:
390 except RepositoryError:
392 raise JSONRPCError('Ref `{ref}` does not exist'.format(ref=ref))
391 raise JSONRPCError('Ref `{ref}` does not exist'.format(ref=ref))
393
392
394
393
395 def _get_ref_hash(repo, type_, name):
394 def _get_ref_hash(repo, type_, name):
396 vcs_repo = repo.scm_instance()
395 vcs_repo = repo.scm_instance()
397 if type_ in ['branch'] and vcs_repo.alias in ('hg', 'git'):
396 if type_ in ['branch'] and vcs_repo.alias in ('hg', 'git'):
398 return vcs_repo.branches[name]
397 return vcs_repo.branches[name]
399 elif type_ in ['bookmark', 'book'] and vcs_repo.alias == 'hg':
398 elif type_ in ['bookmark', 'book'] and vcs_repo.alias == 'hg':
400 return vcs_repo.bookmarks[name]
399 return vcs_repo.bookmarks[name]
401 else:
400 else:
402 raise ValueError()
401 raise ValueError()
403
402
404
403
405 def resolve_ref_or_error(ref, repo, allowed_ref_types=None):
404 def resolve_ref_or_error(ref, repo, allowed_ref_types=None):
406 allowed_ref_types = allowed_ref_types or ['bookmark', 'book', 'tag', 'branch']
405 allowed_ref_types = allowed_ref_types or ['bookmark', 'book', 'tag', 'branch']
407
406
408 def _parse_ref(type_, name, hash_=None):
407 def _parse_ref(type_, name, hash_=None):
409 return type_, name, hash_
408 return type_, name, hash_
410
409
411 try:
410 try:
412 ref_type, ref_name, ref_hash = _parse_ref(*ref.split(':'))
411 ref_type, ref_name, ref_hash = _parse_ref(*ref.split(':'))
413 except TypeError:
412 except TypeError:
414 raise JSONRPCError(
413 raise JSONRPCError(
415 'Ref `{ref}` given in a wrong format. Please check the API'
414 'Ref `{ref}` given in a wrong format. Please check the API'
416 ' documentation for more details'.format(ref=ref))
415 ' documentation for more details'.format(ref=ref))
417
416
418 if ref_type not in allowed_ref_types:
417 if ref_type not in allowed_ref_types:
419 raise JSONRPCError(
418 raise JSONRPCError(
420 'Ref `{ref}` type is not allowed. '
419 'Ref `{ref}` type is not allowed. '
421 'Only:{allowed_refs} are possible.'.format(
420 'Only:{allowed_refs} are possible.'.format(
422 ref=ref, allowed_refs=allowed_ref_types))
421 ref=ref, allowed_refs=allowed_ref_types))
423
422
424 try:
423 try:
425 ref_hash = ref_hash or _get_ref_hash(repo, ref_type, ref_name)
424 ref_hash = ref_hash or _get_ref_hash(repo, ref_type, ref_name)
426 except (KeyError, ValueError):
425 except (KeyError, ValueError):
427 raise JSONRPCError(
426 raise JSONRPCError(
428 'The specified value:{type}:`{name}` does not exist, or is not allowed.'.format(
427 'The specified value:{type}:`{name}` does not exist, or is not allowed.'.format(
429 type=ref_type, name=ref_name))
428 type=ref_type, name=ref_name))
430
429
431 return ':'.join([ref_type, ref_name, ref_hash])
430 return ':'.join([ref_type, ref_name, ref_hash])
432
431
433
432
434 def _get_commit_dict(
433 def _get_commit_dict(
435 filename, op, new_revision=None, old_revision=None,
434 filename, op, new_revision=None, old_revision=None,
436 raw_diff=None, stats=None):
435 raw_diff=None, stats=None):
437 if stats is None:
436 if stats is None:
438 stats = {
437 stats = {
439 "added": None,
438 "added": None,
440 "binary": None,
439 "binary": None,
441 "deleted": None
440 "deleted": None
442 }
441 }
443 return {
442 return {
444 "filename": safe_unicode(filename),
443 "filename": safe_unicode(filename),
445 "op": op,
444 "op": op,
446
445
447 # extra details
446 # extra details
448 "new_revision": new_revision,
447 "new_revision": new_revision,
449 "old_revision": old_revision,
448 "old_revision": old_revision,
450
449
451 "raw_diff": raw_diff,
450 "raw_diff": raw_diff,
452 "stats": stats
451 "stats": stats
453 }
452 }
@@ -1,2506 +1,2507 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import time
22 import time
23
23
24 import rhodecode
24 import rhodecode
25 from rhodecode.api import (
25 from rhodecode.api import (
26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 from rhodecode.api.utils import (
27 from rhodecode.api.utils import (
28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 get_perm_or_error, parse_args, get_origin, build_commit_data,
30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 validate_set_owner_permissions)
31 validate_set_owner_permissions)
32 from rhodecode.lib import audit_logger, rc_cache
32 from rhodecode.lib import audit_logger, rc_cache
33 from rhodecode.lib import repo_maintenance
33 from rhodecode.lib import repo_maintenance
34 from rhodecode.lib.auth import (
34 from rhodecode.lib.auth import (
35 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
35 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
36 HasRepoPermissionAnyApi)
36 HasRepoPermissionAnyApi)
37 from rhodecode.lib.celerylib.utils import get_task_id
37 from rhodecode.lib.celerylib.utils import get_task_id
38 from rhodecode.lib.utils2 import (
38 from rhodecode.lib.utils2 import (
39 str2bool, time_to_datetime, safe_str, safe_int, safe_unicode)
39 str2bool, time_to_datetime, safe_str, safe_int, safe_unicode)
40 from rhodecode.lib.ext_json import json
40 from rhodecode.lib.ext_json import json
41 from rhodecode.lib.exceptions import (
41 from rhodecode.lib.exceptions import (
42 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
42 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
43 from rhodecode.lib.vcs import RepositoryError
43 from rhodecode.lib.vcs import RepositoryError
44 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
44 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (
47 from rhodecode.model.db import (
48 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
48 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
49 ChangesetComment)
49 ChangesetComment)
50 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.permission import PermissionModel
51 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.pull_request import PullRequestModel
52 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo import RepoModel
53 from rhodecode.model.scm import ScmModel, RepoList
53 from rhodecode.model.scm import ScmModel, RepoList
54 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
54 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
55 from rhodecode.model import validation_schema
55 from rhodecode.model import validation_schema
56 from rhodecode.model.validation_schema.schemas import repo_schema
56 from rhodecode.model.validation_schema.schemas import repo_schema
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60
60
61 @jsonrpc_method()
61 @jsonrpc_method()
62 def get_repo(request, apiuser, repoid, cache=Optional(True)):
62 def get_repo(request, apiuser, repoid, cache=Optional(True)):
63 """
63 """
64 Gets an existing repository by its name or repository_id.
64 Gets an existing repository by its name or repository_id.
65
65
66 The members section so the output returns users groups or users
66 The members section so the output returns users groups or users
67 associated with that repository.
67 associated with that repository.
68
68
69 This command can only be run using an |authtoken| with admin rights,
69 This command can only be run using an |authtoken| with admin rights,
70 or users with at least read rights to the |repo|.
70 or users with at least read rights to the |repo|.
71
71
72 :param apiuser: This is filled automatically from the |authtoken|.
72 :param apiuser: This is filled automatically from the |authtoken|.
73 :type apiuser: AuthUser
73 :type apiuser: AuthUser
74 :param repoid: The repository name or repository id.
74 :param repoid: The repository name or repository id.
75 :type repoid: str or int
75 :type repoid: str or int
76 :param cache: use the cached value for last changeset
76 :param cache: use the cached value for last changeset
77 :type: cache: Optional(bool)
77 :type: cache: Optional(bool)
78
78
79 Example output:
79 Example output:
80
80
81 .. code-block:: bash
81 .. code-block:: bash
82
82
83 {
83 {
84 "error": null,
84 "error": null,
85 "id": <repo_id>,
85 "id": <repo_id>,
86 "result": {
86 "result": {
87 "clone_uri": null,
87 "clone_uri": null,
88 "created_on": "timestamp",
88 "created_on": "timestamp",
89 "description": "repo description",
89 "description": "repo description",
90 "enable_downloads": false,
90 "enable_downloads": false,
91 "enable_locking": false,
91 "enable_locking": false,
92 "enable_statistics": false,
92 "enable_statistics": false,
93 "followers": [
93 "followers": [
94 {
94 {
95 "active": true,
95 "active": true,
96 "admin": false,
96 "admin": false,
97 "api_key": "****************************************",
97 "api_key": "****************************************",
98 "api_keys": [
98 "api_keys": [
99 "****************************************"
99 "****************************************"
100 ],
100 ],
101 "email": "user@example.com",
101 "email": "user@example.com",
102 "emails": [
102 "emails": [
103 "user@example.com"
103 "user@example.com"
104 ],
104 ],
105 "extern_name": "rhodecode",
105 "extern_name": "rhodecode",
106 "extern_type": "rhodecode",
106 "extern_type": "rhodecode",
107 "firstname": "username",
107 "firstname": "username",
108 "ip_addresses": [],
108 "ip_addresses": [],
109 "language": null,
109 "language": null,
110 "last_login": "2015-09-16T17:16:35.854",
110 "last_login": "2015-09-16T17:16:35.854",
111 "lastname": "surname",
111 "lastname": "surname",
112 "user_id": <user_id>,
112 "user_id": <user_id>,
113 "username": "name"
113 "username": "name"
114 }
114 }
115 ],
115 ],
116 "fork_of": "parent-repo",
116 "fork_of": "parent-repo",
117 "landing_rev": [
117 "landing_rev": [
118 "rev",
118 "rev",
119 "tip"
119 "tip"
120 ],
120 ],
121 "last_changeset": {
121 "last_changeset": {
122 "author": "User <user@example.com>",
122 "author": "User <user@example.com>",
123 "branch": "default",
123 "branch": "default",
124 "date": "timestamp",
124 "date": "timestamp",
125 "message": "last commit message",
125 "message": "last commit message",
126 "parents": [
126 "parents": [
127 {
127 {
128 "raw_id": "commit-id"
128 "raw_id": "commit-id"
129 }
129 }
130 ],
130 ],
131 "raw_id": "commit-id",
131 "raw_id": "commit-id",
132 "revision": <revision number>,
132 "revision": <revision number>,
133 "short_id": "short id"
133 "short_id": "short id"
134 },
134 },
135 "lock_reason": null,
135 "lock_reason": null,
136 "locked_by": null,
136 "locked_by": null,
137 "locked_date": null,
137 "locked_date": null,
138 "owner": "owner-name",
138 "owner": "owner-name",
139 "permissions": [
139 "permissions": [
140 {
140 {
141 "name": "super-admin-name",
141 "name": "super-admin-name",
142 "origin": "super-admin",
142 "origin": "super-admin",
143 "permission": "repository.admin",
143 "permission": "repository.admin",
144 "type": "user"
144 "type": "user"
145 },
145 },
146 {
146 {
147 "name": "owner-name",
147 "name": "owner-name",
148 "origin": "owner",
148 "origin": "owner",
149 "permission": "repository.admin",
149 "permission": "repository.admin",
150 "type": "user"
150 "type": "user"
151 },
151 },
152 {
152 {
153 "name": "user-group-name",
153 "name": "user-group-name",
154 "origin": "permission",
154 "origin": "permission",
155 "permission": "repository.write",
155 "permission": "repository.write",
156 "type": "user_group"
156 "type": "user_group"
157 }
157 }
158 ],
158 ],
159 "private": true,
159 "private": true,
160 "repo_id": 676,
160 "repo_id": 676,
161 "repo_name": "user-group/repo-name",
161 "repo_name": "user-group/repo-name",
162 "repo_type": "hg"
162 "repo_type": "hg"
163 }
163 }
164 }
164 }
165 """
165 """
166
166
167 repo = get_repo_or_error(repoid)
167 repo = get_repo_or_error(repoid)
168 cache = Optional.extract(cache)
168 cache = Optional.extract(cache)
169
169
170 include_secrets = False
170 include_secrets = False
171 if has_superadmin_permission(apiuser):
171 if has_superadmin_permission(apiuser):
172 include_secrets = True
172 include_secrets = True
173 else:
173 else:
174 # check if we have at least read permission for this repo !
174 # check if we have at least read permission for this repo !
175 _perms = (
175 _perms = (
176 'repository.admin', 'repository.write', 'repository.read',)
176 'repository.admin', 'repository.write', 'repository.read',)
177 validate_repo_permissions(apiuser, repoid, repo, _perms)
177 validate_repo_permissions(apiuser, repoid, repo, _perms)
178
178
179 permissions = []
179 permissions = []
180 for _user in repo.permissions():
180 for _user in repo.permissions():
181 user_data = {
181 user_data = {
182 'name': _user.username,
182 'name': _user.username,
183 'permission': _user.permission,
183 'permission': _user.permission,
184 'origin': get_origin(_user),
184 'origin': get_origin(_user),
185 'type': "user",
185 'type': "user",
186 }
186 }
187 permissions.append(user_data)
187 permissions.append(user_data)
188
188
189 for _user_group in repo.permission_user_groups():
189 for _user_group in repo.permission_user_groups():
190 user_group_data = {
190 user_group_data = {
191 'name': _user_group.users_group_name,
191 'name': _user_group.users_group_name,
192 'permission': _user_group.permission,
192 'permission': _user_group.permission,
193 'origin': get_origin(_user_group),
193 'origin': get_origin(_user_group),
194 'type': "user_group",
194 'type': "user_group",
195 }
195 }
196 permissions.append(user_group_data)
196 permissions.append(user_group_data)
197
197
198 following_users = [
198 following_users = [
199 user.user.get_api_data(include_secrets=include_secrets)
199 user.user.get_api_data(include_secrets=include_secrets)
200 for user in repo.followers]
200 for user in repo.followers]
201
201
202 if not cache:
202 if not cache:
203 repo.update_commit_cache()
203 repo.update_commit_cache()
204 data = repo.get_api_data(include_secrets=include_secrets)
204 data = repo.get_api_data(include_secrets=include_secrets)
205 data['permissions'] = permissions
205 data['permissions'] = permissions
206 data['followers'] = following_users
206 data['followers'] = following_users
207 return data
207 return data
208
208
209
209
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                   {
                    "repo_id" :          "<repo_id>",
                    "repo_name" :        "<reponame>"
                    "repo_type" :        "<repo_type>",
                    "clone_uri" :        "<clone_uri>",
                    "private": :         "<bool>",
                    "created_on" :       "<datetimecreated>",
                    "description" :      "<description>",
                    "landing_rev":       "<landing_rev>",
                    "owner":             "<repo_owner>",
                    "fork_of":           "<name_of_fork_parent>",
                    "enable_downloads":  "<bool>",
                    "enable_locking":    "<bool>",
                    "enable_statistics": "<bool>",
                   },
                   ...
                ]
        error:  null
    """

    include_secrets = has_superadmin_permission(apiuser)
    permission_set = ('repository.read', 'repository.write', 'repository.admin',)
    extra_kwargs = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    repo_model = RepoModel()
    if root:
        # verify the parent group exists before listing anything under it
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))
        repos = (repo_model.get_repos_for_root(root=root, traverse=traverse)
                 if traverse
                 else repo_model.get_repos_for_root(root=parent))
    elif traverse:
        # no root given: every repository, everywhere
        repos = repo_model.get_all()
    else:
        # no root and no traversal: only top-level repositories
        repos = repo_model.get_repos_for_root(root=None)

    # RepoList filters the raw result down to what apiuser may actually read
    repo_list = RepoList(repos, perm_set=permission_set, extra_kwargs=extra_kwargs)
    return [
        repo.get_api_data(include_secrets=include_secrets)
        for repo in repo_list
    ]
283
283
284
284
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additionally parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    detail_level = Optional.extract(details)
    allowed_details = ['basic', 'extended', 'full']
    if detail_level not in allowed_details:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(allowed_details)))

    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))

    commit_json = commit.__json__()
    commit_json['diff'] = build_commit_data(commit, detail_level)
    if detail_level == 'full':
        # 'full' additionally exposes the refs pointing at this commit
        commit_json['refs'] = commit._get_refs()
    return commit_json
333
332
334
333
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount (-1 for no limit)
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    detail_level = Optional.extract(details)
    allowed_details = ['basic', 'extended', 'full']
    if detail_level not in allowed_details:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(allowed_details)))

    limit = int(limit)
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    results = []
    for position, commit in enumerate(commits):
        # limit of -1 means "no limit"
        if limit != -1 and position >= limit:
            break
        commit_json = commit.__json__()
        commit_json['diff'] = build_commit_data(commit, detail_level)
        if detail_level == 'full':
            commit_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        results.append(commit_json)
    return results
412
410
413
411
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type details: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                   {
                    "binary": false,
                    "content": "File line",
                    "extension": "md",
                    "lines": 2,
                    "md5": "059fa5d29b19c0657e384749480f6422",
                    "mimetype": "text/x-minidsrc",
                    "name": "file.md",
                    "size": 580,
                    "type": "file"
                   },
                   ...
                ]
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    valid_details = ['basic', 'full']
    if details not in valid_details:
        raise JSONRPCError('ret_type must be one of %s' % (','.join(valid_details)))

    # 'basic' -> structural node info, 'full' -> structural info plus content
    extended_info = details in ('basic', 'full')
    content = details == 'full'

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        dir_nodes, file_nodes = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': dir_nodes + file_nodes,
            'files': file_nodes,
            'dirs': dir_nodes,
        }
        return _map[ret_type]
    except KeyError:
        # invalid ret_type falls out of the _map lookup above
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )
510
508
511
509
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(None), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "binary": false,
            "extension": "py",
            "lines": 35,
            "content": "....",
            "md5": "76318336366b0f17ee249e11b0c99c41",
            "mimetype": "text/x-python",
            "name": "python.py",
            "size": 817,
            "type": "file",
        }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        # BUGFIX: `details` previously sat outside the `%` tuple, so the
        # two-placeholder format string raised TypeError instead of this error.
        raise JSONRPCError(
            'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))

    # map requested detail level onto the two flags ScmModel understands;
    # 'minimal' and 'minimal+search' both mean neither extended info nor content
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_unicode(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)
    except NodeDoesNotExistError:
        raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
            repo.repo_name, file_path, commit_id))
    except Exception:
        log.exception(u"Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
            repo.repo_name, file_path))

    return node
601
599
602
600
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    repo_id = repo.repo_id
    cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    # creating the region has a side effect even though the handle is unused here
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
        # cache_ver participates in the cache key so bumping it invalidates entries
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        return compute_fts_tree(
            rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
653
651
654
652
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
            "bookmarks": {
              "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches": {
              "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches_closed": {},
            "tags": {
              "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
              "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
              "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
            }
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    try:
        vcs_instance = repo.scm_instance()
        return vcs_instance.refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
711
709
712
710
713 @jsonrpc_method()
711 @jsonrpc_method()
714 def create_repo(
712 def create_repo(
715 request, apiuser, repo_name, repo_type,
713 request, apiuser, repo_name, repo_type,
716 owner=Optional(OAttr('apiuser')),
714 owner=Optional(OAttr('apiuser')),
717 description=Optional(''),
715 description=Optional(''),
718 private=Optional(False),
716 private=Optional(False),
719 clone_uri=Optional(None),
717 clone_uri=Optional(None),
720 push_uri=Optional(None),
718 push_uri=Optional(None),
721 landing_rev=Optional(None),
719 landing_rev=Optional(None),
722 enable_statistics=Optional(False),
720 enable_statistics=Optional(False),
723 enable_locking=Optional(False),
721 enable_locking=Optional(False),
724 enable_downloads=Optional(False),
722 enable_downloads=Optional(False),
725 copy_permissions=Optional(False)):
723 copy_permissions=Optional(False)):
726 """
724 """
727 Creates a repository.
725 Creates a repository.
728
726
729 * If the repository name contains "/", repository will be created inside
727 * If the repository name contains "/", repository will be created inside
730 a repository group or nested repository groups
728 a repository group or nested repository groups
731
729
732 For example "foo/bar/repo1" will create |repo| called "repo1" inside
730 For example "foo/bar/repo1" will create |repo| called "repo1" inside
733 group "foo/bar". You have to have permissions to access and write to
731 group "foo/bar". You have to have permissions to access and write to
734 the last repository group ("bar" in this example)
732 the last repository group ("bar" in this example)
735
733
736 This command can only be run using an |authtoken| with at least
734 This command can only be run using an |authtoken| with at least
737 permissions to create repositories, or write permissions to
735 permissions to create repositories, or write permissions to
738 parent repository groups.
736 parent repository groups.
739
737
740 :param apiuser: This is filled automatically from the |authtoken|.
738 :param apiuser: This is filled automatically from the |authtoken|.
741 :type apiuser: AuthUser
739 :type apiuser: AuthUser
742 :param repo_name: Set the repository name.
740 :param repo_name: Set the repository name.
743 :type repo_name: str
741 :type repo_name: str
744 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
742 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
745 :type repo_type: str
743 :type repo_type: str
746 :param owner: user_id or username
744 :param owner: user_id or username
747 :type owner: Optional(str)
745 :type owner: Optional(str)
748 :param description: Set the repository description.
746 :param description: Set the repository description.
749 :type description: Optional(str)
747 :type description: Optional(str)
750 :param private: set repository as private
748 :param private: set repository as private
751 :type private: bool
749 :type private: bool
752 :param clone_uri: set clone_uri
750 :param clone_uri: set clone_uri
753 :type clone_uri: str
751 :type clone_uri: str
754 :param push_uri: set push_uri
752 :param push_uri: set push_uri
755 :type push_uri: str
753 :type push_uri: str
756 :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
754 :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
757 :type landing_rev: str
755 :type landing_rev: str
758 :param enable_locking:
756 :param enable_locking:
759 :type enable_locking: bool
757 :type enable_locking: bool
760 :param enable_downloads:
758 :param enable_downloads:
761 :type enable_downloads: bool
759 :type enable_downloads: bool
762 :param enable_statistics:
760 :param enable_statistics:
763 :type enable_statistics: bool
761 :type enable_statistics: bool
764 :param copy_permissions: Copy permission from group in which the
762 :param copy_permissions: Copy permission from group in which the
765 repository is being created.
763 repository is being created.
766 :type copy_permissions: bool
764 :type copy_permissions: bool
767
765
768
766
769 Example output:
767 Example output:
770
768
771 .. code-block:: bash
769 .. code-block:: bash
772
770
773 id : <id_given_in_input>
771 id : <id_given_in_input>
774 result: {
772 result: {
775 "msg": "Created new repository `<reponame>`",
773 "msg": "Created new repository `<reponame>`",
776 "success": true,
774 "success": true,
777 "task": "<celery task id or None if done sync>"
775 "task": "<celery task id or None if done sync>"
778 }
776 }
779 error: null
777 error: null
780
778
781
779
782 Example error output:
780 Example error output:
783
781
784 .. code-block:: bash
782 .. code-block:: bash
785
783
786 id : <id_given_in_input>
784 id : <id_given_in_input>
787 result : null
785 result : null
788 error : {
786 error : {
789 'failed to create repository `<repo_name>`'
787 'failed to create repository `<repo_name>`'
790 }
788 }
791
789
792 """
790 """
793
791
794 owner = validate_set_owner_permissions(apiuser, owner)
792 owner = validate_set_owner_permissions(apiuser, owner)
795
793
796 description = Optional.extract(description)
794 description = Optional.extract(description)
797 copy_permissions = Optional.extract(copy_permissions)
795 copy_permissions = Optional.extract(copy_permissions)
798 clone_uri = Optional.extract(clone_uri)
796 clone_uri = Optional.extract(clone_uri)
799 push_uri = Optional.extract(push_uri)
797 push_uri = Optional.extract(push_uri)
800
798
801 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
799 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
802 if isinstance(private, Optional):
800 if isinstance(private, Optional):
803 private = defs.get('repo_private') or Optional.extract(private)
801 private = defs.get('repo_private') or Optional.extract(private)
804 if isinstance(repo_type, Optional):
802 if isinstance(repo_type, Optional):
805 repo_type = defs.get('repo_type')
803 repo_type = defs.get('repo_type')
806 if isinstance(enable_statistics, Optional):
804 if isinstance(enable_statistics, Optional):
807 enable_statistics = defs.get('repo_enable_statistics')
805 enable_statistics = defs.get('repo_enable_statistics')
808 if isinstance(enable_locking, Optional):
806 if isinstance(enable_locking, Optional):
809 enable_locking = defs.get('repo_enable_locking')
807 enable_locking = defs.get('repo_enable_locking')
810 if isinstance(enable_downloads, Optional):
808 if isinstance(enable_downloads, Optional):
811 enable_downloads = defs.get('repo_enable_downloads')
809 enable_downloads = defs.get('repo_enable_downloads')
812
810
813 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
811 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
814 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
812 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
815 ref_choices = list(set(ref_choices + [landing_ref]))
813 ref_choices = list(set(ref_choices + [landing_ref]))
816
814
817 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
815 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
818
816
819 schema = repo_schema.RepoSchema().bind(
817 schema = repo_schema.RepoSchema().bind(
820 repo_type_options=rhodecode.BACKENDS.keys(),
818 repo_type_options=rhodecode.BACKENDS.keys(),
821 repo_ref_options=ref_choices,
819 repo_ref_options=ref_choices,
822 repo_type=repo_type,
820 repo_type=repo_type,
823 # user caller
821 # user caller
824 user=apiuser)
822 user=apiuser)
825
823
826 try:
824 try:
827 schema_data = schema.deserialize(dict(
825 schema_data = schema.deserialize(dict(
828 repo_name=repo_name,
826 repo_name=repo_name,
829 repo_type=repo_type,
827 repo_type=repo_type,
830 repo_owner=owner.username,
828 repo_owner=owner.username,
831 repo_description=description,
829 repo_description=description,
832 repo_landing_commit_ref=landing_commit_ref,
830 repo_landing_commit_ref=landing_commit_ref,
833 repo_clone_uri=clone_uri,
831 repo_clone_uri=clone_uri,
834 repo_push_uri=push_uri,
832 repo_push_uri=push_uri,
835 repo_private=private,
833 repo_private=private,
836 repo_copy_permissions=copy_permissions,
834 repo_copy_permissions=copy_permissions,
837 repo_enable_statistics=enable_statistics,
835 repo_enable_statistics=enable_statistics,
838 repo_enable_downloads=enable_downloads,
836 repo_enable_downloads=enable_downloads,
839 repo_enable_locking=enable_locking))
837 repo_enable_locking=enable_locking))
840 except validation_schema.Invalid as err:
838 except validation_schema.Invalid as err:
841 raise JSONRPCValidationError(colander_exc=err)
839 raise JSONRPCValidationError(colander_exc=err)
842
840
843 try:
841 try:
844 data = {
842 data = {
845 'owner': owner,
843 'owner': owner,
846 'repo_name': schema_data['repo_group']['repo_name_without_group'],
844 'repo_name': schema_data['repo_group']['repo_name_without_group'],
847 'repo_name_full': schema_data['repo_name'],
845 'repo_name_full': schema_data['repo_name'],
848 'repo_group': schema_data['repo_group']['repo_group_id'],
846 'repo_group': schema_data['repo_group']['repo_group_id'],
849 'repo_type': schema_data['repo_type'],
847 'repo_type': schema_data['repo_type'],
850 'repo_description': schema_data['repo_description'],
848 'repo_description': schema_data['repo_description'],
851 'repo_private': schema_data['repo_private'],
849 'repo_private': schema_data['repo_private'],
852 'clone_uri': schema_data['repo_clone_uri'],
850 'clone_uri': schema_data['repo_clone_uri'],
853 'push_uri': schema_data['repo_push_uri'],
851 'push_uri': schema_data['repo_push_uri'],
854 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
852 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
855 'enable_statistics': schema_data['repo_enable_statistics'],
853 'enable_statistics': schema_data['repo_enable_statistics'],
856 'enable_locking': schema_data['repo_enable_locking'],
854 'enable_locking': schema_data['repo_enable_locking'],
857 'enable_downloads': schema_data['repo_enable_downloads'],
855 'enable_downloads': schema_data['repo_enable_downloads'],
858 'repo_copy_permissions': schema_data['repo_copy_permissions'],
856 'repo_copy_permissions': schema_data['repo_copy_permissions'],
859 }
857 }
860
858
861 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
859 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
862 task_id = get_task_id(task)
860 task_id = get_task_id(task)
863 # no commit, it's done in RepoModel, or async via celery
861 # no commit, it's done in RepoModel, or async via celery
864 return {
862 return {
865 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
863 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
866 'success': True, # cannot return the repo data here since fork
864 'success': True, # cannot return the repo data here since fork
867 # can be done async
865 # can be done async
868 'task': task_id
866 'task': task_id
869 }
867 }
870 except Exception:
868 except Exception:
871 log.exception(
869 log.exception(
872 u"Exception while trying to create the repository %s",
870 u"Exception while trying to create the repository %s",
873 schema_data['repo_name'])
871 schema_data['repo_name'])
874 raise JSONRPCError(
872 raise JSONRPCError(
875 'failed to create repository `%s`' % (schema_data['repo_name'],))
873 'failed to create repository `%s`' % (schema_data['repo_name'],))
876
874
877
875
878 @jsonrpc_method()
876 @jsonrpc_method()
879 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
877 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
880 description=Optional('')):
878 description=Optional('')):
881 """
879 """
882 Adds an extra field to a repository.
880 Adds an extra field to a repository.
883
881
884 This command can only be run using an |authtoken| with at least
882 This command can only be run using an |authtoken| with at least
885 write permissions to the |repo|.
883 write permissions to the |repo|.
886
884
887 :param apiuser: This is filled automatically from the |authtoken|.
885 :param apiuser: This is filled automatically from the |authtoken|.
888 :type apiuser: AuthUser
886 :type apiuser: AuthUser
889 :param repoid: Set the repository name or repository id.
887 :param repoid: Set the repository name or repository id.
890 :type repoid: str or int
888 :type repoid: str or int
891 :param key: Create a unique field key for this repository.
889 :param key: Create a unique field key for this repository.
892 :type key: str
890 :type key: str
893 :param label:
891 :param label:
894 :type label: Optional(str)
892 :type label: Optional(str)
895 :param description:
893 :param description:
896 :type description: Optional(str)
894 :type description: Optional(str)
897 """
895 """
898 repo = get_repo_or_error(repoid)
896 repo = get_repo_or_error(repoid)
899 if not has_superadmin_permission(apiuser):
897 if not has_superadmin_permission(apiuser):
900 _perms = ('repository.admin',)
898 _perms = ('repository.admin',)
901 validate_repo_permissions(apiuser, repoid, repo, _perms)
899 validate_repo_permissions(apiuser, repoid, repo, _perms)
902
900
903 label = Optional.extract(label) or key
901 label = Optional.extract(label) or key
904 description = Optional.extract(description)
902 description = Optional.extract(description)
905
903
906 field = RepositoryField.get_by_key_name(key, repo)
904 field = RepositoryField.get_by_key_name(key, repo)
907 if field:
905 if field:
908 raise JSONRPCError('Field with key '
906 raise JSONRPCError('Field with key '
909 '`%s` exists for repo `%s`' % (key, repoid))
907 '`%s` exists for repo `%s`' % (key, repoid))
910
908
911 try:
909 try:
912 RepoModel().add_repo_field(repo, key, field_label=label,
910 RepoModel().add_repo_field(repo, key, field_label=label,
913 field_desc=description)
911 field_desc=description)
914 Session().commit()
912 Session().commit()
915 return {
913 return {
916 'msg': "Added new repository field `%s`" % (key,),
914 'msg': "Added new repository field `%s`" % (key,),
917 'success': True,
915 'success': True,
918 }
916 }
919 except Exception:
917 except Exception:
920 log.exception("Exception occurred while trying to add field to repo")
918 log.exception("Exception occurred while trying to add field to repo")
921 raise JSONRPCError(
919 raise JSONRPCError(
922 'failed to create new field for repository `%s`' % (repoid,))
920 'failed to create new field for repository `%s`' % (repoid,))
923
921
924
922
925 @jsonrpc_method()
923 @jsonrpc_method()
926 def remove_field_from_repo(request, apiuser, repoid, key):
924 def remove_field_from_repo(request, apiuser, repoid, key):
927 """
925 """
928 Removes an extra field from a repository.
926 Removes an extra field from a repository.
929
927
930 This command can only be run using an |authtoken| with at least
928 This command can only be run using an |authtoken| with at least
931 write permissions to the |repo|.
929 write permissions to the |repo|.
932
930
933 :param apiuser: This is filled automatically from the |authtoken|.
931 :param apiuser: This is filled automatically from the |authtoken|.
934 :type apiuser: AuthUser
932 :type apiuser: AuthUser
935 :param repoid: Set the repository name or repository ID.
933 :param repoid: Set the repository name or repository ID.
936 :type repoid: str or int
934 :type repoid: str or int
937 :param key: Set the unique field key for this repository.
935 :param key: Set the unique field key for this repository.
938 :type key: str
936 :type key: str
939 """
937 """
940
938
941 repo = get_repo_or_error(repoid)
939 repo = get_repo_or_error(repoid)
942 if not has_superadmin_permission(apiuser):
940 if not has_superadmin_permission(apiuser):
943 _perms = ('repository.admin',)
941 _perms = ('repository.admin',)
944 validate_repo_permissions(apiuser, repoid, repo, _perms)
942 validate_repo_permissions(apiuser, repoid, repo, _perms)
945
943
946 field = RepositoryField.get_by_key_name(key, repo)
944 field = RepositoryField.get_by_key_name(key, repo)
947 if not field:
945 if not field:
948 raise JSONRPCError('Field with key `%s` does not '
946 raise JSONRPCError('Field with key `%s` does not '
949 'exists for repo `%s`' % (key, repoid))
947 'exists for repo `%s`' % (key, repoid))
950
948
951 try:
949 try:
952 RepoModel().delete_repo_field(repo, field_key=key)
950 RepoModel().delete_repo_field(repo, field_key=key)
953 Session().commit()
951 Session().commit()
954 return {
952 return {
955 'msg': "Deleted repository field `%s`" % (key,),
953 'msg': "Deleted repository field `%s`" % (key,),
956 'success': True,
954 'success': True,
957 }
955 }
958 except Exception:
956 except Exception:
959 log.exception(
957 log.exception(
960 "Exception occurred while trying to delete field from repo")
958 "Exception occurred while trying to delete field from repo")
961 raise JSONRPCError(
959 raise JSONRPCError(
962 'failed to delete field for repository `%s`' % (repoid,))
960 'failed to delete field for repository `%s`' % (repoid,))
963
961
964
962
965 @jsonrpc_method()
963 @jsonrpc_method()
966 def update_repo(
964 def update_repo(
967 request, apiuser, repoid, repo_name=Optional(None),
965 request, apiuser, repoid, repo_name=Optional(None),
968 owner=Optional(OAttr('apiuser')), description=Optional(''),
966 owner=Optional(OAttr('apiuser')), description=Optional(''),
969 private=Optional(False),
967 private=Optional(False),
970 clone_uri=Optional(None), push_uri=Optional(None),
968 clone_uri=Optional(None), push_uri=Optional(None),
971 landing_rev=Optional(None), fork_of=Optional(None),
969 landing_rev=Optional(None), fork_of=Optional(None),
972 enable_statistics=Optional(False),
970 enable_statistics=Optional(False),
973 enable_locking=Optional(False),
971 enable_locking=Optional(False),
974 enable_downloads=Optional(False), fields=Optional('')):
972 enable_downloads=Optional(False), fields=Optional('')):
975 """
973 """
976 Updates a repository with the given information.
974 Updates a repository with the given information.
977
975
978 This command can only be run using an |authtoken| with at least
976 This command can only be run using an |authtoken| with at least
979 admin permissions to the |repo|.
977 admin permissions to the |repo|.
980
978
981 * If the repository name contains "/", repository will be updated
979 * If the repository name contains "/", repository will be updated
982 accordingly with a repository group or nested repository groups
980 accordingly with a repository group or nested repository groups
983
981
984 For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
982 For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
985 called "repo-test" and place it inside group "foo/bar".
983 called "repo-test" and place it inside group "foo/bar".
986 You have to have permissions to access and write to the last repository
984 You have to have permissions to access and write to the last repository
987 group ("bar" in this example)
985 group ("bar" in this example)
988
986
989 :param apiuser: This is filled automatically from the |authtoken|.
987 :param apiuser: This is filled automatically from the |authtoken|.
990 :type apiuser: AuthUser
988 :type apiuser: AuthUser
991 :param repoid: repository name or repository ID.
989 :param repoid: repository name or repository ID.
992 :type repoid: str or int
990 :type repoid: str or int
993 :param repo_name: Update the |repo| name, including the
991 :param repo_name: Update the |repo| name, including the
994 repository group it's in.
992 repository group it's in.
995 :type repo_name: str
993 :type repo_name: str
996 :param owner: Set the |repo| owner.
994 :param owner: Set the |repo| owner.
997 :type owner: str
995 :type owner: str
998 :param fork_of: Set the |repo| as fork of another |repo|.
996 :param fork_of: Set the |repo| as fork of another |repo|.
999 :type fork_of: str
997 :type fork_of: str
1000 :param description: Update the |repo| description.
998 :param description: Update the |repo| description.
1001 :type description: str
999 :type description: str
1002 :param private: Set the |repo| as private. (True | False)
1000 :param private: Set the |repo| as private. (True | False)
1003 :type private: bool
1001 :type private: bool
1004 :param clone_uri: Update the |repo| clone URI.
1002 :param clone_uri: Update the |repo| clone URI.
1005 :type clone_uri: str
1003 :type clone_uri: str
1006 :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
1004 :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
1007 :type landing_rev: str
1005 :type landing_rev: str
1008 :param enable_statistics: Enable statistics on the |repo|, (True | False).
1006 :param enable_statistics: Enable statistics on the |repo|, (True | False).
1009 :type enable_statistics: bool
1007 :type enable_statistics: bool
1010 :param enable_locking: Enable |repo| locking.
1008 :param enable_locking: Enable |repo| locking.
1011 :type enable_locking: bool
1009 :type enable_locking: bool
1012 :param enable_downloads: Enable downloads from the |repo|, (True | False).
1010 :param enable_downloads: Enable downloads from the |repo|, (True | False).
1013 :type enable_downloads: bool
1011 :type enable_downloads: bool
1014 :param fields: Add extra fields to the |repo|. Use the following
1012 :param fields: Add extra fields to the |repo|. Use the following
1015 example format: ``field_key=field_val,field_key2=fieldval2``.
1013 example format: ``field_key=field_val,field_key2=fieldval2``.
1016 Escape ', ' with \,
1014 Escape ', ' with \,
1017 :type fields: str
1015 :type fields: str
1018 """
1016 """
1019
1017
1020 repo = get_repo_or_error(repoid)
1018 repo = get_repo_or_error(repoid)
1021
1019
1022 include_secrets = False
1020 include_secrets = False
1023 if not has_superadmin_permission(apiuser):
1021 if not has_superadmin_permission(apiuser):
1024 validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
1022 _perms = ('repository.admin',)
1023 validate_repo_permissions(apiuser, repoid, repo, _perms)
1025 else:
1024 else:
1026 include_secrets = True
1025 include_secrets = True
1027
1026
1028 updates = dict(
1027 updates = dict(
1029 repo_name=repo_name
1028 repo_name=repo_name
1030 if not isinstance(repo_name, Optional) else repo.repo_name,
1029 if not isinstance(repo_name, Optional) else repo.repo_name,
1031
1030
1032 fork_id=fork_of
1031 fork_id=fork_of
1033 if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,
1032 if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,
1034
1033
1035 user=owner
1034 user=owner
1036 if not isinstance(owner, Optional) else repo.user.username,
1035 if not isinstance(owner, Optional) else repo.user.username,
1037
1036
1038 repo_description=description
1037 repo_description=description
1039 if not isinstance(description, Optional) else repo.description,
1038 if not isinstance(description, Optional) else repo.description,
1040
1039
1041 repo_private=private
1040 repo_private=private
1042 if not isinstance(private, Optional) else repo.private,
1041 if not isinstance(private, Optional) else repo.private,
1043
1042
1044 clone_uri=clone_uri
1043 clone_uri=clone_uri
1045 if not isinstance(clone_uri, Optional) else repo.clone_uri,
1044 if not isinstance(clone_uri, Optional) else repo.clone_uri,
1046
1045
1047 push_uri=push_uri
1046 push_uri=push_uri
1048 if not isinstance(push_uri, Optional) else repo.push_uri,
1047 if not isinstance(push_uri, Optional) else repo.push_uri,
1049
1048
1050 repo_landing_rev=landing_rev
1049 repo_landing_rev=landing_rev
1051 if not isinstance(landing_rev, Optional) else repo._landing_revision,
1050 if not isinstance(landing_rev, Optional) else repo._landing_revision,
1052
1051
1053 repo_enable_statistics=enable_statistics
1052 repo_enable_statistics=enable_statistics
1054 if not isinstance(enable_statistics, Optional) else repo.enable_statistics,
1053 if not isinstance(enable_statistics, Optional) else repo.enable_statistics,
1055
1054
1056 repo_enable_locking=enable_locking
1055 repo_enable_locking=enable_locking
1057 if not isinstance(enable_locking, Optional) else repo.enable_locking,
1056 if not isinstance(enable_locking, Optional) else repo.enable_locking,
1058
1057
1059 repo_enable_downloads=enable_downloads
1058 repo_enable_downloads=enable_downloads
1060 if not isinstance(enable_downloads, Optional) else repo.enable_downloads)
1059 if not isinstance(enable_downloads, Optional) else repo.enable_downloads)
1061
1060
1062 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1061 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1063 ref_choices, _labels = ScmModel().get_repo_landing_revs(
1062 ref_choices, _labels = ScmModel().get_repo_landing_revs(
1064 request.translate, repo=repo)
1063 request.translate, repo=repo)
1065 ref_choices = list(set(ref_choices + [landing_ref]))
1064 ref_choices = list(set(ref_choices + [landing_ref]))
1066
1065
1067 old_values = repo.get_api_data()
1066 old_values = repo.get_api_data()
1068 repo_type = repo.repo_type
1067 repo_type = repo.repo_type
1069 schema = repo_schema.RepoSchema().bind(
1068 schema = repo_schema.RepoSchema().bind(
1070 repo_type_options=rhodecode.BACKENDS.keys(),
1069 repo_type_options=rhodecode.BACKENDS.keys(),
1071 repo_ref_options=ref_choices,
1070 repo_ref_options=ref_choices,
1072 repo_type=repo_type,
1071 repo_type=repo_type,
1073 # user caller
1072 # user caller
1074 user=apiuser,
1073 user=apiuser,
1075 old_values=old_values)
1074 old_values=old_values)
1076 try:
1075 try:
1077 schema_data = schema.deserialize(dict(
1076 schema_data = schema.deserialize(dict(
1078 # we save old value, users cannot change type
1077 # we save old value, users cannot change type
1079 repo_type=repo_type,
1078 repo_type=repo_type,
1080
1079
1081 repo_name=updates['repo_name'],
1080 repo_name=updates['repo_name'],
1082 repo_owner=updates['user'],
1081 repo_owner=updates['user'],
1083 repo_description=updates['repo_description'],
1082 repo_description=updates['repo_description'],
1084 repo_clone_uri=updates['clone_uri'],
1083 repo_clone_uri=updates['clone_uri'],
1085 repo_push_uri=updates['push_uri'],
1084 repo_push_uri=updates['push_uri'],
1086 repo_fork_of=updates['fork_id'],
1085 repo_fork_of=updates['fork_id'],
1087 repo_private=updates['repo_private'],
1086 repo_private=updates['repo_private'],
1088 repo_landing_commit_ref=updates['repo_landing_rev'],
1087 repo_landing_commit_ref=updates['repo_landing_rev'],
1089 repo_enable_statistics=updates['repo_enable_statistics'],
1088 repo_enable_statistics=updates['repo_enable_statistics'],
1090 repo_enable_downloads=updates['repo_enable_downloads'],
1089 repo_enable_downloads=updates['repo_enable_downloads'],
1091 repo_enable_locking=updates['repo_enable_locking']))
1090 repo_enable_locking=updates['repo_enable_locking']))
1092 except validation_schema.Invalid as err:
1091 except validation_schema.Invalid as err:
1093 raise JSONRPCValidationError(colander_exc=err)
1092 raise JSONRPCValidationError(colander_exc=err)
1094
1093
1095 # save validated data back into the updates dict
1094 # save validated data back into the updates dict
1096 validated_updates = dict(
1095 validated_updates = dict(
1097 repo_name=schema_data['repo_group']['repo_name_without_group'],
1096 repo_name=schema_data['repo_group']['repo_name_without_group'],
1098 repo_group=schema_data['repo_group']['repo_group_id'],
1097 repo_group=schema_data['repo_group']['repo_group_id'],
1099
1098
1100 user=schema_data['repo_owner'],
1099 user=schema_data['repo_owner'],
1101 repo_description=schema_data['repo_description'],
1100 repo_description=schema_data['repo_description'],
1102 repo_private=schema_data['repo_private'],
1101 repo_private=schema_data['repo_private'],
1103 clone_uri=schema_data['repo_clone_uri'],
1102 clone_uri=schema_data['repo_clone_uri'],
1104 push_uri=schema_data['repo_push_uri'],
1103 push_uri=schema_data['repo_push_uri'],
1105 repo_landing_rev=schema_data['repo_landing_commit_ref'],
1104 repo_landing_rev=schema_data['repo_landing_commit_ref'],
1106 repo_enable_statistics=schema_data['repo_enable_statistics'],
1105 repo_enable_statistics=schema_data['repo_enable_statistics'],
1107 repo_enable_locking=schema_data['repo_enable_locking'],
1106 repo_enable_locking=schema_data['repo_enable_locking'],
1108 repo_enable_downloads=schema_data['repo_enable_downloads'],
1107 repo_enable_downloads=schema_data['repo_enable_downloads'],
1109 )
1108 )
1110
1109
1111 if schema_data['repo_fork_of']:
1110 if schema_data['repo_fork_of']:
1112 fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
1111 fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
1113 validated_updates['fork_id'] = fork_repo.repo_id
1112 validated_updates['fork_id'] = fork_repo.repo_id
1114
1113
1115 # extra fields
1114 # extra fields
1116 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
1115 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
1117 if fields:
1116 if fields:
1118 validated_updates.update(fields)
1117 validated_updates.update(fields)
1119
1118
1120 try:
1119 try:
1121 RepoModel().update(repo, **validated_updates)
1120 RepoModel().update(repo, **validated_updates)
1122 audit_logger.store_api(
1121 audit_logger.store_api(
1123 'repo.edit', action_data={'old_data': old_values},
1122 'repo.edit', action_data={'old_data': old_values},
1124 user=apiuser, repo=repo)
1123 user=apiuser, repo=repo)
1125 Session().commit()
1124 Session().commit()
1126 return {
1125 return {
1127 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
1126 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
1128 'repository': repo.get_api_data(include_secrets=include_secrets)
1127 'repository': repo.get_api_data(include_secrets=include_secrets)
1129 }
1128 }
1130 except Exception:
1129 except Exception:
1131 log.exception(
1130 log.exception(
1132 u"Exception while trying to update the repository %s",
1131 u"Exception while trying to update the repository %s",
1133 repoid)
1132 repoid)
1134 raise JSONRPCError('failed to update repo `%s`' % repoid)
1133 raise JSONRPCError('failed to update repo `%s`' % repoid)
1135
1134
1136
1135
1137 @jsonrpc_method()
1136 @jsonrpc_method()
1138 def fork_repo(request, apiuser, repoid, fork_name,
1137 def fork_repo(request, apiuser, repoid, fork_name,
1139 owner=Optional(OAttr('apiuser')),
1138 owner=Optional(OAttr('apiuser')),
1140 description=Optional(''),
1139 description=Optional(''),
1141 private=Optional(False),
1140 private=Optional(False),
1142 clone_uri=Optional(None),
1141 clone_uri=Optional(None),
1143 landing_rev=Optional(None),
1142 landing_rev=Optional(None),
1144 copy_permissions=Optional(False)):
1143 copy_permissions=Optional(False)):
1145 """
1144 """
1146 Creates a fork of the specified |repo|.
1145 Creates a fork of the specified |repo|.
1147
1146
1148 * If the fork_name contains "/", fork will be created inside
1147 * If the fork_name contains "/", fork will be created inside
1149 a repository group or nested repository groups
1148 a repository group or nested repository groups
1150
1149
1151 For example "foo/bar/fork-repo" will create fork called "fork-repo"
1150 For example "foo/bar/fork-repo" will create fork called "fork-repo"
1152 inside group "foo/bar". You have to have permissions to access and
1151 inside group "foo/bar". You have to have permissions to access and
1153 write to the last repository group ("bar" in this example)
1152 write to the last repository group ("bar" in this example)
1154
1153
1155 This command can only be run using an |authtoken| with minimum
1154 This command can only be run using an |authtoken| with minimum
1156 read permissions of the forked repo, create fork permissions for an user.
1155 read permissions of the forked repo, create fork permissions for an user.
1157
1156
1158 :param apiuser: This is filled automatically from the |authtoken|.
1157 :param apiuser: This is filled automatically from the |authtoken|.
1159 :type apiuser: AuthUser
1158 :type apiuser: AuthUser
1160 :param repoid: Set repository name or repository ID.
1159 :param repoid: Set repository name or repository ID.
1161 :type repoid: str or int
1160 :type repoid: str or int
1162 :param fork_name: Set the fork name, including it's repository group membership.
1161 :param fork_name: Set the fork name, including it's repository group membership.
1163 :type fork_name: str
1162 :type fork_name: str
1164 :param owner: Set the fork owner.
1163 :param owner: Set the fork owner.
1165 :type owner: str
1164 :type owner: str
1166 :param description: Set the fork description.
1165 :param description: Set the fork description.
1167 :type description: str
1166 :type description: str
1168 :param copy_permissions: Copy permissions from parent |repo|. The
1167 :param copy_permissions: Copy permissions from parent |repo|. The
1169 default is False.
1168 default is False.
1170 :type copy_permissions: bool
1169 :type copy_permissions: bool
1171 :param private: Make the fork private. The default is False.
1170 :param private: Make the fork private. The default is False.
1172 :type private: bool
1171 :type private: bool
1173 :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd
1172 :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd
1174
1173
1175 Example output:
1174 Example output:
1176
1175
1177 .. code-block:: bash
1176 .. code-block:: bash
1178
1177
1179 id : <id_for_response>
1178 id : <id_for_response>
1180 api_key : "<api_key>"
1179 api_key : "<api_key>"
1181 args: {
1180 args: {
1182 "repoid" : "<reponame or repo_id>",
1181 "repoid" : "<reponame or repo_id>",
1183 "fork_name": "<forkname>",
1182 "fork_name": "<forkname>",
1184 "owner": "<username or user_id = Optional(=apiuser)>",
1183 "owner": "<username or user_id = Optional(=apiuser)>",
1185 "description": "<description>",
1184 "description": "<description>",
1186 "copy_permissions": "<bool>",
1185 "copy_permissions": "<bool>",
1187 "private": "<bool>",
1186 "private": "<bool>",
1188 "landing_rev": "<landing_rev>"
1187 "landing_rev": "<landing_rev>"
1189 }
1188 }
1190
1189
1191 Example error output:
1190 Example error output:
1192
1191
1193 .. code-block:: bash
1192 .. code-block:: bash
1194
1193
1195 id : <id_given_in_input>
1194 id : <id_given_in_input>
1196 result: {
1195 result: {
1197 "msg": "Created fork of `<reponame>` as `<forkname>`",
1196 "msg": "Created fork of `<reponame>` as `<forkname>`",
1198 "success": true,
1197 "success": true,
1199 "task": "<celery task id or None if done sync>"
1198 "task": "<celery task id or None if done sync>"
1200 }
1199 }
1201 error: null
1200 error: null
1202
1201
1203 """
1202 """
1204
1203
1205 repo = get_repo_or_error(repoid)
1204 repo = get_repo_or_error(repoid)
1206 repo_name = repo.repo_name
1205 repo_name = repo.repo_name
1207
1206
1208 if not has_superadmin_permission(apiuser):
1207 if not has_superadmin_permission(apiuser):
1209 # check if we have at least read permission for
1208 # check if we have at least read permission for
1210 # this repo that we fork !
1209 # this repo that we fork !
1211 _perms = (
1210 _perms = ('repository.admin', 'repository.write', 'repository.read')
1212 'repository.admin', 'repository.write', 'repository.read')
1213 validate_repo_permissions(apiuser, repoid, repo, _perms)
1211 validate_repo_permissions(apiuser, repoid, repo, _perms)
1214
1212
1215 # check if the regular user has at least fork permissions as well
1213 # check if the regular user has at least fork permissions as well
1216 if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
1214 if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
1217 raise JSONRPCForbidden()
1215 raise JSONRPCForbidden()
1218
1216
1219 # check if user can set owner parameter
1217 # check if user can set owner parameter
1220 owner = validate_set_owner_permissions(apiuser, owner)
1218 owner = validate_set_owner_permissions(apiuser, owner)
1221
1219
1222 description = Optional.extract(description)
1220 description = Optional.extract(description)
1223 copy_permissions = Optional.extract(copy_permissions)
1221 copy_permissions = Optional.extract(copy_permissions)
1224 clone_uri = Optional.extract(clone_uri)
1222 clone_uri = Optional.extract(clone_uri)
1225
1223
1226 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1224 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1227 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
1225 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
1228 ref_choices = list(set(ref_choices + [landing_ref]))
1226 ref_choices = list(set(ref_choices + [landing_ref]))
1229 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
1227 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
1230
1228
1231 private = Optional.extract(private)
1229 private = Optional.extract(private)
1232
1230
1233 schema = repo_schema.RepoSchema().bind(
1231 schema = repo_schema.RepoSchema().bind(
1234 repo_type_options=rhodecode.BACKENDS.keys(),
1232 repo_type_options=rhodecode.BACKENDS.keys(),
1235 repo_ref_options=ref_choices,
1233 repo_ref_options=ref_choices,
1236 repo_type=repo.repo_type,
1234 repo_type=repo.repo_type,
1237 # user caller
1235 # user caller
1238 user=apiuser)
1236 user=apiuser)
1239
1237
1240 try:
1238 try:
1241 schema_data = schema.deserialize(dict(
1239 schema_data = schema.deserialize(dict(
1242 repo_name=fork_name,
1240 repo_name=fork_name,
1243 repo_type=repo.repo_type,
1241 repo_type=repo.repo_type,
1244 repo_owner=owner.username,
1242 repo_owner=owner.username,
1245 repo_description=description,
1243 repo_description=description,
1246 repo_landing_commit_ref=landing_commit_ref,
1244 repo_landing_commit_ref=landing_commit_ref,
1247 repo_clone_uri=clone_uri,
1245 repo_clone_uri=clone_uri,
1248 repo_private=private,
1246 repo_private=private,
1249 repo_copy_permissions=copy_permissions))
1247 repo_copy_permissions=copy_permissions))
1250 except validation_schema.Invalid as err:
1248 except validation_schema.Invalid as err:
1251 raise JSONRPCValidationError(colander_exc=err)
1249 raise JSONRPCValidationError(colander_exc=err)
1252
1250
1253 try:
1251 try:
1254 data = {
1252 data = {
1255 'fork_parent_id': repo.repo_id,
1253 'fork_parent_id': repo.repo_id,
1256
1254
1257 'repo_name': schema_data['repo_group']['repo_name_without_group'],
1255 'repo_name': schema_data['repo_group']['repo_name_without_group'],
1258 'repo_name_full': schema_data['repo_name'],
1256 'repo_name_full': schema_data['repo_name'],
1259 'repo_group': schema_data['repo_group']['repo_group_id'],
1257 'repo_group': schema_data['repo_group']['repo_group_id'],
1260 'repo_type': schema_data['repo_type'],
1258 'repo_type': schema_data['repo_type'],
1261 'description': schema_data['repo_description'],
1259 'description': schema_data['repo_description'],
1262 'private': schema_data['repo_private'],
1260 'private': schema_data['repo_private'],
1263 'copy_permissions': schema_data['repo_copy_permissions'],
1261 'copy_permissions': schema_data['repo_copy_permissions'],
1264 'landing_rev': schema_data['repo_landing_commit_ref'],
1262 'landing_rev': schema_data['repo_landing_commit_ref'],
1265 }
1263 }
1266
1264
1267 task = RepoModel().create_fork(data, cur_user=owner.user_id)
1265 task = RepoModel().create_fork(data, cur_user=owner.user_id)
1268 # no commit, it's done in RepoModel, or async via celery
1266 # no commit, it's done in RepoModel, or async via celery
1269 task_id = get_task_id(task)
1267 task_id = get_task_id(task)
1270
1268
1271 return {
1269 return {
1272 'msg': 'Created fork of `%s` as `%s`' % (
1270 'msg': 'Created fork of `%s` as `%s`' % (
1273 repo.repo_name, schema_data['repo_name']),
1271 repo.repo_name, schema_data['repo_name']),
1274 'success': True, # cannot return the repo data here since fork
1272 'success': True, # cannot return the repo data here since fork
1275 # can be done async
1273 # can be done async
1276 'task': task_id
1274 'task': task_id
1277 }
1275 }
1278 except Exception:
1276 except Exception:
1279 log.exception(
1277 log.exception(
1280 u"Exception while trying to create fork %s",
1278 u"Exception while trying to create fork %s",
1281 schema_data['repo_name'])
1279 schema_data['repo_name'])
1282 raise JSONRPCError(
1280 raise JSONRPCError(
1283 'failed to fork repository `%s` as `%s`' % (
1281 'failed to fork repository `%s` as `%s`' % (
1284 repo_name, schema_data['repo_name']))
1282 repo_name, schema_data['repo_name']))
1285
1283
1286
1284
1287 @jsonrpc_method()
1285 @jsonrpc_method()
1288 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1286 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1289 """
1287 """
1290 Deletes a repository.
1288 Deletes a repository.
1291
1289
1292 * When the `forks` parameter is set it's possible to detach or delete
1290 * When the `forks` parameter is set it's possible to detach or delete
1293 forks of deleted repository.
1291 forks of deleted repository.
1294
1292
1295 This command can only be run using an |authtoken| with admin
1293 This command can only be run using an |authtoken| with admin
1296 permissions on the |repo|.
1294 permissions on the |repo|.
1297
1295
1298 :param apiuser: This is filled automatically from the |authtoken|.
1296 :param apiuser: This is filled automatically from the |authtoken|.
1299 :type apiuser: AuthUser
1297 :type apiuser: AuthUser
1300 :param repoid: Set the repository name or repository ID.
1298 :param repoid: Set the repository name or repository ID.
1301 :type repoid: str or int
1299 :type repoid: str or int
1302 :param forks: Set to `detach` or `delete` forks from the |repo|.
1300 :param forks: Set to `detach` or `delete` forks from the |repo|.
1303 :type forks: Optional(str)
1301 :type forks: Optional(str)
1304
1302
1305 Example error output:
1303 Example error output:
1306
1304
1307 .. code-block:: bash
1305 .. code-block:: bash
1308
1306
1309 id : <id_given_in_input>
1307 id : <id_given_in_input>
1310 result: {
1308 result: {
1311 "msg": "Deleted repository `<reponame>`",
1309 "msg": "Deleted repository `<reponame>`",
1312 "success": true
1310 "success": true
1313 }
1311 }
1314 error: null
1312 error: null
1315 """
1313 """
1316
1314
1317 repo = get_repo_or_error(repoid)
1315 repo = get_repo_or_error(repoid)
1318 repo_name = repo.repo_name
1316 repo_name = repo.repo_name
1319 if not has_superadmin_permission(apiuser):
1317 if not has_superadmin_permission(apiuser):
1320 _perms = ('repository.admin',)
1318 _perms = ('repository.admin',)
1321 validate_repo_permissions(apiuser, repoid, repo, _perms)
1319 validate_repo_permissions(apiuser, repoid, repo, _perms)
1322
1320
1323 try:
1321 try:
1324 handle_forks = Optional.extract(forks)
1322 handle_forks = Optional.extract(forks)
1325 _forks_msg = ''
1323 _forks_msg = ''
1326 _forks = [f for f in repo.forks]
1324 _forks = [f for f in repo.forks]
1327 if handle_forks == 'detach':
1325 if handle_forks == 'detach':
1328 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1326 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1329 elif handle_forks == 'delete':
1327 elif handle_forks == 'delete':
1330 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1328 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1331 elif _forks:
1329 elif _forks:
1332 raise JSONRPCError(
1330 raise JSONRPCError(
1333 'Cannot delete `%s` it still contains attached forks' %
1331 'Cannot delete `%s` it still contains attached forks' %
1334 (repo.repo_name,)
1332 (repo.repo_name,)
1335 )
1333 )
1336 old_data = repo.get_api_data()
1334 old_data = repo.get_api_data()
1337 RepoModel().delete(repo, forks=forks)
1335 RepoModel().delete(repo, forks=forks)
1338
1336
1339 repo = audit_logger.RepoWrap(repo_id=None,
1337 repo = audit_logger.RepoWrap(repo_id=None,
1340 repo_name=repo.repo_name)
1338 repo_name=repo.repo_name)
1341
1339
1342 audit_logger.store_api(
1340 audit_logger.store_api(
1343 'repo.delete', action_data={'old_data': old_data},
1341 'repo.delete', action_data={'old_data': old_data},
1344 user=apiuser, repo=repo)
1342 user=apiuser, repo=repo)
1345
1343
1346 ScmModel().mark_for_invalidation(repo_name, delete=True)
1344 ScmModel().mark_for_invalidation(repo_name, delete=True)
1347 Session().commit()
1345 Session().commit()
1348 return {
1346 return {
1349 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
1347 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
1350 'success': True
1348 'success': True
1351 }
1349 }
1352 except Exception:
1350 except Exception:
1353 log.exception("Exception occurred while trying to delete repo")
1351 log.exception("Exception occurred while trying to delete repo")
1354 raise JSONRPCError(
1352 raise JSONRPCError(
1355 'failed to delete repository `%s`' % (repo_name,)
1353 'failed to delete repository `%s`' % (repo_name,)
1356 )
1354 )
1357
1355
1358
1356
1359 #TODO: marcink, change name ?
1357 #TODO: marcink, change name ?
1360 @jsonrpc_method()
1358 @jsonrpc_method()
1361 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1359 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1362 """
1360 """
1363 Invalidates the cache for the specified repository.
1361 Invalidates the cache for the specified repository.
1364
1362
1365 This command can only be run using an |authtoken| with admin rights to
1363 This command can only be run using an |authtoken| with admin rights to
1366 the specified repository.
1364 the specified repository.
1367
1365
1368 This command takes the following options:
1366 This command takes the following options:
1369
1367
1370 :param apiuser: This is filled automatically from |authtoken|.
1368 :param apiuser: This is filled automatically from |authtoken|.
1371 :type apiuser: AuthUser
1369 :type apiuser: AuthUser
1372 :param repoid: Sets the repository name or repository ID.
1370 :param repoid: Sets the repository name or repository ID.
1373 :type repoid: str or int
1371 :type repoid: str or int
1374 :param delete_keys: This deletes the invalidated keys instead of
1372 :param delete_keys: This deletes the invalidated keys instead of
1375 just flagging them.
1373 just flagging them.
1376 :type delete_keys: Optional(``True`` | ``False``)
1374 :type delete_keys: Optional(``True`` | ``False``)
1377
1375
1378 Example output:
1376 Example output:
1379
1377
1380 .. code-block:: bash
1378 .. code-block:: bash
1381
1379
1382 id : <id_given_in_input>
1380 id : <id_given_in_input>
1383 result : {
1381 result : {
1384 'msg': Cache for repository `<repository name>` was invalidated,
1382 'msg': Cache for repository `<repository name>` was invalidated,
1385 'repository': <repository name>
1383 'repository': <repository name>
1386 }
1384 }
1387 error : null
1385 error : null
1388
1386
1389 Example error output:
1387 Example error output:
1390
1388
1391 .. code-block:: bash
1389 .. code-block:: bash
1392
1390
1393 id : <id_given_in_input>
1391 id : <id_given_in_input>
1394 result : null
1392 result : null
1395 error : {
1393 error : {
1396 'Error occurred during cache invalidation action'
1394 'Error occurred during cache invalidation action'
1397 }
1395 }
1398
1396
1399 """
1397 """
1400
1398
1401 repo = get_repo_or_error(repoid)
1399 repo = get_repo_or_error(repoid)
1402 if not has_superadmin_permission(apiuser):
1400 if not has_superadmin_permission(apiuser):
1403 _perms = ('repository.admin', 'repository.write',)
1401 _perms = ('repository.admin', 'repository.write',)
1404 validate_repo_permissions(apiuser, repoid, repo, _perms)
1402 validate_repo_permissions(apiuser, repoid, repo, _perms)
1405
1403
1406 delete = Optional.extract(delete_keys)
1404 delete = Optional.extract(delete_keys)
1407 try:
1405 try:
1408 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1406 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1409 return {
1407 return {
1410 'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
1408 'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
1411 'repository': repo.repo_name
1409 'repository': repo.repo_name
1412 }
1410 }
1413 except Exception:
1411 except Exception:
1414 log.exception(
1412 log.exception(
1415 "Exception occurred while trying to invalidate repo cache")
1413 "Exception occurred while trying to invalidate repo cache")
1416 raise JSONRPCError(
1414 raise JSONRPCError(
1417 'Error occurred during cache invalidation action'
1415 'Error occurred during cache invalidation action'
1418 )
1416 )
1419
1417
1420
1418
1421 #TODO: marcink, change name ?
1419 #TODO: marcink, change name ?
1422 @jsonrpc_method()
1420 @jsonrpc_method()
1423 def lock(request, apiuser, repoid, locked=Optional(None),
1421 def lock(request, apiuser, repoid, locked=Optional(None),
1424 userid=Optional(OAttr('apiuser'))):
1422 userid=Optional(OAttr('apiuser'))):
1425 """
1423 """
1426 Sets the lock state of the specified |repo| by the given user.
1424 Sets the lock state of the specified |repo| by the given user.
1427 From more information, see :ref:`repo-locking`.
1425 From more information, see :ref:`repo-locking`.
1428
1426
1429 * If the ``userid`` option is not set, the repository is locked to the
1427 * If the ``userid`` option is not set, the repository is locked to the
1430 user who called the method.
1428 user who called the method.
1431 * If the ``locked`` parameter is not set, the current lock state of the
1429 * If the ``locked`` parameter is not set, the current lock state of the
1432 repository is displayed.
1430 repository is displayed.
1433
1431
1434 This command can only be run using an |authtoken| with admin rights to
1432 This command can only be run using an |authtoken| with admin rights to
1435 the specified repository.
1433 the specified repository.
1436
1434
1437 This command takes the following options:
1435 This command takes the following options:
1438
1436
1439 :param apiuser: This is filled automatically from the |authtoken|.
1437 :param apiuser: This is filled automatically from the |authtoken|.
1440 :type apiuser: AuthUser
1438 :type apiuser: AuthUser
1441 :param repoid: Sets the repository name or repository ID.
1439 :param repoid: Sets the repository name or repository ID.
1442 :type repoid: str or int
1440 :type repoid: str or int
1443 :param locked: Sets the lock state.
1441 :param locked: Sets the lock state.
1444 :type locked: Optional(``True`` | ``False``)
1442 :type locked: Optional(``True`` | ``False``)
1445 :param userid: Set the repository lock to this user.
1443 :param userid: Set the repository lock to this user.
1446 :type userid: Optional(str or int)
1444 :type userid: Optional(str or int)
1447
1445
1448 Example error output:
1446 Example error output:
1449
1447
1450 .. code-block:: bash
1448 .. code-block:: bash
1451
1449
1452 id : <id_given_in_input>
1450 id : <id_given_in_input>
1453 result : {
1451 result : {
1454 'repo': '<reponame>',
1452 'repo': '<reponame>',
1455 'locked': <bool: lock state>,
1453 'locked': <bool: lock state>,
1456 'locked_since': <int: lock timestamp>,
1454 'locked_since': <int: lock timestamp>,
1457 'locked_by': <username of person who made the lock>,
1455 'locked_by': <username of person who made the lock>,
1458 'lock_reason': <str: reason for locking>,
1456 'lock_reason': <str: reason for locking>,
1459 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1457 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1460 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1458 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1461 or
1459 or
1462 'msg': 'Repo `<repository name>` not locked.'
1460 'msg': 'Repo `<repository name>` not locked.'
1463 or
1461 or
1464 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1462 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1465 }
1463 }
1466 error : null
1464 error : null
1467
1465
1468 Example error output:
1466 Example error output:
1469
1467
1470 .. code-block:: bash
1468 .. code-block:: bash
1471
1469
1472 id : <id_given_in_input>
1470 id : <id_given_in_input>
1473 result : null
1471 result : null
1474 error : {
1472 error : {
1475 'Error occurred locking repository `<reponame>`'
1473 'Error occurred locking repository `<reponame>`'
1476 }
1474 }
1477 """
1475 """
1478
1476
1479 repo = get_repo_or_error(repoid)
1477 repo = get_repo_or_error(repoid)
1480 if not has_superadmin_permission(apiuser):
1478 if not has_superadmin_permission(apiuser):
1481 # check if we have at least write permission for this repo !
1479 # check if we have at least write permission for this repo !
1482 _perms = ('repository.admin', 'repository.write',)
1480 _perms = ('repository.admin', 'repository.write',)
1483 validate_repo_permissions(apiuser, repoid, repo, _perms)
1481 validate_repo_permissions(apiuser, repoid, repo, _perms)
1484
1482
1485 # make sure normal user does not pass someone else userid,
1483 # make sure normal user does not pass someone else userid,
1486 # he is not allowed to do that
1484 # he is not allowed to do that
1487 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1485 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1488 raise JSONRPCError('userid is not the same as your user')
1486 raise JSONRPCError('userid is not the same as your user')
1489
1487
1490 if isinstance(userid, Optional):
1488 if isinstance(userid, Optional):
1491 userid = apiuser.user_id
1489 userid = apiuser.user_id
1492
1490
1493 user = get_user_or_error(userid)
1491 user = get_user_or_error(userid)
1494
1492
1495 if isinstance(locked, Optional):
1493 if isinstance(locked, Optional):
1496 lockobj = repo.locked
1494 lockobj = repo.locked
1497
1495
1498 if lockobj[0] is None:
1496 if lockobj[0] is None:
1499 _d = {
1497 _d = {
1500 'repo': repo.repo_name,
1498 'repo': repo.repo_name,
1501 'locked': False,
1499 'locked': False,
1502 'locked_since': None,
1500 'locked_since': None,
1503 'locked_by': None,
1501 'locked_by': None,
1504 'lock_reason': None,
1502 'lock_reason': None,
1505 'lock_state_changed': False,
1503 'lock_state_changed': False,
1506 'msg': 'Repo `%s` not locked.' % repo.repo_name
1504 'msg': 'Repo `%s` not locked.' % repo.repo_name
1507 }
1505 }
1508 return _d
1506 return _d
1509 else:
1507 else:
1510 _user_id, _time, _reason = lockobj
1508 _user_id, _time, _reason = lockobj
1511 lock_user = get_user_or_error(userid)
1509 lock_user = get_user_or_error(userid)
1512 _d = {
1510 _d = {
1513 'repo': repo.repo_name,
1511 'repo': repo.repo_name,
1514 'locked': True,
1512 'locked': True,
1515 'locked_since': _time,
1513 'locked_since': _time,
1516 'locked_by': lock_user.username,
1514 'locked_by': lock_user.username,
1517 'lock_reason': _reason,
1515 'lock_reason': _reason,
1518 'lock_state_changed': False,
1516 'lock_state_changed': False,
1519 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1517 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1520 % (repo.repo_name, lock_user.username,
1518 % (repo.repo_name, lock_user.username,
1521 json.dumps(time_to_datetime(_time))))
1519 json.dumps(time_to_datetime(_time))))
1522 }
1520 }
1523 return _d
1521 return _d
1524
1522
1525 # force locked state through a flag
1523 # force locked state through a flag
1526 else:
1524 else:
1527 locked = str2bool(locked)
1525 locked = str2bool(locked)
1528 lock_reason = Repository.LOCK_API
1526 lock_reason = Repository.LOCK_API
1529 try:
1527 try:
1530 if locked:
1528 if locked:
1531 lock_time = time.time()
1529 lock_time = time.time()
1532 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1530 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1533 else:
1531 else:
1534 lock_time = None
1532 lock_time = None
1535 Repository.unlock(repo)
1533 Repository.unlock(repo)
1536 _d = {
1534 _d = {
1537 'repo': repo.repo_name,
1535 'repo': repo.repo_name,
1538 'locked': locked,
1536 'locked': locked,
1539 'locked_since': lock_time,
1537 'locked_since': lock_time,
1540 'locked_by': user.username,
1538 'locked_by': user.username,
1541 'lock_reason': lock_reason,
1539 'lock_reason': lock_reason,
1542 'lock_state_changed': True,
1540 'lock_state_changed': True,
1543 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1541 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1544 % (user.username, repo.repo_name, locked))
1542 % (user.username, repo.repo_name, locked))
1545 }
1543 }
1546 return _d
1544 return _d
1547 except Exception:
1545 except Exception:
1548 log.exception(
1546 log.exception(
1549 "Exception occurred while trying to lock repository")
1547 "Exception occurred while trying to lock repository")
1550 raise JSONRPCError(
1548 raise JSONRPCError(
1551 'Error occurred locking repository `%s`' % repo.repo_name
1549 'Error occurred locking repository `%s`' % repo.repo_name
1552 )
1550 )
1553
1551
1554
1552
1555 @jsonrpc_method()
1553 @jsonrpc_method()
1556 def comment_commit(
1554 def comment_commit(
1557 request, apiuser, repoid, commit_id, message, status=Optional(None),
1555 request, apiuser, repoid, commit_id, message, status=Optional(None),
1558 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1556 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1559 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1557 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1560 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1558 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1561 """
1559 """
1562 Set a commit comment, and optionally change the status of the commit.
1560 Set a commit comment, and optionally change the status of the commit.
1563
1561
1564 :param apiuser: This is filled automatically from the |authtoken|.
1562 :param apiuser: This is filled automatically from the |authtoken|.
1565 :type apiuser: AuthUser
1563 :type apiuser: AuthUser
1566 :param repoid: Set the repository name or repository ID.
1564 :param repoid: Set the repository name or repository ID.
1567 :type repoid: str or int
1565 :type repoid: str or int
1568 :param commit_id: Specify the commit_id for which to set a comment.
1566 :param commit_id: Specify the commit_id for which to set a comment.
1569 :type commit_id: str
1567 :type commit_id: str
1570 :param message: The comment text.
1568 :param message: The comment text.
1571 :type message: str
1569 :type message: str
1572 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1570 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1573 'approved', 'rejected', 'under_review'
1571 'approved', 'rejected', 'under_review'
1574 :type status: str
1572 :type status: str
1575 :param comment_type: Comment type, one of: 'note', 'todo'
1573 :param comment_type: Comment type, one of: 'note', 'todo'
1576 :type comment_type: Optional(str), default: 'note'
1574 :type comment_type: Optional(str), default: 'note'
1577 :param resolves_comment_id: id of comment which this one will resolve
1575 :param resolves_comment_id: id of comment which this one will resolve
1578 :type resolves_comment_id: Optional(int)
1576 :type resolves_comment_id: Optional(int)
1579 :param extra_recipients: list of user ids or usernames to add
1577 :param extra_recipients: list of user ids or usernames to add
1580 notifications for this comment. Acts like a CC for notification
1578 notifications for this comment. Acts like a CC for notification
1581 :type extra_recipients: Optional(list)
1579 :type extra_recipients: Optional(list)
1582 :param userid: Set the user name of the comment creator.
1580 :param userid: Set the user name of the comment creator.
1583 :type userid: Optional(str or int)
1581 :type userid: Optional(str or int)
1584 :param send_email: Define if this comment should also send email notification
1582 :param send_email: Define if this comment should also send email notification
1585 :type send_email: Optional(bool)
1583 :type send_email: Optional(bool)
1586
1584
1587 Example error output:
1585 Example error output:
1588
1586
1589 .. code-block:: bash
1587 .. code-block:: bash
1590
1588
1591 {
1589 {
1592 "id" : <id_given_in_input>,
1590 "id" : <id_given_in_input>,
1593 "result" : {
1591 "result" : {
1594 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1592 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1595 "status_change": null or <status>,
1593 "status_change": null or <status>,
1596 "success": true
1594 "success": true
1597 },
1595 },
1598 "error" : null
1596 "error" : null
1599 }
1597 }
1600
1598
1601 """
1599 """
1602 repo = get_repo_or_error(repoid)
1600 repo = get_repo_or_error(repoid)
1603 if not has_superadmin_permission(apiuser):
1601 if not has_superadmin_permission(apiuser):
1604 _perms = ('repository.read', 'repository.write', 'repository.admin')
1602 _perms = ('repository.read', 'repository.write', 'repository.admin')
1605 validate_repo_permissions(apiuser, repoid, repo, _perms)
1603 validate_repo_permissions(apiuser, repoid, repo, _perms)
1606
1604
1607 try:
1605 try:
1608 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1606 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1609 commit_id = commit.raw_id
1607 commit_id = commit.raw_id
1610 except Exception as e:
1608 except Exception as e:
1611 log.exception('Failed to fetch commit')
1609 log.exception('Failed to fetch commit')
1612 raise JSONRPCError(safe_str(e))
1610 raise JSONRPCError(safe_str(e))
1613
1611
1614 if isinstance(userid, Optional):
1612 if isinstance(userid, Optional):
1615 userid = apiuser.user_id
1613 userid = apiuser.user_id
1616
1614
1617 user = get_user_or_error(userid)
1615 user = get_user_or_error(userid)
1618 status = Optional.extract(status)
1616 status = Optional.extract(status)
1619 comment_type = Optional.extract(comment_type)
1617 comment_type = Optional.extract(comment_type)
1620 resolves_comment_id = Optional.extract(resolves_comment_id)
1618 resolves_comment_id = Optional.extract(resolves_comment_id)
1621 extra_recipients = Optional.extract(extra_recipients)
1619 extra_recipients = Optional.extract(extra_recipients)
1622 send_email = Optional.extract(send_email, binary=True)
1620 send_email = Optional.extract(send_email, binary=True)
1623
1621
1624 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1622 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1625 if status and status not in allowed_statuses:
1623 if status and status not in allowed_statuses:
1626 raise JSONRPCError('Bad status, must be on '
1624 raise JSONRPCError('Bad status, must be on '
1627 'of %s got %s' % (allowed_statuses, status,))
1625 'of %s got %s' % (allowed_statuses, status,))
1628
1626
1629 if resolves_comment_id:
1627 if resolves_comment_id:
1630 comment = ChangesetComment.get(resolves_comment_id)
1628 comment = ChangesetComment.get(resolves_comment_id)
1631 if not comment:
1629 if not comment:
1632 raise JSONRPCError(
1630 raise JSONRPCError(
1633 'Invalid resolves_comment_id `%s` for this commit.'
1631 'Invalid resolves_comment_id `%s` for this commit.'
1634 % resolves_comment_id)
1632 % resolves_comment_id)
1635 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1633 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1636 raise JSONRPCError(
1634 raise JSONRPCError(
1637 'Comment `%s` is wrong type for setting status to resolved.'
1635 'Comment `%s` is wrong type for setting status to resolved.'
1638 % resolves_comment_id)
1636 % resolves_comment_id)
1639
1637
1640 try:
1638 try:
1641 rc_config = SettingsModel().get_all_settings()
1639 rc_config = SettingsModel().get_all_settings()
1642 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1640 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1643 status_change_label = ChangesetStatus.get_status_lbl(status)
1641 status_change_label = ChangesetStatus.get_status_lbl(status)
1644 comment = CommentsModel().create(
1642 comment = CommentsModel().create(
1645 message, repo, user, commit_id=commit_id,
1643 message, repo, user, commit_id=commit_id,
1646 status_change=status_change_label,
1644 status_change=status_change_label,
1647 status_change_type=status,
1645 status_change_type=status,
1648 renderer=renderer,
1646 renderer=renderer,
1649 comment_type=comment_type,
1647 comment_type=comment_type,
1650 resolves_comment_id=resolves_comment_id,
1648 resolves_comment_id=resolves_comment_id,
1651 auth_user=apiuser,
1649 auth_user=apiuser,
1652 extra_recipients=extra_recipients,
1650 extra_recipients=extra_recipients,
1653 send_email=send_email
1651 send_email=send_email
1654 )
1652 )
1655 if status:
1653 if status:
1656 # also do a status change
1654 # also do a status change
1657 try:
1655 try:
1658 ChangesetStatusModel().set_status(
1656 ChangesetStatusModel().set_status(
1659 repo, status, user, comment, revision=commit_id,
1657 repo, status, user, comment, revision=commit_id,
1660 dont_allow_on_closed_pull_request=True
1658 dont_allow_on_closed_pull_request=True
1661 )
1659 )
1662 except StatusChangeOnClosedPullRequestError:
1660 except StatusChangeOnClosedPullRequestError:
1663 log.exception(
1661 log.exception(
1664 "Exception occurred while trying to change repo commit status")
1662 "Exception occurred while trying to change repo commit status")
1665 msg = ('Changing status on a commit associated with '
1663 msg = ('Changing status on a commit associated with '
1666 'a closed pull request is not allowed')
1664 'a closed pull request is not allowed')
1667 raise JSONRPCError(msg)
1665 raise JSONRPCError(msg)
1668
1666
1669 CommentsModel().trigger_commit_comment_hook(
1667 CommentsModel().trigger_commit_comment_hook(
1670 repo, apiuser, 'create',
1668 repo, apiuser, 'create',
1671 data={'comment': comment, 'commit': commit})
1669 data={'comment': comment, 'commit': commit})
1672
1670
1673 Session().commit()
1671 Session().commit()
1674 return {
1672 return {
1675 'msg': (
1673 'msg': (
1676 'Commented on commit `%s` for repository `%s`' % (
1674 'Commented on commit `%s` for repository `%s`' % (
1677 comment.revision, repo.repo_name)),
1675 comment.revision, repo.repo_name)),
1678 'status_change': status,
1676 'status_change': status,
1679 'success': True,
1677 'success': True,
1680 }
1678 }
1681 except JSONRPCError:
1679 except JSONRPCError:
1682 # catch any inside errors, and re-raise them to prevent from
1680 # catch any inside errors, and re-raise them to prevent from
1683 # below global catch to silence them
1681 # below global catch to silence them
1684 raise
1682 raise
1685 except Exception:
1683 except Exception:
1686 log.exception("Exception occurred while trying to comment on commit")
1684 log.exception("Exception occurred while trying to comment on commit")
1687 raise JSONRPCError(
1685 raise JSONRPCError(
1688 'failed to set comment on repository `%s`' % (repo.repo_name,)
1686 'failed to set comment on repository `%s`' % (repo.repo_name,)
1689 )
1687 )
1690
1688
1691
1689
1692 @jsonrpc_method()
1690 @jsonrpc_method()
1693 def get_repo_comments(request, apiuser, repoid,
1691 def get_repo_comments(request, apiuser, repoid,
1694 commit_id=Optional(None), comment_type=Optional(None),
1692 commit_id=Optional(None), comment_type=Optional(None),
1695 userid=Optional(None)):
1693 userid=Optional(None)):
1696 """
1694 """
1697 Get all comments for a repository
1695 Get all comments for a repository
1698
1696
1699 :param apiuser: This is filled automatically from the |authtoken|.
1697 :param apiuser: This is filled automatically from the |authtoken|.
1700 :type apiuser: AuthUser
1698 :type apiuser: AuthUser
1701 :param repoid: Set the repository name or repository ID.
1699 :param repoid: Set the repository name or repository ID.
1702 :type repoid: str or int
1700 :type repoid: str or int
1703 :param commit_id: Optionally filter the comments by the commit_id
1701 :param commit_id: Optionally filter the comments by the commit_id
1704 :type commit_id: Optional(str), default: None
1702 :type commit_id: Optional(str), default: None
1705 :param comment_type: Optionally filter the comments by the comment_type
1703 :param comment_type: Optionally filter the comments by the comment_type
1706 one of: 'note', 'todo'
1704 one of: 'note', 'todo'
1707 :type comment_type: Optional(str), default: None
1705 :type comment_type: Optional(str), default: None
1708 :param userid: Optionally filter the comments by the author of comment
1706 :param userid: Optionally filter the comments by the author of comment
1709 :type userid: Optional(str or int), Default: None
1707 :type userid: Optional(str or int), Default: None
1710
1708
1711 Example error output:
1709 Example error output:
1712
1710
1713 .. code-block:: bash
1711 .. code-block:: bash
1714
1712
1715 {
1713 {
1716 "id" : <id_given_in_input>,
1714 "id" : <id_given_in_input>,
1717 "result" : [
1715 "result" : [
1718 {
1716 {
1719 "comment_author": <USER_DETAILS>,
1717 "comment_author": <USER_DETAILS>,
1720 "comment_created_on": "2017-02-01T14:38:16.309",
1718 "comment_created_on": "2017-02-01T14:38:16.309",
1721 "comment_f_path": "file.txt",
1719 "comment_f_path": "file.txt",
1722 "comment_id": 282,
1720 "comment_id": 282,
1723 "comment_lineno": "n1",
1721 "comment_lineno": "n1",
1724 "comment_resolved_by": null,
1722 "comment_resolved_by": null,
1725 "comment_status": [],
1723 "comment_status": [],
1726 "comment_text": "This file needs a header",
1724 "comment_text": "This file needs a header",
1727 "comment_type": "todo",
1725 "comment_type": "todo",
1728 "comment_last_version: 0
1726 "comment_last_version: 0
1729 }
1727 }
1730 ],
1728 ],
1731 "error" : null
1729 "error" : null
1732 }
1730 }
1733
1731
1734 """
1732 """
1735 repo = get_repo_or_error(repoid)
1733 repo = get_repo_or_error(repoid)
1736 if not has_superadmin_permission(apiuser):
1734 if not has_superadmin_permission(apiuser):
1737 _perms = ('repository.read', 'repository.write', 'repository.admin')
1735 _perms = ('repository.read', 'repository.write', 'repository.admin')
1738 validate_repo_permissions(apiuser, repoid, repo, _perms)
1736 validate_repo_permissions(apiuser, repoid, repo, _perms)
1739
1737
1740 commit_id = Optional.extract(commit_id)
1738 commit_id = Optional.extract(commit_id)
1741
1739
1742 userid = Optional.extract(userid)
1740 userid = Optional.extract(userid)
1743 if userid:
1741 if userid:
1744 user = get_user_or_error(userid)
1742 user = get_user_or_error(userid)
1745 else:
1743 else:
1746 user = None
1744 user = None
1747
1745
1748 comment_type = Optional.extract(comment_type)
1746 comment_type = Optional.extract(comment_type)
1749 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1747 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1750 raise JSONRPCError(
1748 raise JSONRPCError(
1751 'comment_type must be one of `{}` got {}'.format(
1749 'comment_type must be one of `{}` got {}'.format(
1752 ChangesetComment.COMMENT_TYPES, comment_type)
1750 ChangesetComment.COMMENT_TYPES, comment_type)
1753 )
1751 )
1754
1752
1755 comments = CommentsModel().get_repository_comments(
1753 comments = CommentsModel().get_repository_comments(
1756 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1754 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1757 return comments
1755 return comments
1758
1756
1759
1757
1760 @jsonrpc_method()
1758 @jsonrpc_method()
1761 def get_comment(request, apiuser, comment_id):
1759 def get_comment(request, apiuser, comment_id):
1762 """
1760 """
1763 Get single comment from repository or pull_request
1761 Get single comment from repository or pull_request
1764
1762
1765 :param apiuser: This is filled automatically from the |authtoken|.
1763 :param apiuser: This is filled automatically from the |authtoken|.
1766 :type apiuser: AuthUser
1764 :type apiuser: AuthUser
1767 :param comment_id: comment id found in the URL of comment
1765 :param comment_id: comment id found in the URL of comment
1768 :type comment_id: str or int
1766 :type comment_id: str or int
1769
1767
1770 Example error output:
1768 Example error output:
1771
1769
1772 .. code-block:: bash
1770 .. code-block:: bash
1773
1771
1774 {
1772 {
1775 "id" : <id_given_in_input>,
1773 "id" : <id_given_in_input>,
1776 "result" : {
1774 "result" : {
1777 "comment_author": <USER_DETAILS>,
1775 "comment_author": <USER_DETAILS>,
1778 "comment_created_on": "2017-02-01T14:38:16.309",
1776 "comment_created_on": "2017-02-01T14:38:16.309",
1779 "comment_f_path": "file.txt",
1777 "comment_f_path": "file.txt",
1780 "comment_id": 282,
1778 "comment_id": 282,
1781 "comment_lineno": "n1",
1779 "comment_lineno": "n1",
1782 "comment_resolved_by": null,
1780 "comment_resolved_by": null,
1783 "comment_status": [],
1781 "comment_status": [],
1784 "comment_text": "This file needs a header",
1782 "comment_text": "This file needs a header",
1785 "comment_type": "todo",
1783 "comment_type": "todo",
1786 "comment_last_version: 0
1784 "comment_last_version: 0
1787 },
1785 },
1788 "error" : null
1786 "error" : null
1789 }
1787 }
1790
1788
1791 """
1789 """
1792
1790
1793 comment = ChangesetComment.get(comment_id)
1791 comment = ChangesetComment.get(comment_id)
1794 if not comment:
1792 if not comment:
1795 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1793 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1796
1794
1797 perms = ('repository.read', 'repository.write', 'repository.admin')
1795 perms = ('repository.read', 'repository.write', 'repository.admin')
1798 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1796 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1799 (user=apiuser, repo_name=comment.repo.repo_name)
1797 (user=apiuser, repo_name=comment.repo.repo_name)
1800
1798
1801 if not has_comment_perm:
1799 if not has_comment_perm:
1802 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1800 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1803
1801
1804 return comment
1802 return comment
1805
1803
1806
1804
1807 @jsonrpc_method()
1805 @jsonrpc_method()
1808 def edit_comment(request, apiuser, message, comment_id, version,
1806 def edit_comment(request, apiuser, message, comment_id, version,
1809 userid=Optional(OAttr('apiuser'))):
1807 userid=Optional(OAttr('apiuser'))):
1810 """
1808 """
1811 Edit comment on the pull request or commit,
1809 Edit comment on the pull request or commit,
1812 specified by the `comment_id` and version. Initially version should be 0
1810 specified by the `comment_id` and version. Initially version should be 0
1813
1811
1814 :param apiuser: This is filled automatically from the |authtoken|.
1812 :param apiuser: This is filled automatically from the |authtoken|.
1815 :type apiuser: AuthUser
1813 :type apiuser: AuthUser
1816 :param comment_id: Specify the comment_id for editing
1814 :param comment_id: Specify the comment_id for editing
1817 :type comment_id: int
1815 :type comment_id: int
1818 :param version: version of the comment that will be created, starts from 0
1816 :param version: version of the comment that will be created, starts from 0
1819 :type version: int
1817 :type version: int
1820 :param message: The text content of the comment.
1818 :param message: The text content of the comment.
1821 :type message: str
1819 :type message: str
1822 :param userid: Comment on the pull request as this user
1820 :param userid: Comment on the pull request as this user
1823 :type userid: Optional(str or int)
1821 :type userid: Optional(str or int)
1824
1822
1825 Example output:
1823 Example output:
1826
1824
1827 .. code-block:: bash
1825 .. code-block:: bash
1828
1826
1829 id : <id_given_in_input>
1827 id : <id_given_in_input>
1830 result : {
1828 result : {
1831 "comment": "<comment data>",
1829 "comment": "<comment data>",
1832 "version": "<Integer>",
1830 "version": "<Integer>",
1833 },
1831 },
1834 error : null
1832 error : null
1835 """
1833 """
1836
1834
1837 auth_user = apiuser
1835 auth_user = apiuser
1838 comment = ChangesetComment.get(comment_id)
1836 comment = ChangesetComment.get(comment_id)
1839 if not comment:
1837 if not comment:
1840 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1838 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1841
1839
1842 is_super_admin = has_superadmin_permission(apiuser)
1840 is_super_admin = has_superadmin_permission(apiuser)
1843 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1841 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1844 (user=apiuser, repo_name=comment.repo.repo_name)
1842 (user=apiuser, repo_name=comment.repo.repo_name)
1845
1843
1846 if not isinstance(userid, Optional):
1844 if not isinstance(userid, Optional):
1847 if is_super_admin or is_repo_admin:
1845 if is_super_admin or is_repo_admin:
1848 apiuser = get_user_or_error(userid)
1846 apiuser = get_user_or_error(userid)
1849 auth_user = apiuser.AuthUser()
1847 auth_user = apiuser.AuthUser()
1850 else:
1848 else:
1851 raise JSONRPCError('userid is not the same as your user')
1849 raise JSONRPCError('userid is not the same as your user')
1852
1850
1853 comment_author = comment.author.user_id == auth_user.user_id
1851 comment_author = comment.author.user_id == auth_user.user_id
1854 if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1852 if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1855 raise JSONRPCError("you don't have access to edit this comment")
1853 raise JSONRPCError("you don't have access to edit this comment")
1856
1854
1857 try:
1855 try:
1858 comment_history = CommentsModel().edit(
1856 comment_history = CommentsModel().edit(
1859 comment_id=comment_id,
1857 comment_id=comment_id,
1860 text=message,
1858 text=message,
1861 auth_user=auth_user,
1859 auth_user=auth_user,
1862 version=version,
1860 version=version,
1863 )
1861 )
1864 Session().commit()
1862 Session().commit()
1865 except CommentVersionMismatch:
1863 except CommentVersionMismatch:
1866 raise JSONRPCError(
1864 raise JSONRPCError(
1867 'comment ({}) version ({}) mismatch'.format(comment_id, version)
1865 'comment ({}) version ({}) mismatch'.format(comment_id, version)
1868 )
1866 )
1869 if not comment_history and not message:
1867 if not comment_history and not message:
1870 raise JSONRPCError(
1868 raise JSONRPCError(
1871 "comment ({}) can't be changed with empty string".format(comment_id)
1869 "comment ({}) can't be changed with empty string".format(comment_id)
1872 )
1870 )
1873
1871
1874 if comment.pull_request:
1872 if comment.pull_request:
1875 pull_request = comment.pull_request
1873 pull_request = comment.pull_request
1876 PullRequestModel().trigger_pull_request_hook(
1874 PullRequestModel().trigger_pull_request_hook(
1877 pull_request, apiuser, 'comment_edit',
1875 pull_request, apiuser, 'comment_edit',
1878 data={'comment': comment})
1876 data={'comment': comment})
1879 else:
1877 else:
1880 db_repo = comment.repo
1878 db_repo = comment.repo
1881 commit_id = comment.revision
1879 commit_id = comment.revision
1882 commit = db_repo.get_commit(commit_id)
1880 commit = db_repo.get_commit(commit_id)
1883 CommentsModel().trigger_commit_comment_hook(
1881 CommentsModel().trigger_commit_comment_hook(
1884 db_repo, apiuser, 'edit',
1882 db_repo, apiuser, 'edit',
1885 data={'comment': comment, 'commit': commit})
1883 data={'comment': comment, 'commit': commit})
1886
1884
1887 data = {
1885 data = {
1888 'comment': comment,
1886 'comment': comment,
1889 'version': comment_history.version if comment_history else None,
1887 'version': comment_history.version if comment_history else None,
1890 }
1888 }
1891 return data
1889 return data
1892
1890
1893
1891
1894 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1892 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1895 # @jsonrpc_method()
1893 # @jsonrpc_method()
1896 # def delete_comment(request, apiuser, comment_id):
1894 # def delete_comment(request, apiuser, comment_id):
1897 # auth_user = apiuser
1895 # auth_user = apiuser
1898 #
1896 #
1899 # comment = ChangesetComment.get(comment_id)
1897 # comment = ChangesetComment.get(comment_id)
1900 # if not comment:
1898 # if not comment:
1901 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1899 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1902 #
1900 #
1903 # is_super_admin = has_superadmin_permission(apiuser)
1901 # is_super_admin = has_superadmin_permission(apiuser)
1904 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1902 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1905 # (user=apiuser, repo_name=comment.repo.repo_name)
1903 # (user=apiuser, repo_name=comment.repo.repo_name)
1906 #
1904 #
1907 # comment_author = comment.author.user_id == auth_user.user_id
1905 # comment_author = comment.author.user_id == auth_user.user_id
1908 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1906 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1909 # raise JSONRPCError("you don't have access to edit this comment")
1907 # raise JSONRPCError("you don't have access to edit this comment")
1910
1908
1911 @jsonrpc_method()
1909 @jsonrpc_method()
1912 def grant_user_permission(request, apiuser, repoid, userid, perm):
1910 def grant_user_permission(request, apiuser, repoid, userid, perm):
1913 """
1911 """
1914 Grant permissions for the specified user on the given repository,
1912 Grant permissions for the specified user on the given repository,
1915 or update existing permissions if found.
1913 or update existing permissions if found.
1916
1914
1917 This command can only be run using an |authtoken| with admin
1915 This command can only be run using an |authtoken| with admin
1918 permissions on the |repo|.
1916 permissions on the |repo|.
1919
1917
1920 :param apiuser: This is filled automatically from the |authtoken|.
1918 :param apiuser: This is filled automatically from the |authtoken|.
1921 :type apiuser: AuthUser
1919 :type apiuser: AuthUser
1922 :param repoid: Set the repository name or repository ID.
1920 :param repoid: Set the repository name or repository ID.
1923 :type repoid: str or int
1921 :type repoid: str or int
1924 :param userid: Set the user name.
1922 :param userid: Set the user name.
1925 :type userid: str
1923 :type userid: str
1926 :param perm: Set the user permissions, using the following format
1924 :param perm: Set the user permissions, using the following format
1927 ``(repository.(none|read|write|admin))``
1925 ``(repository.(none|read|write|admin))``
1928 :type perm: str
1926 :type perm: str
1929
1927
1930 Example output:
1928 Example output:
1931
1929
1932 .. code-block:: bash
1930 .. code-block:: bash
1933
1931
1934 id : <id_given_in_input>
1932 id : <id_given_in_input>
1935 result: {
1933 result: {
1936 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1934 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1937 "success": true
1935 "success": true
1938 }
1936 }
1939 error: null
1937 error: null
1940 """
1938 """
1941
1939
1942 repo = get_repo_or_error(repoid)
1940 repo = get_repo_or_error(repoid)
1943 user = get_user_or_error(userid)
1941 user = get_user_or_error(userid)
1944 perm = get_perm_or_error(perm)
1942 perm = get_perm_or_error(perm)
1945 if not has_superadmin_permission(apiuser):
1943 if not has_superadmin_permission(apiuser):
1946 _perms = ('repository.admin',)
1944 _perms = ('repository.admin',)
1947 validate_repo_permissions(apiuser, repoid, repo, _perms)
1945 validate_repo_permissions(apiuser, repoid, repo, _perms)
1948
1946
1949 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1947 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1950 try:
1948 try:
1951 changes = RepoModel().update_permissions(
1949 changes = RepoModel().update_permissions(
1952 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1950 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1953
1951
1954 action_data = {
1952 action_data = {
1955 'added': changes['added'],
1953 'added': changes['added'],
1956 'updated': changes['updated'],
1954 'updated': changes['updated'],
1957 'deleted': changes['deleted'],
1955 'deleted': changes['deleted'],
1958 }
1956 }
1959 audit_logger.store_api(
1957 audit_logger.store_api(
1960 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1958 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1961 Session().commit()
1959 Session().commit()
1962 PermissionModel().flush_user_permission_caches(changes)
1960 PermissionModel().flush_user_permission_caches(changes)
1963
1961
1964 return {
1962 return {
1965 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1963 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1966 perm.permission_name, user.username, repo.repo_name
1964 perm.permission_name, user.username, repo.repo_name
1967 ),
1965 ),
1968 'success': True
1966 'success': True
1969 }
1967 }
1970 except Exception:
1968 except Exception:
1971 log.exception("Exception occurred while trying edit permissions for repo")
1969 log.exception("Exception occurred while trying edit permissions for repo")
1972 raise JSONRPCError(
1970 raise JSONRPCError(
1973 'failed to edit permission for user: `%s` in repo: `%s`' % (
1971 'failed to edit permission for user: `%s` in repo: `%s`' % (
1974 userid, repoid
1972 userid, repoid
1975 )
1973 )
1976 )
1974 )
1977
1975
1978
1976
1979 @jsonrpc_method()
1977 @jsonrpc_method()
1980 def revoke_user_permission(request, apiuser, repoid, userid):
1978 def revoke_user_permission(request, apiuser, repoid, userid):
1981 """
1979 """
1982 Revoke permission for a user on the specified repository.
1980 Revoke permission for a user on the specified repository.
1983
1981
1984 This command can only be run using an |authtoken| with admin
1982 This command can only be run using an |authtoken| with admin
1985 permissions on the |repo|.
1983 permissions on the |repo|.
1986
1984
1987 :param apiuser: This is filled automatically from the |authtoken|.
1985 :param apiuser: This is filled automatically from the |authtoken|.
1988 :type apiuser: AuthUser
1986 :type apiuser: AuthUser
1989 :param repoid: Set the repository name or repository ID.
1987 :param repoid: Set the repository name or repository ID.
1990 :type repoid: str or int
1988 :type repoid: str or int
1991 :param userid: Set the user name of revoked user.
1989 :param userid: Set the user name of revoked user.
1992 :type userid: str or int
1990 :type userid: str or int
1993
1991
1994 Example error output:
1992 Example error output:
1995
1993
1996 .. code-block:: bash
1994 .. code-block:: bash
1997
1995
1998 id : <id_given_in_input>
1996 id : <id_given_in_input>
1999 result: {
1997 result: {
2000 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1998 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
2001 "success": true
1999 "success": true
2002 }
2000 }
2003 error: null
2001 error: null
2004 """
2002 """
2005
2003
2006 repo = get_repo_or_error(repoid)
2004 repo = get_repo_or_error(repoid)
2007 user = get_user_or_error(userid)
2005 user = get_user_or_error(userid)
2008 if not has_superadmin_permission(apiuser):
2006 if not has_superadmin_permission(apiuser):
2009 _perms = ('repository.admin',)
2007 _perms = ('repository.admin',)
2010 validate_repo_permissions(apiuser, repoid, repo, _perms)
2008 validate_repo_permissions(apiuser, repoid, repo, _perms)
2011
2009
2012 perm_deletions = [[user.user_id, None, "user"]]
2010 perm_deletions = [[user.user_id, None, "user"]]
2013 try:
2011 try:
2014 changes = RepoModel().update_permissions(
2012 changes = RepoModel().update_permissions(
2015 repo=repo, perm_deletions=perm_deletions, cur_user=user)
2013 repo=repo, perm_deletions=perm_deletions, cur_user=user)
2016
2014
2017 action_data = {
2015 action_data = {
2018 'added': changes['added'],
2016 'added': changes['added'],
2019 'updated': changes['updated'],
2017 'updated': changes['updated'],
2020 'deleted': changes['deleted'],
2018 'deleted': changes['deleted'],
2021 }
2019 }
2022 audit_logger.store_api(
2020 audit_logger.store_api(
2023 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2021 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2024 Session().commit()
2022 Session().commit()
2025 PermissionModel().flush_user_permission_caches(changes)
2023 PermissionModel().flush_user_permission_caches(changes)
2026
2024
2027 return {
2025 return {
2028 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
2026 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
2029 user.username, repo.repo_name
2027 user.username, repo.repo_name
2030 ),
2028 ),
2031 'success': True
2029 'success': True
2032 }
2030 }
2033 except Exception:
2031 except Exception:
2034 log.exception("Exception occurred while trying revoke permissions to repo")
2032 log.exception("Exception occurred while trying revoke permissions to repo")
2035 raise JSONRPCError(
2033 raise JSONRPCError(
2036 'failed to edit permission for user: `%s` in repo: `%s`' % (
2034 'failed to edit permission for user: `%s` in repo: `%s`' % (
2037 userid, repoid
2035 userid, repoid
2038 )
2036 )
2039 )
2037 )
2040
2038
2041
2039
2042 @jsonrpc_method()
2040 @jsonrpc_method()
2043 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2041 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2044 """
2042 """
2045 Grant permission for a user group on the specified repository,
2043 Grant permission for a user group on the specified repository,
2046 or update existing permissions.
2044 or update existing permissions.
2047
2045
2048 This command can only be run using an |authtoken| with admin
2046 This command can only be run using an |authtoken| with admin
2049 permissions on the |repo|.
2047 permissions on the |repo|.
2050
2048
2051 :param apiuser: This is filled automatically from the |authtoken|.
2049 :param apiuser: This is filled automatically from the |authtoken|.
2052 :type apiuser: AuthUser
2050 :type apiuser: AuthUser
2053 :param repoid: Set the repository name or repository ID.
2051 :param repoid: Set the repository name or repository ID.
2054 :type repoid: str or int
2052 :type repoid: str or int
2055 :param usergroupid: Specify the ID of the user group.
2053 :param usergroupid: Specify the ID of the user group.
2056 :type usergroupid: str or int
2054 :type usergroupid: str or int
2057 :param perm: Set the user group permissions using the following
2055 :param perm: Set the user group permissions using the following
2058 format: (repository.(none|read|write|admin))
2056 format: (repository.(none|read|write|admin))
2059 :type perm: str
2057 :type perm: str
2060
2058
2061 Example output:
2059 Example output:
2062
2060
2063 .. code-block:: bash
2061 .. code-block:: bash
2064
2062
2065 id : <id_given_in_input>
2063 id : <id_given_in_input>
2066 result : {
2064 result : {
2067 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2065 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2068 "success": true
2066 "success": true
2069
2067
2070 }
2068 }
2071 error : null
2069 error : null
2072
2070
2073 Example error output:
2071 Example error output:
2074
2072
2075 .. code-block:: bash
2073 .. code-block:: bash
2076
2074
2077 id : <id_given_in_input>
2075 id : <id_given_in_input>
2078 result : null
2076 result : null
2079 error : {
2077 error : {
2080 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2078 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2081 }
2079 }
2082
2080
2083 """
2081 """
2084
2082
2085 repo = get_repo_or_error(repoid)
2083 repo = get_repo_or_error(repoid)
2086 perm = get_perm_or_error(perm)
2084 perm = get_perm_or_error(perm)
2087 if not has_superadmin_permission(apiuser):
2085 if not has_superadmin_permission(apiuser):
2088 _perms = ('repository.admin',)
2086 _perms = ('repository.admin',)
2089 validate_repo_permissions(apiuser, repoid, repo, _perms)
2087 validate_repo_permissions(apiuser, repoid, repo, _perms)
2090
2088
2091 user_group = get_user_group_or_error(usergroupid)
2089 user_group = get_user_group_or_error(usergroupid)
2092 if not has_superadmin_permission(apiuser):
2090 if not has_superadmin_permission(apiuser):
2093 # check if we have at least read permission for this user group !
2091 # check if we have at least read permission for this user group !
2094 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2092 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2095 if not HasUserGroupPermissionAnyApi(*_perms)(
2093 if not HasUserGroupPermissionAnyApi(*_perms)(
2096 user=apiuser, user_group_name=user_group.users_group_name):
2094 user=apiuser, user_group_name=user_group.users_group_name):
2097 raise JSONRPCError(
2095 raise JSONRPCError(
2098 'user group `%s` does not exist' % (usergroupid,))
2096 'user group `%s` does not exist' % (usergroupid,))
2099
2097
2100 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2098 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2101 try:
2099 try:
2102 changes = RepoModel().update_permissions(
2100 changes = RepoModel().update_permissions(
2103 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2101 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2104 action_data = {
2102 action_data = {
2105 'added': changes['added'],
2103 'added': changes['added'],
2106 'updated': changes['updated'],
2104 'updated': changes['updated'],
2107 'deleted': changes['deleted'],
2105 'deleted': changes['deleted'],
2108 }
2106 }
2109 audit_logger.store_api(
2107 audit_logger.store_api(
2110 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2108 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2111 Session().commit()
2109 Session().commit()
2112 PermissionModel().flush_user_permission_caches(changes)
2110 PermissionModel().flush_user_permission_caches(changes)
2113
2111
2114 return {
2112 return {
2115 'msg': 'Granted perm: `%s` for user group: `%s` in '
2113 'msg': 'Granted perm: `%s` for user group: `%s` in '
2116 'repo: `%s`' % (
2114 'repo: `%s`' % (
2117 perm.permission_name, user_group.users_group_name,
2115 perm.permission_name, user_group.users_group_name,
2118 repo.repo_name
2116 repo.repo_name
2119 ),
2117 ),
2120 'success': True
2118 'success': True
2121 }
2119 }
2122 except Exception:
2120 except Exception:
2123 log.exception(
2121 log.exception(
2124 "Exception occurred while trying change permission on repo")
2122 "Exception occurred while trying change permission on repo")
2125 raise JSONRPCError(
2123 raise JSONRPCError(
2126 'failed to edit permission for user group: `%s` in '
2124 'failed to edit permission for user group: `%s` in '
2127 'repo: `%s`' % (
2125 'repo: `%s`' % (
2128 usergroupid, repo.repo_name
2126 usergroupid, repo.repo_name
2129 )
2127 )
2130 )
2128 )
2131
2129
2132
2130
2133 @jsonrpc_method()
2131 @jsonrpc_method()
2134 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2132 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2135 """
2133 """
2136 Revoke the permissions of a user group on a given repository.
2134 Revoke the permissions of a user group on a given repository.
2137
2135
2138 This command can only be run using an |authtoken| with admin
2136 This command can only be run using an |authtoken| with admin
2139 permissions on the |repo|.
2137 permissions on the |repo|.
2140
2138
2141 :param apiuser: This is filled automatically from the |authtoken|.
2139 :param apiuser: This is filled automatically from the |authtoken|.
2142 :type apiuser: AuthUser
2140 :type apiuser: AuthUser
2143 :param repoid: Set the repository name or repository ID.
2141 :param repoid: Set the repository name or repository ID.
2144 :type repoid: str or int
2142 :type repoid: str or int
2145 :param usergroupid: Specify the user group ID.
2143 :param usergroupid: Specify the user group ID.
2146 :type usergroupid: str or int
2144 :type usergroupid: str or int
2147
2145
2148 Example output:
2146 Example output:
2149
2147
2150 .. code-block:: bash
2148 .. code-block:: bash
2151
2149
2152 id : <id_given_in_input>
2150 id : <id_given_in_input>
2153 result: {
2151 result: {
2154 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2152 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2155 "success": true
2153 "success": true
2156 }
2154 }
2157 error: null
2155 error: null
2158 """
2156 """
2159
2157
2160 repo = get_repo_or_error(repoid)
2158 repo = get_repo_or_error(repoid)
2161 if not has_superadmin_permission(apiuser):
2159 if not has_superadmin_permission(apiuser):
2162 _perms = ('repository.admin',)
2160 _perms = ('repository.admin',)
2163 validate_repo_permissions(apiuser, repoid, repo, _perms)
2161 validate_repo_permissions(apiuser, repoid, repo, _perms)
2164
2162
2165 user_group = get_user_group_or_error(usergroupid)
2163 user_group = get_user_group_or_error(usergroupid)
2166 if not has_superadmin_permission(apiuser):
2164 if not has_superadmin_permission(apiuser):
2167 # check if we have at least read permission for this user group !
2165 # check if we have at least read permission for this user group !
2168 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2166 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2169 if not HasUserGroupPermissionAnyApi(*_perms)(
2167 if not HasUserGroupPermissionAnyApi(*_perms)(
2170 user=apiuser, user_group_name=user_group.users_group_name):
2168 user=apiuser, user_group_name=user_group.users_group_name):
2171 raise JSONRPCError(
2169 raise JSONRPCError(
2172 'user group `%s` does not exist' % (usergroupid,))
2170 'user group `%s` does not exist' % (usergroupid,))
2173
2171
2174 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2172 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2175 try:
2173 try:
2176 changes = RepoModel().update_permissions(
2174 changes = RepoModel().update_permissions(
2177 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2175 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2178 action_data = {
2176 action_data = {
2179 'added': changes['added'],
2177 'added': changes['added'],
2180 'updated': changes['updated'],
2178 'updated': changes['updated'],
2181 'deleted': changes['deleted'],
2179 'deleted': changes['deleted'],
2182 }
2180 }
2183 audit_logger.store_api(
2181 audit_logger.store_api(
2184 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2182 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2185 Session().commit()
2183 Session().commit()
2186 PermissionModel().flush_user_permission_caches(changes)
2184 PermissionModel().flush_user_permission_caches(changes)
2187
2185
2188 return {
2186 return {
2189 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
2187 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
2190 user_group.users_group_name, repo.repo_name
2188 user_group.users_group_name, repo.repo_name
2191 ),
2189 ),
2192 'success': True
2190 'success': True
2193 }
2191 }
2194 except Exception:
2192 except Exception:
2195 log.exception("Exception occurred while trying revoke "
2193 log.exception("Exception occurred while trying revoke "
2196 "user group permission on repo")
2194 "user group permission on repo")
2197 raise JSONRPCError(
2195 raise JSONRPCError(
2198 'failed to edit permission for user group: `%s` in '
2196 'failed to edit permission for user group: `%s` in '
2199 'repo: `%s`' % (
2197 'repo: `%s`' % (
2200 user_group.users_group_name, repo.repo_name
2198 user_group.users_group_name, repo.repo_name
2201 )
2199 )
2202 )
2200 )
2203
2201
2204
2202
2205 @jsonrpc_method()
2203 @jsonrpc_method()
2206 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2204 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2207 """
2205 """
2208 Triggers a pull on the given repository from a remote location. You
2206 Triggers a pull on the given repository from a remote location. You
2209 can use this to keep remote repositories up-to-date.
2207 can use this to keep remote repositories up-to-date.
2210
2208
2211 This command can only be run using an |authtoken| with admin
2209 This command can only be run using an |authtoken| with admin
2212 rights to the specified repository. For more information,
2210 rights to the specified repository. For more information,
2213 see :ref:`config-token-ref`.
2211 see :ref:`config-token-ref`.
2214
2212
2215 This command takes the following options:
2213 This command takes the following options:
2216
2214
2217 :param apiuser: This is filled automatically from the |authtoken|.
2215 :param apiuser: This is filled automatically from the |authtoken|.
2218 :type apiuser: AuthUser
2216 :type apiuser: AuthUser
2219 :param repoid: The repository name or repository ID.
2217 :param repoid: The repository name or repository ID.
2220 :type repoid: str or int
2218 :type repoid: str or int
2221 :param remote_uri: Optional remote URI to pass in for pull
2219 :param remote_uri: Optional remote URI to pass in for pull
2222 :type remote_uri: str
2220 :type remote_uri: str
2223
2221
2224 Example output:
2222 Example output:
2225
2223
2226 .. code-block:: bash
2224 .. code-block:: bash
2227
2225
2228 id : <id_given_in_input>
2226 id : <id_given_in_input>
2229 result : {
2227 result : {
2230 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2228 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2231 "repository": "<repository name>"
2229 "repository": "<repository name>"
2232 }
2230 }
2233 error : null
2231 error : null
2234
2232
2235 Example error output:
2233 Example error output:
2236
2234
2237 .. code-block:: bash
2235 .. code-block:: bash
2238
2236
2239 id : <id_given_in_input>
2237 id : <id_given_in_input>
2240 result : null
2238 result : null
2241 error : {
2239 error : {
2242 "Unable to push changes from `<remote_url>`"
2240 "Unable to push changes from `<remote_url>`"
2243 }
2241 }
2244
2242
2245 """
2243 """
2246
2244
2247 repo = get_repo_or_error(repoid)
2245 repo = get_repo_or_error(repoid)
2248 remote_uri = Optional.extract(remote_uri)
2246 remote_uri = Optional.extract(remote_uri)
2249 remote_uri_display = remote_uri or repo.clone_uri_hidden
2247 remote_uri_display = remote_uri or repo.clone_uri_hidden
2250 if not has_superadmin_permission(apiuser):
2248 if not has_superadmin_permission(apiuser):
2251 _perms = ('repository.admin',)
2249 _perms = ('repository.admin',)
2252 validate_repo_permissions(apiuser, repoid, repo, _perms)
2250 validate_repo_permissions(apiuser, repoid, repo, _perms)
2253
2251
2254 try:
2252 try:
2255 ScmModel().pull_changes(
2253 ScmModel().pull_changes(
2256 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2254 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2257 return {
2255 return {
2258 'msg': 'Pulled from url `%s` on repo `%s`' % (
2256 'msg': 'Pulled from url `%s` on repo `%s`' % (
2259 remote_uri_display, repo.repo_name),
2257 remote_uri_display, repo.repo_name),
2260 'repository': repo.repo_name
2258 'repository': repo.repo_name
2261 }
2259 }
2262 except Exception:
2260 except Exception:
2263 log.exception("Exception occurred while trying to "
2261 log.exception("Exception occurred while trying to "
2264 "pull changes from remote location")
2262 "pull changes from remote location")
2265 raise JSONRPCError(
2263 raise JSONRPCError(
2266 'Unable to pull changes from `%s`' % remote_uri_display
2264 'Unable to pull changes from `%s`' % remote_uri_display
2267 )
2265 )
2268
2266
2269
2267
2270 @jsonrpc_method()
2268 @jsonrpc_method()
2271 def strip(request, apiuser, repoid, revision, branch):
2269 def strip(request, apiuser, repoid, revision, branch):
2272 """
2270 """
2273 Strips the given revision from the specified repository.
2271 Strips the given revision from the specified repository.
2274
2272
2275 * This will remove the revision and all of its decendants.
2273 * This will remove the revision and all of its decendants.
2276
2274
2277 This command can only be run using an |authtoken| with admin rights to
2275 This command can only be run using an |authtoken| with admin rights to
2278 the specified repository.
2276 the specified repository.
2279
2277
2280 This command takes the following options:
2278 This command takes the following options:
2281
2279
2282 :param apiuser: This is filled automatically from the |authtoken|.
2280 :param apiuser: This is filled automatically from the |authtoken|.
2283 :type apiuser: AuthUser
2281 :type apiuser: AuthUser
2284 :param repoid: The repository name or repository ID.
2282 :param repoid: The repository name or repository ID.
2285 :type repoid: str or int
2283 :type repoid: str or int
2286 :param revision: The revision you wish to strip.
2284 :param revision: The revision you wish to strip.
2287 :type revision: str
2285 :type revision: str
2288 :param branch: The branch from which to strip the revision.
2286 :param branch: The branch from which to strip the revision.
2289 :type branch: str
2287 :type branch: str
2290
2288
2291 Example output:
2289 Example output:
2292
2290
2293 .. code-block:: bash
2291 .. code-block:: bash
2294
2292
2295 id : <id_given_in_input>
2293 id : <id_given_in_input>
2296 result : {
2294 result : {
2297 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2295 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2298 "repository": "<repository name>"
2296 "repository": "<repository name>"
2299 }
2297 }
2300 error : null
2298 error : null
2301
2299
2302 Example error output:
2300 Example error output:
2303
2301
2304 .. code-block:: bash
2302 .. code-block:: bash
2305
2303
2306 id : <id_given_in_input>
2304 id : <id_given_in_input>
2307 result : null
2305 result : null
2308 error : {
2306 error : {
2309 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2307 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2310 }
2308 }
2311
2309
2312 """
2310 """
2313
2311
2314 repo = get_repo_or_error(repoid)
2312 repo = get_repo_or_error(repoid)
2315 if not has_superadmin_permission(apiuser):
2313 if not has_superadmin_permission(apiuser):
2316 _perms = ('repository.admin',)
2314 _perms = ('repository.admin',)
2317 validate_repo_permissions(apiuser, repoid, repo, _perms)
2315 validate_repo_permissions(apiuser, repoid, repo, _perms)
2318
2316
2319 try:
2317 try:
2320 ScmModel().strip(repo, revision, branch)
2318 ScmModel().strip(repo, revision, branch)
2321 audit_logger.store_api(
2319 audit_logger.store_api(
2322 'repo.commit.strip', action_data={'commit_id': revision},
2320 'repo.commit.strip', action_data={'commit_id': revision},
2323 repo=repo,
2321 repo=repo,
2324 user=apiuser, commit=True)
2322 user=apiuser, commit=True)
2325
2323
2326 return {
2324 return {
2327 'msg': 'Stripped commit %s from repo `%s`' % (
2325 'msg': 'Stripped commit %s from repo `%s`' % (
2328 revision, repo.repo_name),
2326 revision, repo.repo_name),
2329 'repository': repo.repo_name
2327 'repository': repo.repo_name
2330 }
2328 }
2331 except Exception:
2329 except Exception:
2332 log.exception("Exception while trying to strip")
2330 log.exception("Exception while trying to strip")
2333 raise JSONRPCError(
2331 raise JSONRPCError(
2334 'Unable to strip commit %s from repo `%s`' % (
2332 'Unable to strip commit %s from repo `%s`' % (
2335 revision, repo.repo_name)
2333 revision, repo.repo_name)
2336 )
2334 )
2337
2335
2338
2336
2339 @jsonrpc_method()
2337 @jsonrpc_method()
2340 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2338 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2341 """
2339 """
2342 Returns all settings for a repository. If key is given it only returns the
2340 Returns all settings for a repository. If key is given it only returns the
2343 setting identified by the key or null.
2341 setting identified by the key or null.
2344
2342
2345 :param apiuser: This is filled automatically from the |authtoken|.
2343 :param apiuser: This is filled automatically from the |authtoken|.
2346 :type apiuser: AuthUser
2344 :type apiuser: AuthUser
2347 :param repoid: The repository name or repository id.
2345 :param repoid: The repository name or repository id.
2348 :type repoid: str or int
2346 :type repoid: str or int
2349 :param key: Key of the setting to return.
2347 :param key: Key of the setting to return.
2350 :type: key: Optional(str)
2348 :type: key: Optional(str)
2351
2349
2352 Example output:
2350 Example output:
2353
2351
2354 .. code-block:: bash
2352 .. code-block:: bash
2355
2353
2356 {
2354 {
2357 "error": null,
2355 "error": null,
2358 "id": 237,
2356 "id": 237,
2359 "result": {
2357 "result": {
2360 "extensions_largefiles": true,
2358 "extensions_largefiles": true,
2361 "extensions_evolve": true,
2359 "extensions_evolve": true,
2362 "hooks_changegroup_push_logger": true,
2360 "hooks_changegroup_push_logger": true,
2363 "hooks_changegroup_repo_size": false,
2361 "hooks_changegroup_repo_size": false,
2364 "hooks_outgoing_pull_logger": true,
2362 "hooks_outgoing_pull_logger": true,
2365 "phases_publish": "True",
2363 "phases_publish": "True",
2366 "rhodecode_hg_use_rebase_for_merging": true,
2364 "rhodecode_hg_use_rebase_for_merging": true,
2367 "rhodecode_pr_merge_enabled": true,
2365 "rhodecode_pr_merge_enabled": true,
2368 "rhodecode_use_outdated_comments": true
2366 "rhodecode_use_outdated_comments": true
2369 }
2367 }
2370 }
2368 }
2371 """
2369 """
2372
2370
2373 # Restrict access to this api method to admins only.
2371 # Restrict access to this api method to super-admins, and repo admins only.
2372 repo = get_repo_or_error(repoid)
2374 if not has_superadmin_permission(apiuser):
2373 if not has_superadmin_permission(apiuser):
2375 raise JSONRPCForbidden()
2374 _perms = ('repository.admin',)
2375 validate_repo_permissions(apiuser, repoid, repo, _perms)
2376
2376
2377 try:
2377 try:
2378 repo = get_repo_or_error(repoid)
2379 settings_model = VcsSettingsModel(repo=repo)
2378 settings_model = VcsSettingsModel(repo=repo)
2380 settings = settings_model.get_global_settings()
2379 settings = settings_model.get_global_settings()
2381 settings.update(settings_model.get_repo_settings())
2380 settings.update(settings_model.get_repo_settings())
2382
2381
2383 # If only a single setting is requested fetch it from all settings.
2382 # If only a single setting is requested fetch it from all settings.
2384 key = Optional.extract(key)
2383 key = Optional.extract(key)
2385 if key is not None:
2384 if key is not None:
2386 settings = settings.get(key, None)
2385 settings = settings.get(key, None)
2387 except Exception:
2386 except Exception:
2388 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2387 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2389 log.exception(msg)
2388 log.exception(msg)
2390 raise JSONRPCError(msg)
2389 raise JSONRPCError(msg)
2391
2390
2392 return settings
2391 return settings
2393
2392
2394
2393
2395 @jsonrpc_method()
2394 @jsonrpc_method()
2396 def set_repo_settings(request, apiuser, repoid, settings):
2395 def set_repo_settings(request, apiuser, repoid, settings):
2397 """
2396 """
2398 Update repository settings. Returns true on success.
2397 Update repository settings. Returns true on success.
2399
2398
2400 :param apiuser: This is filled automatically from the |authtoken|.
2399 :param apiuser: This is filled automatically from the |authtoken|.
2401 :type apiuser: AuthUser
2400 :type apiuser: AuthUser
2402 :param repoid: The repository name or repository id.
2401 :param repoid: The repository name or repository id.
2403 :type repoid: str or int
2402 :type repoid: str or int
2404 :param settings: The new settings for the repository.
2403 :param settings: The new settings for the repository.
2405 :type: settings: dict
2404 :type: settings: dict
2406
2405
2407 Example output:
2406 Example output:
2408
2407
2409 .. code-block:: bash
2408 .. code-block:: bash
2410
2409
2411 {
2410 {
2412 "error": null,
2411 "error": null,
2413 "id": 237,
2412 "id": 237,
2414 "result": true
2413 "result": true
2415 }
2414 }
2416 """
2415 """
2417 # Restrict access to this api method to admins only.
2416 # Restrict access to this api method to super-admins, and repo admins only.
2417 repo = get_repo_or_error(repoid)
2418 if not has_superadmin_permission(apiuser):
2418 if not has_superadmin_permission(apiuser):
2419 raise JSONRPCForbidden()
2419 _perms = ('repository.admin',)
2420 validate_repo_permissions(apiuser, repoid, repo, _perms)
2420
2421
2421 if type(settings) is not dict:
2422 if type(settings) is not dict:
2422 raise JSONRPCError('Settings have to be a JSON Object.')
2423 raise JSONRPCError('Settings have to be a JSON Object.')
2423
2424
2424 try:
2425 try:
2425 settings_model = VcsSettingsModel(repo=repoid)
2426 settings_model = VcsSettingsModel(repo=repoid)
2426
2427
2427 # Merge global, repo and incoming settings.
2428 # Merge global, repo and incoming settings.
2428 new_settings = settings_model.get_global_settings()
2429 new_settings = settings_model.get_global_settings()
2429 new_settings.update(settings_model.get_repo_settings())
2430 new_settings.update(settings_model.get_repo_settings())
2430 new_settings.update(settings)
2431 new_settings.update(settings)
2431
2432
2432 # Update the settings.
2433 # Update the settings.
2433 inherit_global_settings = new_settings.get(
2434 inherit_global_settings = new_settings.get(
2434 'inherit_global_settings', False)
2435 'inherit_global_settings', False)
2435 settings_model.create_or_update_repo_settings(
2436 settings_model.create_or_update_repo_settings(
2436 new_settings, inherit_global_settings=inherit_global_settings)
2437 new_settings, inherit_global_settings=inherit_global_settings)
2437 Session().commit()
2438 Session().commit()
2438 except Exception:
2439 except Exception:
2439 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2440 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2440 log.exception(msg)
2441 log.exception(msg)
2441 raise JSONRPCError(msg)
2442 raise JSONRPCError(msg)
2442
2443
2443 # Indicate success.
2444 # Indicate success.
2444 return True
2445 return True
2445
2446
2446
2447
2447 @jsonrpc_method()
2448 @jsonrpc_method()
2448 def maintenance(request, apiuser, repoid):
2449 def maintenance(request, apiuser, repoid):
2449 """
2450 """
2450 Triggers a maintenance on the given repository.
2451 Triggers a maintenance on the given repository.
2451
2452
2452 This command can only be run using an |authtoken| with admin
2453 This command can only be run using an |authtoken| with admin
2453 rights to the specified repository. For more information,
2454 rights to the specified repository. For more information,
2454 see :ref:`config-token-ref`.
2455 see :ref:`config-token-ref`.
2455
2456
2456 This command takes the following options:
2457 This command takes the following options:
2457
2458
2458 :param apiuser: This is filled automatically from the |authtoken|.
2459 :param apiuser: This is filled automatically from the |authtoken|.
2459 :type apiuser: AuthUser
2460 :type apiuser: AuthUser
2460 :param repoid: The repository name or repository ID.
2461 :param repoid: The repository name or repository ID.
2461 :type repoid: str or int
2462 :type repoid: str or int
2462
2463
2463 Example output:
2464 Example output:
2464
2465
2465 .. code-block:: bash
2466 .. code-block:: bash
2466
2467
2467 id : <id_given_in_input>
2468 id : <id_given_in_input>
2468 result : {
2469 result : {
2469 "msg": "executed maintenance command",
2470 "msg": "executed maintenance command",
2470 "executed_actions": [
2471 "executed_actions": [
2471 <action_message>, <action_message2>...
2472 <action_message>, <action_message2>...
2472 ],
2473 ],
2473 "repository": "<repository name>"
2474 "repository": "<repository name>"
2474 }
2475 }
2475 error : null
2476 error : null
2476
2477
2477 Example error output:
2478 Example error output:
2478
2479
2479 .. code-block:: bash
2480 .. code-block:: bash
2480
2481
2481 id : <id_given_in_input>
2482 id : <id_given_in_input>
2482 result : null
2483 result : null
2483 error : {
2484 error : {
2484 "Unable to execute maintenance on `<reponame>`"
2485 "Unable to execute maintenance on `<reponame>`"
2485 }
2486 }
2486
2487
2487 """
2488 """
2488
2489
2489 repo = get_repo_or_error(repoid)
2490 repo = get_repo_or_error(repoid)
2490 if not has_superadmin_permission(apiuser):
2491 if not has_superadmin_permission(apiuser):
2491 _perms = ('repository.admin',)
2492 _perms = ('repository.admin',)
2492 validate_repo_permissions(apiuser, repoid, repo, _perms)
2493 validate_repo_permissions(apiuser, repoid, repo, _perms)
2493
2494
2494 try:
2495 try:
2495 maintenance = repo_maintenance.RepoMaintenance()
2496 maintenance = repo_maintenance.RepoMaintenance()
2496 executed_actions = maintenance.execute(repo)
2497 executed_actions = maintenance.execute(repo)
2497
2498
2498 return {
2499 return {
2499 'msg': 'executed maintenance command',
2500 'msg': 'executed maintenance command',
2500 'executed_actions': executed_actions,
2501 'executed_actions': executed_actions,
2501 'repository': repo.repo_name
2502 'repository': repo.repo_name
2502 }
2503 }
2503 except Exception:
2504 except Exception:
2504 log.exception("Exception occurred while trying to run maintenance")
2505 log.exception("Exception occurred while trying to run maintenance")
2505 raise JSONRPCError(
2506 raise JSONRPCError(
2506 'Unable to execute maintenance on `%s`' % repo.repo_name)
2507 'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,177 +1,187 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import uuid
22 import uuid
23
23
24 from pyramid.view import view_config
24 from pyramid.view import view_config
25 from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPBadGateway
25 from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPBadGateway
26
26
27 from rhodecode.apps._base import BaseAppView
27 from rhodecode.apps._base import BaseAppView
28 from rhodecode.lib.channelstream import (
28 from rhodecode.lib.channelstream import (
29 channelstream_request, get_channelstream_server_url,
29 channelstream_request, get_channelstream_server_url,
30 ChannelstreamConnectionException,
30 ChannelstreamConnectionException,
31 ChannelstreamPermissionException,
31 ChannelstreamPermissionException,
32 check_channel_permissions,
32 check_channel_permissions,
33 get_connection_validators,
33 get_connection_validators,
34 get_user_data,
34 get_user_data,
35 parse_channels_info,
35 parse_channels_info,
36 update_history_from_logs,
36 update_history_from_logs,
37 STATE_PUBLIC_KEYS)
37 USER_STATE_PUBLIC_KEYS)
38
38
39 from rhodecode.lib.auth import NotAnonymous
39 from rhodecode.lib.auth import NotAnonymous
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 class ChannelstreamView(BaseAppView):
44 class ChannelstreamView(BaseAppView):
45
45
46 def load_default_context(self):
46 def load_default_context(self):
47 c = self._get_local_tmpl_context()
47 c = self._get_local_tmpl_context()
48 self.channelstream_config = \
48 self.channelstream_config = \
49 self.request.registry.rhodecode_plugins['channelstream']
49 self.request.registry.rhodecode_plugins['channelstream']
50 if not self.channelstream_config.get('enabled'):
50 if not self.channelstream_config.get('enabled'):
51 log.error('Channelstream plugin is disabled')
51 log.error('Channelstream plugin is disabled')
52 raise HTTPBadRequest()
52 raise HTTPBadRequest()
53
53
54 return c
54 return c
55
55
56 @NotAnonymous()
56 @NotAnonymous()
57 @view_config(route_name='channelstream_connect', renderer='json_ext')
57 @view_config(route_name='channelstream_connect', renderer='json_ext')
58 def connect(self):
58 def connect(self):
59 """ handle authorization of users trying to connect """
59 """ handle authorization of users trying to connect """
60
60
61 self.load_default_context()
61 self.load_default_context()
62 try:
62 try:
63 json_body = self.request.json_body
63 json_body = self.request.json_body
64 except Exception:
64 except Exception:
65 log.exception('Failed to decode json from request')
65 log.exception('Failed to decode json from request')
66 raise HTTPBadRequest()
66 raise HTTPBadRequest()
67
67
68 try:
68 try:
69 channels = check_channel_permissions(
69 channels = check_channel_permissions(
70 json_body.get('channels'),
70 json_body.get('channels'),
71 get_connection_validators(self.request.registry))
71 get_connection_validators(self.request.registry))
72 except ChannelstreamPermissionException:
72 except ChannelstreamPermissionException:
73 log.error('Incorrect permissions for requested channels')
73 log.error('Incorrect permissions for requested channels')
74 raise HTTPForbidden()
74 raise HTTPForbidden()
75
75
76 user = self._rhodecode_user
76 user = self._rhodecode_user
77 if user.user_id:
77 if user.user_id:
78 user_data = get_user_data(user.user_id)
78 user_data = get_user_data(user.user_id)
79 else:
79 else:
80 user_data = {
80 user_data = {
81 'id': None,
81 'id': None,
82 'username': None,
82 'username': None,
83 'first_name': None,
83 'first_name': None,
84 'last_name': None,
84 'last_name': None,
85 'icon_link': None,
85 'icon_link': None,
86 'display_name': None,
86 'display_name': None,
87 'display_link': None,
87 'display_link': None,
88 }
88 }
89 user_data['permissions'] = self._rhodecode_user.permissions_safe
89
90 #user_data['permissions'] = self._rhodecode_user.permissions_safe
91
90 payload = {
92 payload = {
91 'username': user.username,
93 'username': user.username,
92 'user_state': user_data,
94 'user_state': user_data,
93 'conn_id': str(uuid.uuid4()),
95 'conn_id': str(uuid.uuid4()),
94 'channels': channels,
96 'channels': channels,
95 'channel_configs': {},
97 'channel_configs': {},
96 'state_public_keys': STATE_PUBLIC_KEYS,
98 'state_public_keys': USER_STATE_PUBLIC_KEYS,
97 'info': {
99 'info': {
98 'exclude_channels': ['broadcast']
100 'exclude_channels': ['broadcast']
99 }
101 }
100 }
102 }
101 filtered_channels = [channel for channel in channels
103 filtered_channels = [channel for channel in channels
102 if channel != 'broadcast']
104 if channel != 'broadcast']
103 for channel in filtered_channels:
105 for channel in filtered_channels:
104 payload['channel_configs'][channel] = {
106 payload['channel_configs'][channel] = {
105 'notify_presence': True,
107 'notify_presence': True,
106 'history_size': 100,
108 'history_size': 100,
107 'store_history': True,
109 'store_history': True,
108 'broadcast_presence_with_user_lists': True
110 'broadcast_presence_with_user_lists': True
109 }
111 }
110 # connect user to server
112 # connect user to server
111 channelstream_url = get_channelstream_server_url(
113 channelstream_url = get_channelstream_server_url(
112 self.channelstream_config, '/connect')
114 self.channelstream_config, '/connect')
113 try:
115 try:
114 connect_result = channelstream_request(
116 connect_result = channelstream_request(
115 self.channelstream_config, payload, '/connect')
117 self.channelstream_config, payload, '/connect')
116 except ChannelstreamConnectionException:
118 except ChannelstreamConnectionException:
117 log.exception(
119 log.exception(
118 'Channelstream service at {} is down'.format(channelstream_url))
120 'Channelstream service at {} is down'.format(channelstream_url))
119 return HTTPBadGateway()
121 return HTTPBadGateway()
120
122
123 channel_info = connect_result.get('channels_info')
124 if not channel_info:
125 raise HTTPBadRequest()
126
121 connect_result['channels'] = channels
127 connect_result['channels'] = channels
122 connect_result['channels_info'] = parse_channels_info(
128 connect_result['channels_info'] = parse_channels_info(
123 connect_result['channels_info'],
129 channel_info, include_channel_info=filtered_channels)
124 include_channel_info=filtered_channels)
125 update_history_from_logs(self.channelstream_config,
130 update_history_from_logs(self.channelstream_config,
126 filtered_channels, connect_result)
131 filtered_channels, connect_result)
127 return connect_result
132 return connect_result
128
133
129 @NotAnonymous()
134 @NotAnonymous()
130 @view_config(route_name='channelstream_subscribe', renderer='json_ext')
135 @view_config(route_name='channelstream_subscribe', renderer='json_ext')
131 def subscribe(self):
136 def subscribe(self):
132 """ can be used to subscribe specific connection to other channels """
137 """ can be used to subscribe specific connection to other channels """
133 self.load_default_context()
138 self.load_default_context()
134 try:
139 try:
135 json_body = self.request.json_body
140 json_body = self.request.json_body
136 except Exception:
141 except Exception:
137 log.exception('Failed to decode json from request')
142 log.exception('Failed to decode json from request')
138 raise HTTPBadRequest()
143 raise HTTPBadRequest()
139 try:
144 try:
140 channels = check_channel_permissions(
145 channels = check_channel_permissions(
141 json_body.get('channels'),
146 json_body.get('channels'),
142 get_connection_validators(self.request.registry))
147 get_connection_validators(self.request.registry))
143 except ChannelstreamPermissionException:
148 except ChannelstreamPermissionException:
144 log.error('Incorrect permissions for requested channels')
149 log.error('Incorrect permissions for requested channels')
145 raise HTTPForbidden()
150 raise HTTPForbidden()
146 payload = {'conn_id': json_body.get('conn_id', ''),
151 payload = {'conn_id': json_body.get('conn_id', ''),
147 'channels': channels,
152 'channels': channels,
148 'channel_configs': {},
153 'channel_configs': {},
149 'info': {
154 'info': {
150 'exclude_channels': ['broadcast']}
155 'exclude_channels': ['broadcast']}
151 }
156 }
152 filtered_channels = [chan for chan in channels if chan != 'broadcast']
157 filtered_channels = [chan for chan in channels if chan != 'broadcast']
153 for channel in filtered_channels:
158 for channel in filtered_channels:
154 payload['channel_configs'][channel] = {
159 payload['channel_configs'][channel] = {
155 'notify_presence': True,
160 'notify_presence': True,
156 'history_size': 100,
161 'history_size': 100,
157 'store_history': True,
162 'store_history': True,
158 'broadcast_presence_with_user_lists': True
163 'broadcast_presence_with_user_lists': True
159 }
164 }
160
165
161 channelstream_url = get_channelstream_server_url(
166 channelstream_url = get_channelstream_server_url(
162 self.channelstream_config, '/subscribe')
167 self.channelstream_config, '/subscribe')
163 try:
168 try:
164 connect_result = channelstream_request(
169 connect_result = channelstream_request(
165 self.channelstream_config, payload, '/subscribe')
170 self.channelstream_config, payload, '/subscribe')
166 except ChannelstreamConnectionException:
171 except ChannelstreamConnectionException:
167 log.exception(
172 log.exception(
168 'Channelstream service at {} is down'.format(channelstream_url))
173 'Channelstream service at {} is down'.format(channelstream_url))
169 return HTTPBadGateway()
174 return HTTPBadGateway()
175
176 channel_info = connect_result.get('channels_info')
177 if not channel_info:
178 raise HTTPBadRequest()
179
170 # include_channel_info will limit history only to new channel
180 # include_channel_info will limit history only to new channel
171 # to not overwrite histories on other channels in client
181 # to not overwrite histories on other channels in client
172 connect_result['channels_info'] = parse_channels_info(
182 connect_result['channels_info'] = parse_channels_info(
173 connect_result['channels_info'],
183 channel_info,
174 include_channel_info=filtered_channels)
184 include_channel_info=filtered_channels)
175 update_history_from_logs(
185 update_history_from_logs(
176 self.channelstream_config, filtered_channels, connect_result)
186 self.channelstream_config, filtered_channels, connect_result)
177 return connect_result
187 return connect_result
@@ -1,52 +1,52 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import os
20 import os
21 from rhodecode.apps.file_store import config_keys
21 from rhodecode.apps.file_store import config_keys
22 from rhodecode.config.middleware import _bool_setting, _string_setting
22 from rhodecode.config.middleware import _bool_setting, _string_setting
23
23
24
24
25 def _sanitize_settings_and_apply_defaults(settings):
25 def _sanitize_settings_and_apply_defaults(settings):
26 """
26 """
27 Set defaults, convert to python types and validate settings.
27 Set defaults, convert to python types and validate settings.
28 """
28 """
29 _bool_setting(settings, config_keys.enabled, 'true')
29 _bool_setting(settings, config_keys.enabled, 'true')
30
30
31 _string_setting(settings, config_keys.backend, 'local')
31 _string_setting(settings, config_keys.backend, 'local')
32
32
33 default_store = os.path.join(os.path.dirname(settings['__file__']), 'upload_store')
33 default_store = os.path.join(os.path.dirname(settings['__file__']), 'upload_store')
34 _string_setting(settings, config_keys.store_path, default_store)
34 _string_setting(settings, config_keys.store_path, default_store)
35
35
36
36
37 def includeme(config):
37 def includeme(config):
38 settings = config.registry.settings
38 settings = config.registry.settings
39 _sanitize_settings_and_apply_defaults(settings)
39 _sanitize_settings_and_apply_defaults(settings)
40
40
41 config.add_route(
41 config.add_route(
42 name='upload_file',
42 name='upload_file',
43 pattern='/_file_store/upload')
43 pattern='/_file_store/upload')
44 config.add_route(
44 config.add_route(
45 name='download_file',
45 name='download_file',
46 pattern='/_file_store/download/{fid}')
46 pattern='/_file_store/download/{fid:.*}')
47 config.add_route(
47 config.add_route(
48 name='download_file_by_token',
48 name='download_file_by_token',
49 pattern='/_file_store/token-download/{_auth_token}/{fid}')
49 pattern='/_file_store/token-download/{_auth_token}/{fid:.*}')
50
50
51 # Scan module for configuration decorators.
51 # Scan module for configuration decorators.
52 config.scan('.views', ignore='.tests')
52 config.scan('.views', ignore='.tests')
@@ -1,240 +1,261 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import errno
23 import shutil
24 import shutil
24 import hashlib
25 import hashlib
25
26
26 from rhodecode.lib.ext_json import json
27 from rhodecode.lib.ext_json import json
27 from rhodecode.apps.file_store import utils
28 from rhodecode.apps.file_store import utils
28 from rhodecode.apps.file_store.extensions import resolve_extensions
29 from rhodecode.apps.file_store.extensions import resolve_extensions
29 from rhodecode.apps.file_store.exceptions import (
30 from rhodecode.apps.file_store.exceptions import (
30 FileNotAllowedException, FileOverSizeException)
31 FileNotAllowedException, FileOverSizeException)
31
32
32 METADATA_VER = 'v1'
33 METADATA_VER = 'v1'
33
34
34
35
36 def safe_make_dirs(dir_path):
37 if not os.path.exists(dir_path):
38 try:
39 os.makedirs(dir_path)
40 except OSError as e:
41 if e.errno != errno.EEXIST:
42 raise
43 return
44
45
35 class LocalFileStorage(object):
46 class LocalFileStorage(object):
36
47
37 @classmethod
48 @classmethod
49 def apply_counter(cls, counter, filename):
50 name_counted = '%d-%s' % (counter, filename)
51 return name_counted
52
53 @classmethod
38 def resolve_name(cls, name, directory):
54 def resolve_name(cls, name, directory):
39 """
55 """
40 Resolves a unique name and the correct path. If a filename
56 Resolves a unique name and the correct path. If a filename
41 for that path already exists then a numeric prefix with values > 0 will be
57 for that path already exists then a numeric prefix with values > 0 will be
42 added, for example test.jpg -> 1-test.jpg etc. initially file would have 0 prefix.
58 added, for example test.jpg -> 1-test.jpg etc. initially file would have 0 prefix.
43
59
44 :param name: base name of file
60 :param name: base name of file
45 :param directory: absolute directory path
61 :param directory: absolute directory path
46 """
62 """
47
63
48 counter = 0
64 counter = 0
49 while True:
65 while True:
50 name = '%d-%s' % (counter, name)
66 name_counted = cls.apply_counter(counter, name)
51
67
52 # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file
68 # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file
53 sub_store = cls._sub_store_from_filename(name)
69 sub_store = cls._sub_store_from_filename(name_counted)
54 sub_store_path = os.path.join(directory, sub_store)
70 sub_store_path = os.path.join(directory, sub_store)
55 if not os.path.exists(sub_store_path):
71 safe_make_dirs(sub_store_path)
56 os.makedirs(sub_store_path)
57
72
58 path = os.path.join(sub_store_path, name)
73 path = os.path.join(sub_store_path, name_counted)
59 if not os.path.exists(path):
74 if not os.path.exists(path):
60 return name, path
75 return name_counted, path
61 counter += 1
76 counter += 1
62
77
63 @classmethod
78 @classmethod
64 def _sub_store_from_filename(cls, filename):
79 def _sub_store_from_filename(cls, filename):
65 return filename[:2]
80 return filename[:2]
66
81
67 @classmethod
82 @classmethod
68 def calculate_path_hash(cls, file_path):
83 def calculate_path_hash(cls, file_path):
69 """
84 """
70 Efficient calculation of file_path sha256 sum
85 Efficient calculation of file_path sha256 sum
71
86
72 :param file_path:
87 :param file_path:
73 :return: sha256sum
88 :return: sha256sum
74 """
89 """
75 digest = hashlib.sha256()
90 digest = hashlib.sha256()
76 with open(file_path, 'rb') as f:
91 with open(file_path, 'rb') as f:
77 for chunk in iter(lambda: f.read(1024 * 100), b""):
92 for chunk in iter(lambda: f.read(1024 * 100), b""):
78 digest.update(chunk)
93 digest.update(chunk)
79
94
80 return digest.hexdigest()
95 return digest.hexdigest()
81
96
82 def __init__(self, base_path, extension_groups=None):
97 def __init__(self, base_path, extension_groups=None):
83
98
84 """
99 """
85 Local file storage
100 Local file storage
86
101
87 :param base_path: the absolute base path where uploads are stored
102 :param base_path: the absolute base path where uploads are stored
88 :param extension_groups: extensions string
103 :param extension_groups: extensions string
89 """
104 """
90
105
91 extension_groups = extension_groups or ['any']
106 extension_groups = extension_groups or ['any']
92 self.base_path = base_path
107 self.base_path = base_path
93 self.extensions = resolve_extensions([], groups=extension_groups)
108 self.extensions = resolve_extensions([], groups=extension_groups)
94
109
95 def __repr__(self):
110 def __repr__(self):
96 return '{}@{}'.format(self.__class__, self.base_path)
111 return '{}@{}'.format(self.__class__, self.base_path)
97
112
98 def store_path(self, filename):
113 def store_path(self, filename):
99 """
114 """
100 Returns absolute file path of the filename, joined to the
115 Returns absolute file path of the filename, joined to the
101 base_path.
116 base_path.
102
117
103 :param filename: base name of file
118 :param filename: base name of file
104 """
119 """
105 sub_store = self._sub_store_from_filename(filename)
120 prefix_dir = ''
106 return os.path.join(self.base_path, sub_store, filename)
121 if '/' in filename:
122 prefix_dir, filename = filename.split('/')
123 sub_store = self._sub_store_from_filename(filename)
124 else:
125 sub_store = self._sub_store_from_filename(filename)
126 return os.path.join(self.base_path, prefix_dir, sub_store, filename)
107
127
108 def delete(self, filename):
128 def delete(self, filename):
109 """
129 """
110 Deletes the filename. Filename is resolved with the
130 Deletes the filename. Filename is resolved with the
111 absolute path based on base_path. If file does not exist,
131 absolute path based on base_path. If file does not exist,
112 returns **False**, otherwise **True**
132 returns **False**, otherwise **True**
113
133
114 :param filename: base name of file
134 :param filename: base name of file
115 """
135 """
116 if self.exists(filename):
136 if self.exists(filename):
117 os.remove(self.store_path(filename))
137 os.remove(self.store_path(filename))
118 return True
138 return True
119 return False
139 return False
120
140
121 def exists(self, filename):
141 def exists(self, filename):
122 """
142 """
123 Checks if file exists. Resolves filename's absolute
143 Checks if file exists. Resolves filename's absolute
124 path based on base_path.
144 path based on base_path.
125
145
126 :param filename: base name of file
146 :param filename: file_uid name of file, e.g 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg
127 """
147 """
128 return os.path.exists(self.store_path(filename))
148 return os.path.exists(self.store_path(filename))
129
149
130 def filename_allowed(self, filename, extensions=None):
150 def filename_allowed(self, filename, extensions=None):
131 """Checks if a filename has an allowed extension
151 """Checks if a filename has an allowed extension
132
152
133 :param filename: base name of file
153 :param filename: base name of file
134 :param extensions: iterable of extensions (or self.extensions)
154 :param extensions: iterable of extensions (or self.extensions)
135 """
155 """
136 _, ext = os.path.splitext(filename)
156 _, ext = os.path.splitext(filename)
137 return self.extension_allowed(ext, extensions)
157 return self.extension_allowed(ext, extensions)
138
158
139 def extension_allowed(self, ext, extensions=None):
159 def extension_allowed(self, ext, extensions=None):
140 """
160 """
141 Checks if an extension is permitted. Both e.g. ".jpg" and
161 Checks if an extension is permitted. Both e.g. ".jpg" and
142 "jpg" can be passed in. Extension lookup is case-insensitive.
162 "jpg" can be passed in. Extension lookup is case-insensitive.
143
163
144 :param ext: extension to check
164 :param ext: extension to check
145 :param extensions: iterable of extensions to validate against (or self.extensions)
165 :param extensions: iterable of extensions to validate against (or self.extensions)
146 """
166 """
147 def normalize_ext(_ext):
167 def normalize_ext(_ext):
148 if _ext.startswith('.'):
168 if _ext.startswith('.'):
149 _ext = _ext[1:]
169 _ext = _ext[1:]
150 return _ext.lower()
170 return _ext.lower()
151
171
152 extensions = extensions or self.extensions
172 extensions = extensions or self.extensions
153 if not extensions:
173 if not extensions:
154 return True
174 return True
155
175
156 ext = normalize_ext(ext)
176 ext = normalize_ext(ext)
157
177
158 return ext in [normalize_ext(x) for x in extensions]
178 return ext in [normalize_ext(x) for x in extensions]
159
179
160 def save_file(self, file_obj, filename, directory=None, extensions=None,
180 def save_file(self, file_obj, filename, directory=None, extensions=None,
161 extra_metadata=None, max_filesize=None, **kwargs):
181 extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs):
162 """
182 """
163 Saves a file object to the uploads location.
183 Saves a file object to the uploads location.
164 Returns the resolved filename, i.e. the directory +
184 Returns the resolved filename, i.e. the directory +
165 the (randomized/incremented) base name.
185 the (randomized/incremented) base name.
166
186
167 :param file_obj: **cgi.FieldStorage** object (or similar)
187 :param file_obj: **cgi.FieldStorage** object (or similar)
168 :param filename: original filename
188 :param filename: original filename
169 :param directory: relative path of sub-directory
189 :param directory: relative path of sub-directory
170 :param extensions: iterable of allowed extensions, if not default
190 :param extensions: iterable of allowed extensions, if not default
171 :param max_filesize: maximum size of file that should be allowed
191 :param max_filesize: maximum size of file that should be allowed
192 :param randomized_name: generate random generated UID or fixed based on the filename
172 :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix
193 :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix
173
194
174 """
195 """
175
196
176 extensions = extensions or self.extensions
197 extensions = extensions or self.extensions
177
198
178 if not self.filename_allowed(filename, extensions):
199 if not self.filename_allowed(filename, extensions):
179 raise FileNotAllowedException()
200 raise FileNotAllowedException()
180
201
181 if directory:
202 if directory:
182 dest_directory = os.path.join(self.base_path, directory)
203 dest_directory = os.path.join(self.base_path, directory)
183 else:
204 else:
184 dest_directory = self.base_path
205 dest_directory = self.base_path
185
206
186 if not os.path.exists(dest_directory):
207 safe_make_dirs(dest_directory)
187 os.makedirs(dest_directory)
188
208
189 filename = utils.uid_filename(filename)
209 uid_filename = utils.uid_filename(filename, randomized=randomized_name)
190
210
191 # resolve also produces special sub-dir for file optimized store
211 # resolve also produces special sub-dir for file optimized store
192 filename, path = self.resolve_name(filename, dest_directory)
212 filename, path = self.resolve_name(uid_filename, dest_directory)
193 stored_file_dir = os.path.dirname(path)
213 stored_file_dir = os.path.dirname(path)
194
214
195 file_obj.seek(0)
215 file_obj.seek(0)
196
216
197 with open(path, "wb") as dest:
217 with open(path, "wb") as dest:
198 shutil.copyfileobj(file_obj, dest)
218 shutil.copyfileobj(file_obj, dest)
199
219
200 metadata = {}
220 metadata = {}
201 if extra_metadata:
221 if extra_metadata:
202 metadata = extra_metadata
222 metadata = extra_metadata
203
223
204 size = os.stat(path).st_size
224 size = os.stat(path).st_size
205
225
206 if max_filesize and size > max_filesize:
226 if max_filesize and size > max_filesize:
207 # free up the copied file, and raise exc
227 # free up the copied file, and raise exc
208 os.remove(path)
228 os.remove(path)
209 raise FileOverSizeException()
229 raise FileOverSizeException()
210
230
211 file_hash = self.calculate_path_hash(path)
231 file_hash = self.calculate_path_hash(path)
212
232
213 metadata.update(
233 metadata.update({
214 {"filename": filename,
234 "filename": filename,
215 "size": size,
235 "size": size,
216 "time": time.time(),
236 "time": time.time(),
217 "sha256": file_hash,
237 "sha256": file_hash,
218 "meta_ver": METADATA_VER})
238 "meta_ver": METADATA_VER
239 })
219
240
220 filename_meta = filename + '.meta'
241 filename_meta = filename + '.meta'
221 with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta:
242 with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta:
222 dest_meta.write(json.dumps(metadata))
243 dest_meta.write(json.dumps(metadata))
223
244
224 if directory:
245 if directory:
225 filename = os.path.join(directory, filename)
246 filename = os.path.join(directory, filename)
226
247
227 return filename, metadata
248 return filename, metadata
228
249
229 def get_metadata(self, filename):
250 def get_metadata(self, filename):
230 """
251 """
231 Reads JSON stored metadata for a file
252 Reads JSON stored metadata for a file
232
253
233 :param filename:
254 :param filename:
234 :return:
255 :return:
235 """
256 """
236 filename = self.store_path(filename)
257 filename = self.store_path(filename)
237 filename_meta = filename + '.meta'
258 filename_meta = filename + '.meta'
238
259
239 with open(filename_meta, "rb") as source_meta:
260 with open(filename_meta, "rb") as source_meta:
240 return json.loads(source_meta.read())
261 return json.loads(source_meta.read())
@@ -1,54 +1,58 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import uuid
22 import uuid
23
23 import StringIO
24 import pathlib2
24 import pathlib2
25
25
26
26
27 def get_file_storage(settings):
27 def get_file_storage(settings):
28 from rhodecode.apps.file_store.backends.local_store import LocalFileStorage
28 from rhodecode.apps.file_store.backends.local_store import LocalFileStorage
29 from rhodecode.apps.file_store import config_keys
29 from rhodecode.apps.file_store import config_keys
30 store_path = settings.get(config_keys.store_path)
30 store_path = settings.get(config_keys.store_path)
31 return LocalFileStorage(base_path=store_path)
31 return LocalFileStorage(base_path=store_path)
32
32
33
33
34 def splitext(filename):
34 def splitext(filename):
35 ext = ''.join(pathlib2.Path(filename).suffixes)
35 ext = ''.join(pathlib2.Path(filename).suffixes)
36 return filename, ext
36 return filename, ext
37
37
38
38
39 def uid_filename(filename, randomized=True):
39 def uid_filename(filename, randomized=True):
40 """
40 """
41 Generates a randomized or stable (uuid) filename,
41 Generates a randomized or stable (uuid) filename,
42 preserving the original extension.
42 preserving the original extension.
43
43
44 :param filename: the original filename
44 :param filename: the original filename
45 :param randomized: define if filename should be stable (sha1 based) or randomized
45 :param randomized: define if filename should be stable (sha1 based) or randomized
46 """
46 """
47
47
48 _, ext = splitext(filename)
48 _, ext = splitext(filename)
49 if randomized:
49 if randomized:
50 uid = uuid.uuid4()
50 uid = uuid.uuid4()
51 else:
51 else:
52 hash_key = '{}.{}'.format(filename, 'store')
52 hash_key = '{}.{}'.format(filename, 'store')
53 uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key)
53 uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key)
54 return str(uid) + ext.lower()
54 return str(uid) + ext.lower()
55
56
57 def bytes_to_file_obj(bytes_data):
58 return StringIO.StringIO(bytes_data)
@@ -1,195 +1,195 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import logging
20 import logging
21
21
22 from pyramid.view import view_config
22 from pyramid.view import view_config
23 from pyramid.response import FileResponse
23 from pyramid.response import FileResponse
24 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
24 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
25
25
26 from rhodecode.apps._base import BaseAppView
26 from rhodecode.apps._base import BaseAppView
27 from rhodecode.apps.file_store import utils
27 from rhodecode.apps.file_store import utils
28 from rhodecode.apps.file_store.exceptions import (
28 from rhodecode.apps.file_store.exceptions import (
29 FileNotAllowedException, FileOverSizeException)
29 FileNotAllowedException, FileOverSizeException)
30
30
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.lib import audit_logger
32 from rhodecode.lib import audit_logger
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny,
34 CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny,
35 LoginRequired)
35 LoginRequired)
36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
37 from rhodecode.model.db import Session, FileStore, UserApiKeys
37 from rhodecode.model.db import Session, FileStore, UserApiKeys
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 class FileStoreView(BaseAppView):
42 class FileStoreView(BaseAppView):
43 upload_key = 'store_file'
43 upload_key = 'store_file'
44
44
45 def load_default_context(self):
45 def load_default_context(self):
46 c = self._get_local_tmpl_context()
46 c = self._get_local_tmpl_context()
47 self.storage = utils.get_file_storage(self.request.registry.settings)
47 self.storage = utils.get_file_storage(self.request.registry.settings)
48 return c
48 return c
49
49
50 def _guess_type(self, file_name):
50 def _guess_type(self, file_name):
51 """
51 """
52 Our own type guesser for mimetypes using the rich DB
52 Our own type guesser for mimetypes using the rich DB
53 """
53 """
54 if not hasattr(self, 'db'):
54 if not hasattr(self, 'db'):
55 self.db = get_mimetypes_db()
55 self.db = get_mimetypes_db()
56 _content_type, _encoding = self.db.guess_type(file_name, strict=False)
56 _content_type, _encoding = self.db.guess_type(file_name, strict=False)
57 return _content_type, _encoding
57 return _content_type, _encoding
58
58
59 def _serve_file(self, file_uid):
59 def _serve_file(self, file_uid):
60
60
61 if not self.storage.exists(file_uid):
61 if not self.storage.exists(file_uid):
62 store_path = self.storage.store_path(file_uid)
62 store_path = self.storage.store_path(file_uid)
63 log.debug('File with FID:%s not found in the store under `%s`',
63 log.debug('File with FID:%s not found in the store under `%s`',
64 file_uid, store_path)
64 file_uid, store_path)
65 raise HTTPNotFound()
65 raise HTTPNotFound()
66
66
67 db_obj = FileStore().query().filter(FileStore.file_uid == file_uid).scalar()
67 db_obj = FileStore.get_by_store_uid(file_uid, safe=True)
68 if not db_obj:
68 if not db_obj:
69 raise HTTPNotFound()
69 raise HTTPNotFound()
70
70
71 # private upload for user
71 # private upload for user
72 if db_obj.check_acl and db_obj.scope_user_id:
72 if db_obj.check_acl and db_obj.scope_user_id:
73 log.debug('Artifact: checking scope access for bound artifact user: `%s`',
73 log.debug('Artifact: checking scope access for bound artifact user: `%s`',
74 db_obj.scope_user_id)
74 db_obj.scope_user_id)
75 user = db_obj.user
75 user = db_obj.user
76 if self._rhodecode_db_user.user_id != user.user_id:
76 if self._rhodecode_db_user.user_id != user.user_id:
77 log.warning('Access to file store object forbidden')
77 log.warning('Access to file store object forbidden')
78 raise HTTPNotFound()
78 raise HTTPNotFound()
79
79
80 # scoped to repository permissions
80 # scoped to repository permissions
81 if db_obj.check_acl and db_obj.scope_repo_id:
81 if db_obj.check_acl and db_obj.scope_repo_id:
82 log.debug('Artifact: checking scope access for bound artifact repo: `%s`',
82 log.debug('Artifact: checking scope access for bound artifact repo: `%s`',
83 db_obj.scope_repo_id)
83 db_obj.scope_repo_id)
84 repo = db_obj.repo
84 repo = db_obj.repo
85 perm_set = ['repository.read', 'repository.write', 'repository.admin']
85 perm_set = ['repository.read', 'repository.write', 'repository.admin']
86 has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check')
86 has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check')
87 if not has_perm:
87 if not has_perm:
88 log.warning('Access to file store object `%s` forbidden', file_uid)
88 log.warning('Access to file store object `%s` forbidden', file_uid)
89 raise HTTPNotFound()
89 raise HTTPNotFound()
90
90
91 # scoped to repository group permissions
91 # scoped to repository group permissions
92 if db_obj.check_acl and db_obj.scope_repo_group_id:
92 if db_obj.check_acl and db_obj.scope_repo_group_id:
93 log.debug('Artifact: checking scope access for bound artifact repo group: `%s`',
93 log.debug('Artifact: checking scope access for bound artifact repo group: `%s`',
94 db_obj.scope_repo_group_id)
94 db_obj.scope_repo_group_id)
95 repo_group = db_obj.repo_group
95 repo_group = db_obj.repo_group
96 perm_set = ['group.read', 'group.write', 'group.admin']
96 perm_set = ['group.read', 'group.write', 'group.admin']
97 has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check')
97 has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check')
98 if not has_perm:
98 if not has_perm:
99 log.warning('Access to file store object `%s` forbidden', file_uid)
99 log.warning('Access to file store object `%s` forbidden', file_uid)
100 raise HTTPNotFound()
100 raise HTTPNotFound()
101
101
102 FileStore.bump_access_counter(file_uid)
102 FileStore.bump_access_counter(file_uid)
103
103
104 file_path = self.storage.store_path(file_uid)
104 file_path = self.storage.store_path(file_uid)
105 content_type = 'application/octet-stream'
105 content_type = 'application/octet-stream'
106 content_encoding = None
106 content_encoding = None
107
107
108 _content_type, _encoding = self._guess_type(file_path)
108 _content_type, _encoding = self._guess_type(file_path)
109 if _content_type:
109 if _content_type:
110 content_type = _content_type
110 content_type = _content_type
111
111
112 # For file store we don't submit any session data, this logic tells the
112 # For file store we don't submit any session data, this logic tells the
113 # Session lib to skip it
113 # Session lib to skip it
114 setattr(self.request, '_file_response', True)
114 setattr(self.request, '_file_response', True)
115 return FileResponse(file_path, request=self.request,
115 return FileResponse(file_path, request=self.request,
116 content_type=content_type, content_encoding=content_encoding)
116 content_type=content_type, content_encoding=content_encoding)
117
117
118 @LoginRequired()
118 @LoginRequired()
119 @NotAnonymous()
119 @NotAnonymous()
120 @CSRFRequired()
120 @CSRFRequired()
121 @view_config(route_name='upload_file', request_method='POST', renderer='json_ext')
121 @view_config(route_name='upload_file', request_method='POST', renderer='json_ext')
122 def upload_file(self):
122 def upload_file(self):
123 self.load_default_context()
123 self.load_default_context()
124 file_obj = self.request.POST.get(self.upload_key)
124 file_obj = self.request.POST.get(self.upload_key)
125
125
126 if file_obj is None:
126 if file_obj is None:
127 return {'store_fid': None,
127 return {'store_fid': None,
128 'access_path': None,
128 'access_path': None,
129 'error': '{} data field is missing'.format(self.upload_key)}
129 'error': '{} data field is missing'.format(self.upload_key)}
130
130
131 if not hasattr(file_obj, 'filename'):
131 if not hasattr(file_obj, 'filename'):
132 return {'store_fid': None,
132 return {'store_fid': None,
133 'access_path': None,
133 'access_path': None,
134 'error': 'filename cannot be read from the data field'}
134 'error': 'filename cannot be read from the data field'}
135
135
136 filename = file_obj.filename
136 filename = file_obj.filename
137
137
138 metadata = {
138 metadata = {
139 'user_uploaded': {'username': self._rhodecode_user.username,
139 'user_uploaded': {'username': self._rhodecode_user.username,
140 'user_id': self._rhodecode_user.user_id,
140 'user_id': self._rhodecode_user.user_id,
141 'ip': self._rhodecode_user.ip_addr}}
141 'ip': self._rhodecode_user.ip_addr}}
142 try:
142 try:
143 store_uid, metadata = self.storage.save_file(
143 store_uid, metadata = self.storage.save_file(
144 file_obj.file, filename, extra_metadata=metadata)
144 file_obj.file, filename, extra_metadata=metadata)
145 except FileNotAllowedException:
145 except FileNotAllowedException:
146 return {'store_fid': None,
146 return {'store_fid': None,
147 'access_path': None,
147 'access_path': None,
148 'error': 'File {} is not allowed.'.format(filename)}
148 'error': 'File {} is not allowed.'.format(filename)}
149
149
150 except FileOverSizeException:
150 except FileOverSizeException:
151 return {'store_fid': None,
151 return {'store_fid': None,
152 'access_path': None,
152 'access_path': None,
153 'error': 'File {} is exceeding allowed limit.'.format(filename)}
153 'error': 'File {} is exceeding allowed limit.'.format(filename)}
154
154
155 try:
155 try:
156 entry = FileStore.create(
156 entry = FileStore.create(
157 file_uid=store_uid, filename=metadata["filename"],
157 file_uid=store_uid, filename=metadata["filename"],
158 file_hash=metadata["sha256"], file_size=metadata["size"],
158 file_hash=metadata["sha256"], file_size=metadata["size"],
159 file_description=u'upload attachment',
159 file_description=u'upload attachment',
160 check_acl=False, user_id=self._rhodecode_user.user_id
160 check_acl=False, user_id=self._rhodecode_user.user_id
161 )
161 )
162 Session().add(entry)
162 Session().add(entry)
163 Session().commit()
163 Session().commit()
164 log.debug('Stored upload in DB as %s', entry)
164 log.debug('Stored upload in DB as %s', entry)
165 except Exception:
165 except Exception:
166 log.exception('Failed to store file %s', filename)
166 log.exception('Failed to store file %s', filename)
167 return {'store_fid': None,
167 return {'store_fid': None,
168 'access_path': None,
168 'access_path': None,
169 'error': 'File {} failed to store in DB.'.format(filename)}
169 'error': 'File {} failed to store in DB.'.format(filename)}
170
170
171 return {'store_fid': store_uid,
171 return {'store_fid': store_uid,
172 'access_path': h.route_path('download_file', fid=store_uid)}
172 'access_path': h.route_path('download_file', fid=store_uid)}
173
173
174 # ACL is checked by scopes, if no scope the file is accessible to all
174 # ACL is checked by scopes, if no scope the file is accessible to all
175 @view_config(route_name='download_file')
175 @view_config(route_name='download_file')
176 def download_file(self):
176 def download_file(self):
177 self.load_default_context()
177 self.load_default_context()
178 file_uid = self.request.matchdict['fid']
178 file_uid = self.request.matchdict['fid']
179 log.debug('Requesting FID:%s from store %s', file_uid, self.storage)
179 log.debug('Requesting FID:%s from store %s', file_uid, self.storage)
180 return self._serve_file(file_uid)
180 return self._serve_file(file_uid)
181
181
182 # in addition to @LoginRequired ACL is checked by scopes
182 # in addition to @LoginRequired ACL is checked by scopes
183 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD])
183 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD])
184 @NotAnonymous()
184 @NotAnonymous()
185 @view_config(route_name='download_file_by_token')
185 @view_config(route_name='download_file_by_token')
186 def download_file_by_token(self):
186 def download_file_by_token(self):
187 """
187 """
188 Special view that allows to access the download file by special URL that
188 Special view that allows to access the download file by special URL that
189 is stored inside the URL.
189 is stored inside the URL.
190
190
191 http://example.com/_file_store/token-download/TOKEN/FILE_UID
191 http://example.com/_file_store/token-download/TOKEN/FILE_UID
192 """
192 """
193 self.load_default_context()
193 self.load_default_context()
194 file_uid = self.request.matchdict['fid']
194 file_uid = self.request.matchdict['fid']
195 return self._serve_file(file_uid)
195 return self._serve_file(file_uid)
@@ -1,533 +1,543 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 from rhodecode.apps._base import add_route_with_slash
20 from rhodecode.apps._base import add_route_with_slash
21
21
22
22
23 def includeme(config):
23 def includeme(config):
24
24
25 # repo creating checks, special cases that aren't repo routes
25 # repo creating checks, special cases that aren't repo routes
26 config.add_route(
26 config.add_route(
27 name='repo_creating',
27 name='repo_creating',
28 pattern='/{repo_name:.*?[^/]}/repo_creating')
28 pattern='/{repo_name:.*?[^/]}/repo_creating')
29
29
30 config.add_route(
30 config.add_route(
31 name='repo_creating_check',
31 name='repo_creating_check',
32 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
32 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
33
33
34 # Summary
34 # Summary
35 # NOTE(marcink): one additional route is defined in very bottom, catch
35 # NOTE(marcink): one additional route is defined in very bottom, catch
36 # all pattern
36 # all pattern
37 config.add_route(
37 config.add_route(
38 name='repo_summary_explicit',
38 name='repo_summary_explicit',
39 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
39 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
40 config.add_route(
40 config.add_route(
41 name='repo_summary_commits',
41 name='repo_summary_commits',
42 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
42 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
43
43
44 # Commits
44 # Commits
45 config.add_route(
45 config.add_route(
46 name='repo_commit',
46 name='repo_commit',
47 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
47 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
48
48
49 config.add_route(
49 config.add_route(
50 name='repo_commit_children',
50 name='repo_commit_children',
51 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
51 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
52
52
53 config.add_route(
53 config.add_route(
54 name='repo_commit_parents',
54 name='repo_commit_parents',
55 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
55 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
56
56
57 config.add_route(
57 config.add_route(
58 name='repo_commit_raw',
58 name='repo_commit_raw',
59 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
59 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
60
60
61 config.add_route(
61 config.add_route(
62 name='repo_commit_patch',
62 name='repo_commit_patch',
63 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
63 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
64
64
65 config.add_route(
65 config.add_route(
66 name='repo_commit_download',
66 name='repo_commit_download',
67 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
67 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
68
68
69 config.add_route(
69 config.add_route(
70 name='repo_commit_data',
70 name='repo_commit_data',
71 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
71 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
72
72
73 config.add_route(
73 config.add_route(
74 name='repo_commit_comment_create',
74 name='repo_commit_comment_create',
75 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
75 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
76
76
77 config.add_route(
77 config.add_route(
78 name='repo_commit_comment_preview',
78 name='repo_commit_comment_preview',
79 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
79 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
80
80
81 config.add_route(
81 config.add_route(
82 name='repo_commit_comment_history_view',
82 name='repo_commit_comment_history_view',
83 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True)
83 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True)
84
84
85 config.add_route(
85 config.add_route(
86 name='repo_commit_comment_attachment_upload',
86 name='repo_commit_comment_attachment_upload',
87 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
87 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
88
88
89 config.add_route(
89 config.add_route(
90 name='repo_commit_comment_delete',
90 name='repo_commit_comment_delete',
91 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
91 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
92
92
93 config.add_route(
93 config.add_route(
94 name='repo_commit_comment_edit',
94 name='repo_commit_comment_edit',
95 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
95 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
96
96
97 # still working url for backward compat.
97 # still working url for backward compat.
98 config.add_route(
98 config.add_route(
99 name='repo_commit_raw_deprecated',
99 name='repo_commit_raw_deprecated',
100 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
100 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
101
101
102 # Files
102 # Files
103 config.add_route(
103 config.add_route(
104 name='repo_archivefile',
104 name='repo_archivefile',
105 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
105 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
106
106
107 config.add_route(
107 config.add_route(
108 name='repo_files_diff',
108 name='repo_files_diff',
109 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
109 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
110 config.add_route( # legacy route to make old links work
110 config.add_route( # legacy route to make old links work
111 name='repo_files_diff_2way_redirect',
111 name='repo_files_diff_2way_redirect',
112 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
112 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
113
113
114 config.add_route(
114 config.add_route(
115 name='repo_files',
115 name='repo_files',
116 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
116 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
117 config.add_route(
117 config.add_route(
118 name='repo_files:default_path',
118 name='repo_files:default_path',
119 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
119 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
120 config.add_route(
120 config.add_route(
121 name='repo_files:default_commit',
121 name='repo_files:default_commit',
122 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
122 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
123
123
124 config.add_route(
124 config.add_route(
125 name='repo_files:rendered',
125 name='repo_files:rendered',
126 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
126 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
127
127
128 config.add_route(
128 config.add_route(
129 name='repo_files:annotated',
129 name='repo_files:annotated',
130 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
130 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
131 config.add_route(
131 config.add_route(
132 name='repo_files:annotated_previous',
132 name='repo_files:annotated_previous',
133 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
133 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
134
134
135 config.add_route(
135 config.add_route(
136 name='repo_nodetree_full',
136 name='repo_nodetree_full',
137 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
137 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
138 config.add_route(
138 config.add_route(
139 name='repo_nodetree_full:default_path',
139 name='repo_nodetree_full:default_path',
140 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
140 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
141
141
142 config.add_route(
142 config.add_route(
143 name='repo_files_nodelist',
143 name='repo_files_nodelist',
144 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
144 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
145
145
146 config.add_route(
146 config.add_route(
147 name='repo_file_raw',
147 name='repo_file_raw',
148 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
148 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
149
149
150 config.add_route(
150 config.add_route(
151 name='repo_file_download',
151 name='repo_file_download',
152 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
152 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
153 config.add_route( # backward compat to keep old links working
153 config.add_route( # backward compat to keep old links working
154 name='repo_file_download:legacy',
154 name='repo_file_download:legacy',
155 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
155 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
156 repo_route=True)
156 repo_route=True)
157
157
158 config.add_route(
158 config.add_route(
159 name='repo_file_history',
159 name='repo_file_history',
160 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
160 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
161
161
162 config.add_route(
162 config.add_route(
163 name='repo_file_authors',
163 name='repo_file_authors',
164 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
164 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
165
165
166 config.add_route(
166 config.add_route(
167 name='repo_files_check_head',
167 name='repo_files_check_head',
168 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
168 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
169 repo_route=True)
169 repo_route=True)
170 config.add_route(
170 config.add_route(
171 name='repo_files_remove_file',
171 name='repo_files_remove_file',
172 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
172 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
173 repo_route=True)
173 repo_route=True)
174 config.add_route(
174 config.add_route(
175 name='repo_files_delete_file',
175 name='repo_files_delete_file',
176 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
176 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
177 repo_route=True)
177 repo_route=True)
178 config.add_route(
178 config.add_route(
179 name='repo_files_edit_file',
179 name='repo_files_edit_file',
180 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
180 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
181 repo_route=True)
181 repo_route=True)
182 config.add_route(
182 config.add_route(
183 name='repo_files_update_file',
183 name='repo_files_update_file',
184 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
184 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
185 repo_route=True)
185 repo_route=True)
186 config.add_route(
186 config.add_route(
187 name='repo_files_add_file',
187 name='repo_files_add_file',
188 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
188 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
189 repo_route=True)
189 repo_route=True)
190 config.add_route(
190 config.add_route(
191 name='repo_files_upload_file',
191 name='repo_files_upload_file',
192 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
192 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
193 repo_route=True)
193 repo_route=True)
194 config.add_route(
194 config.add_route(
195 name='repo_files_create_file',
195 name='repo_files_create_file',
196 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
196 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
197 repo_route=True)
197 repo_route=True)
198
198
199 # Refs data
199 # Refs data
200 config.add_route(
200 config.add_route(
201 name='repo_refs_data',
201 name='repo_refs_data',
202 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
202 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
203
203
204 config.add_route(
204 config.add_route(
205 name='repo_refs_changelog_data',
205 name='repo_refs_changelog_data',
206 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
206 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
207
207
208 config.add_route(
208 config.add_route(
209 name='repo_stats',
209 name='repo_stats',
210 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
210 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
211
211
212 # Commits
212 # Commits
213 config.add_route(
213 config.add_route(
214 name='repo_commits',
214 name='repo_commits',
215 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
215 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
216 config.add_route(
216 config.add_route(
217 name='repo_commits_file',
217 name='repo_commits_file',
218 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
218 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
219 config.add_route(
219 config.add_route(
220 name='repo_commits_elements',
220 name='repo_commits_elements',
221 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
221 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
222 config.add_route(
222 config.add_route(
223 name='repo_commits_elements_file',
223 name='repo_commits_elements_file',
224 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
224 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
225
225
226 # Changelog (old deprecated name for commits page)
226 # Changelog (old deprecated name for commits page)
227 config.add_route(
227 config.add_route(
228 name='repo_changelog',
228 name='repo_changelog',
229 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
229 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
230 config.add_route(
230 config.add_route(
231 name='repo_changelog_file',
231 name='repo_changelog_file',
232 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
232 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
233
233
234 # Compare
234 # Compare
235 config.add_route(
235 config.add_route(
236 name='repo_compare_select',
236 name='repo_compare_select',
237 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
237 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
238
238
239 config.add_route(
239 config.add_route(
240 name='repo_compare',
240 name='repo_compare',
241 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
241 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
242
242
243 # Tags
243 # Tags
244 config.add_route(
244 config.add_route(
245 name='tags_home',
245 name='tags_home',
246 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
246 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
247
247
248 # Branches
248 # Branches
249 config.add_route(
249 config.add_route(
250 name='branches_home',
250 name='branches_home',
251 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
251 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
252
252
253 # Bookmarks
253 # Bookmarks
254 config.add_route(
254 config.add_route(
255 name='bookmarks_home',
255 name='bookmarks_home',
256 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
256 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
257
257
258 # Forks
258 # Forks
259 config.add_route(
259 config.add_route(
260 name='repo_fork_new',
260 name='repo_fork_new',
261 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
261 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
262 repo_forbid_when_archived=True,
262 repo_forbid_when_archived=True,
263 repo_accepted_types=['hg', 'git'])
263 repo_accepted_types=['hg', 'git'])
264
264
265 config.add_route(
265 config.add_route(
266 name='repo_fork_create',
266 name='repo_fork_create',
267 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
267 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
268 repo_forbid_when_archived=True,
268 repo_forbid_when_archived=True,
269 repo_accepted_types=['hg', 'git'])
269 repo_accepted_types=['hg', 'git'])
270
270
271 config.add_route(
271 config.add_route(
272 name='repo_forks_show_all',
272 name='repo_forks_show_all',
273 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
273 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
274 repo_accepted_types=['hg', 'git'])
274 repo_accepted_types=['hg', 'git'])
275 config.add_route(
275 config.add_route(
276 name='repo_forks_data',
276 name='repo_forks_data',
277 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
277 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
278 repo_accepted_types=['hg', 'git'])
278 repo_accepted_types=['hg', 'git'])
279
279
280 # Pull Requests
280 # Pull Requests
281 config.add_route(
281 config.add_route(
282 name='pullrequest_show',
282 name='pullrequest_show',
283 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
283 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
284 repo_route=True)
284 repo_route=True)
285
285
286 config.add_route(
286 config.add_route(
287 name='pullrequest_show_all',
287 name='pullrequest_show_all',
288 pattern='/{repo_name:.*?[^/]}/pull-request',
288 pattern='/{repo_name:.*?[^/]}/pull-request',
289 repo_route=True, repo_accepted_types=['hg', 'git'])
289 repo_route=True, repo_accepted_types=['hg', 'git'])
290
290
291 config.add_route(
291 config.add_route(
292 name='pullrequest_show_all_data',
292 name='pullrequest_show_all_data',
293 pattern='/{repo_name:.*?[^/]}/pull-request-data',
293 pattern='/{repo_name:.*?[^/]}/pull-request-data',
294 repo_route=True, repo_accepted_types=['hg', 'git'])
294 repo_route=True, repo_accepted_types=['hg', 'git'])
295
295
296 config.add_route(
296 config.add_route(
297 name='pullrequest_repo_refs',
297 name='pullrequest_repo_refs',
298 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
298 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
299 repo_route=True)
299 repo_route=True)
300
300
301 config.add_route(
301 config.add_route(
302 name='pullrequest_repo_targets',
302 name='pullrequest_repo_targets',
303 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
303 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
304 repo_route=True)
304 repo_route=True)
305
305
306 config.add_route(
306 config.add_route(
307 name='pullrequest_new',
307 name='pullrequest_new',
308 pattern='/{repo_name:.*?[^/]}/pull-request/new',
308 pattern='/{repo_name:.*?[^/]}/pull-request/new',
309 repo_route=True, repo_accepted_types=['hg', 'git'],
309 repo_route=True, repo_accepted_types=['hg', 'git'],
310 repo_forbid_when_archived=True)
310 repo_forbid_when_archived=True)
311
311
312 config.add_route(
312 config.add_route(
313 name='pullrequest_create',
313 name='pullrequest_create',
314 pattern='/{repo_name:.*?[^/]}/pull-request/create',
314 pattern='/{repo_name:.*?[^/]}/pull-request/create',
315 repo_route=True, repo_accepted_types=['hg', 'git'],
315 repo_route=True, repo_accepted_types=['hg', 'git'],
316 repo_forbid_when_archived=True)
316 repo_forbid_when_archived=True)
317
317
318 config.add_route(
318 config.add_route(
319 name='pullrequest_update',
319 name='pullrequest_update',
320 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
320 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
321 repo_route=True, repo_forbid_when_archived=True)
321 repo_route=True, repo_forbid_when_archived=True)
322
322
323 config.add_route(
323 config.add_route(
324 name='pullrequest_merge',
324 name='pullrequest_merge',
325 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
325 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
326 repo_route=True, repo_forbid_when_archived=True)
326 repo_route=True, repo_forbid_when_archived=True)
327
327
328 config.add_route(
328 config.add_route(
329 name='pullrequest_delete',
329 name='pullrequest_delete',
330 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
330 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
331 repo_route=True, repo_forbid_when_archived=True)
331 repo_route=True, repo_forbid_when_archived=True)
332
332
333 config.add_route(
333 config.add_route(
334 name='pullrequest_comment_create',
334 name='pullrequest_comment_create',
335 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
335 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
336 repo_route=True)
336 repo_route=True)
337
337
338 config.add_route(
338 config.add_route(
339 name='pullrequest_comment_edit',
339 name='pullrequest_comment_edit',
340 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
340 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
341 repo_route=True, repo_accepted_types=['hg', 'git'])
341 repo_route=True, repo_accepted_types=['hg', 'git'])
342
342
343 config.add_route(
343 config.add_route(
344 name='pullrequest_comment_delete',
344 name='pullrequest_comment_delete',
345 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
345 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
346 repo_route=True, repo_accepted_types=['hg', 'git'])
346 repo_route=True, repo_accepted_types=['hg', 'git'])
347
347
348 config.add_route(
349 name='pullrequest_comments',
350 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments',
351 repo_route=True)
352
353 config.add_route(
354 name='pullrequest_todos',
355 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos',
356 repo_route=True)
357
348 # Artifacts, (EE feature)
358 # Artifacts, (EE feature)
349 config.add_route(
359 config.add_route(
350 name='repo_artifacts_list',
360 name='repo_artifacts_list',
351 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
361 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
352
362
353 # Settings
363 # Settings
354 config.add_route(
364 config.add_route(
355 name='edit_repo',
365 name='edit_repo',
356 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
366 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
357 # update is POST on edit_repo
367 # update is POST on edit_repo
358
368
359 # Settings advanced
369 # Settings advanced
360 config.add_route(
370 config.add_route(
361 name='edit_repo_advanced',
371 name='edit_repo_advanced',
362 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
372 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
363 config.add_route(
373 config.add_route(
364 name='edit_repo_advanced_archive',
374 name='edit_repo_advanced_archive',
365 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
375 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
366 config.add_route(
376 config.add_route(
367 name='edit_repo_advanced_delete',
377 name='edit_repo_advanced_delete',
368 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
378 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
369 config.add_route(
379 config.add_route(
370 name='edit_repo_advanced_locking',
380 name='edit_repo_advanced_locking',
371 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
381 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
372 config.add_route(
382 config.add_route(
373 name='edit_repo_advanced_journal',
383 name='edit_repo_advanced_journal',
374 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
384 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
375 config.add_route(
385 config.add_route(
376 name='edit_repo_advanced_fork',
386 name='edit_repo_advanced_fork',
377 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
387 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
378
388
379 config.add_route(
389 config.add_route(
380 name='edit_repo_advanced_hooks',
390 name='edit_repo_advanced_hooks',
381 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
391 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
382
392
383 # Caches
393 # Caches
384 config.add_route(
394 config.add_route(
385 name='edit_repo_caches',
395 name='edit_repo_caches',
386 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
396 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
387
397
388 # Permissions
398 # Permissions
389 config.add_route(
399 config.add_route(
390 name='edit_repo_perms',
400 name='edit_repo_perms',
391 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
401 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
392
402
393 config.add_route(
403 config.add_route(
394 name='edit_repo_perms_set_private',
404 name='edit_repo_perms_set_private',
395 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
405 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
396
406
397 # Permissions Branch (EE feature)
407 # Permissions Branch (EE feature)
398 config.add_route(
408 config.add_route(
399 name='edit_repo_perms_branch',
409 name='edit_repo_perms_branch',
400 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
410 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
401 config.add_route(
411 config.add_route(
402 name='edit_repo_perms_branch_delete',
412 name='edit_repo_perms_branch_delete',
403 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
413 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
404 repo_route=True)
414 repo_route=True)
405
415
406 # Maintenance
416 # Maintenance
407 config.add_route(
417 config.add_route(
408 name='edit_repo_maintenance',
418 name='edit_repo_maintenance',
409 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
419 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
410
420
411 config.add_route(
421 config.add_route(
412 name='edit_repo_maintenance_execute',
422 name='edit_repo_maintenance_execute',
413 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
423 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
414
424
415 # Fields
425 # Fields
416 config.add_route(
426 config.add_route(
417 name='edit_repo_fields',
427 name='edit_repo_fields',
418 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
428 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
419 config.add_route(
429 config.add_route(
420 name='edit_repo_fields_create',
430 name='edit_repo_fields_create',
421 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
431 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
422 config.add_route(
432 config.add_route(
423 name='edit_repo_fields_delete',
433 name='edit_repo_fields_delete',
424 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
434 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
425
435
426 # Locking
436 # Locking
427 config.add_route(
437 config.add_route(
428 name='repo_edit_toggle_locking',
438 name='repo_edit_toggle_locking',
429 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
439 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
430
440
431 # Remote
441 # Remote
432 config.add_route(
442 config.add_route(
433 name='edit_repo_remote',
443 name='edit_repo_remote',
434 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
444 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
435 config.add_route(
445 config.add_route(
436 name='edit_repo_remote_pull',
446 name='edit_repo_remote_pull',
437 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
447 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
438 config.add_route(
448 config.add_route(
439 name='edit_repo_remote_push',
449 name='edit_repo_remote_push',
440 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
450 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
441
451
442 # Statistics
452 # Statistics
443 config.add_route(
453 config.add_route(
444 name='edit_repo_statistics',
454 name='edit_repo_statistics',
445 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
455 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
446 config.add_route(
456 config.add_route(
447 name='edit_repo_statistics_reset',
457 name='edit_repo_statistics_reset',
448 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
458 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
449
459
450 # Issue trackers
460 # Issue trackers
451 config.add_route(
461 config.add_route(
452 name='edit_repo_issuetracker',
462 name='edit_repo_issuetracker',
453 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
463 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
454 config.add_route(
464 config.add_route(
455 name='edit_repo_issuetracker_test',
465 name='edit_repo_issuetracker_test',
456 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
466 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
457 config.add_route(
467 config.add_route(
458 name='edit_repo_issuetracker_delete',
468 name='edit_repo_issuetracker_delete',
459 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
469 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
460 config.add_route(
470 config.add_route(
461 name='edit_repo_issuetracker_update',
471 name='edit_repo_issuetracker_update',
462 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
472 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
463
473
464 # VCS Settings
474 # VCS Settings
465 config.add_route(
475 config.add_route(
466 name='edit_repo_vcs',
476 name='edit_repo_vcs',
467 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
477 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
468 config.add_route(
478 config.add_route(
469 name='edit_repo_vcs_update',
479 name='edit_repo_vcs_update',
470 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
480 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
471
481
472 # svn pattern
482 # svn pattern
473 config.add_route(
483 config.add_route(
474 name='edit_repo_vcs_svn_pattern_delete',
484 name='edit_repo_vcs_svn_pattern_delete',
475 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
485 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
476
486
477 # Repo Review Rules (EE feature)
487 # Repo Review Rules (EE feature)
478 config.add_route(
488 config.add_route(
479 name='repo_reviewers',
489 name='repo_reviewers',
480 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
490 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
481
491
482 config.add_route(
492 config.add_route(
483 name='repo_default_reviewers_data',
493 name='repo_default_reviewers_data',
484 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
494 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
485
495
486 # Repo Automation (EE feature)
496 # Repo Automation (EE feature)
487 config.add_route(
497 config.add_route(
488 name='repo_automation',
498 name='repo_automation',
489 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
499 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
490
500
491 # Strip
501 # Strip
492 config.add_route(
502 config.add_route(
493 name='edit_repo_strip',
503 name='edit_repo_strip',
494 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
504 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
495
505
496 config.add_route(
506 config.add_route(
497 name='strip_check',
507 name='strip_check',
498 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
508 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
499
509
500 config.add_route(
510 config.add_route(
501 name='strip_execute',
511 name='strip_execute',
502 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
512 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
503
513
504 # Audit logs
514 # Audit logs
505 config.add_route(
515 config.add_route(
506 name='edit_repo_audit_logs',
516 name='edit_repo_audit_logs',
507 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
517 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
508
518
509 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
519 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
510 config.add_route(
520 config.add_route(
511 name='rss_feed_home',
521 name='rss_feed_home',
512 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
522 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
513
523
514 config.add_route(
524 config.add_route(
515 name='atom_feed_home',
525 name='atom_feed_home',
516 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
526 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
517
527
518 config.add_route(
528 config.add_route(
519 name='rss_feed_home_old',
529 name='rss_feed_home_old',
520 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
530 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
521
531
522 config.add_route(
532 config.add_route(
523 name='atom_feed_home_old',
533 name='atom_feed_home_old',
524 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
534 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
525
535
526 # NOTE(marcink): needs to be at the end for catch-all
536 # NOTE(marcink): needs to be at the end for catch-all
527 add_route_with_slash(
537 add_route_with_slash(
528 config,
538 config,
529 name='repo_summary',
539 name='repo_summary',
530 pattern='/{repo_name:.*?[^/]}', repo_route=True)
540 pattern='/{repo_name:.*?[^/]}', repo_route=True)
531
541
532 # Scan module for configuration decorators.
542 # Scan module for configuration decorators.
533 config.scan('.views', ignore='.tests')
543 config.scan('.views', ignore='.tests')
@@ -1,507 +1,494 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests import TestController
23 from rhodecode.tests import TestController
24
24
25 from rhodecode.model.db import ChangesetComment, Notification
25 from rhodecode.model.db import ChangesetComment, Notification
26 from rhodecode.model.meta import Session
26 from rhodecode.model.meta import Session
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28
28
29
29
30 def route_path(name, params=None, **kwargs):
30 def route_path(name, params=None, **kwargs):
31 import urllib
31 import urllib
32
32
33 base_url = {
33 base_url = {
34 'repo_commit': '/{repo_name}/changeset/{commit_id}',
34 'repo_commit': '/{repo_name}/changeset/{commit_id}',
35 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create',
35 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create',
36 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview',
36 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview',
37 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete',
37 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete',
38 'repo_commit_comment_edit': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit',
38 'repo_commit_comment_edit': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit',
39 }[name].format(**kwargs)
39 }[name].format(**kwargs)
40
40
41 if params:
41 if params:
42 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
42 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
43 return base_url
43 return base_url
44
44
45
45
46 @pytest.mark.backends("git", "hg", "svn")
46 @pytest.mark.backends("git", "hg", "svn")
47 class TestRepoCommitCommentsView(TestController):
47 class TestRepoCommitCommentsView(TestController):
48
48
49 @pytest.fixture(autouse=True)
49 @pytest.fixture(autouse=True)
50 def prepare(self, request, baseapp):
50 def prepare(self, request, baseapp):
51 for x in ChangesetComment.query().all():
51 for x in ChangesetComment.query().all():
52 Session().delete(x)
52 Session().delete(x)
53 Session().commit()
53 Session().commit()
54
54
55 for x in Notification.query().all():
55 for x in Notification.query().all():
56 Session().delete(x)
56 Session().delete(x)
57 Session().commit()
57 Session().commit()
58
58
59 request.addfinalizer(self.cleanup)
59 request.addfinalizer(self.cleanup)
60
60
61 def cleanup(self):
61 def cleanup(self):
62 for x in ChangesetComment.query().all():
62 for x in ChangesetComment.query().all():
63 Session().delete(x)
63 Session().delete(x)
64 Session().commit()
64 Session().commit()
65
65
66 for x in Notification.query().all():
66 for x in Notification.query().all():
67 Session().delete(x)
67 Session().delete(x)
68 Session().commit()
68 Session().commit()
69
69
70 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
70 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
71 def test_create(self, comment_type, backend):
71 def test_create(self, comment_type, backend):
72 self.log_user()
72 self.log_user()
73 commit = backend.repo.get_commit('300')
73 commit = backend.repo.get_commit('300')
74 commit_id = commit.raw_id
74 commit_id = commit.raw_id
75 text = u'CommentOnCommit'
75 text = u'CommentOnCommit'
76
76
77 params = {'text': text, 'csrf_token': self.csrf_token,
77 params = {'text': text, 'csrf_token': self.csrf_token,
78 'comment_type': comment_type}
78 'comment_type': comment_type}
79 self.app.post(
79 self.app.post(
80 route_path('repo_commit_comment_create',
80 route_path('repo_commit_comment_create',
81 repo_name=backend.repo_name, commit_id=commit_id),
81 repo_name=backend.repo_name, commit_id=commit_id),
82 params=params)
82 params=params)
83
83
84 response = self.app.get(
84 response = self.app.get(
85 route_path('repo_commit',
85 route_path('repo_commit',
86 repo_name=backend.repo_name, commit_id=commit_id))
86 repo_name=backend.repo_name, commit_id=commit_id))
87
87
88 # test DB
88 # test DB
89 assert ChangesetComment.query().count() == 1
89 assert ChangesetComment.query().count() == 1
90 assert_comment_links(response, ChangesetComment.query().count(), 0)
90 assert_comment_links(response, ChangesetComment.query().count(), 0)
91
91
92 assert Notification.query().count() == 1
92 assert Notification.query().count() == 1
93 assert ChangesetComment.query().count() == 1
93 assert ChangesetComment.query().count() == 1
94
94
95 notification = Notification.query().all()[0]
95 notification = Notification.query().all()[0]
96
96
97 comment_id = ChangesetComment.query().first().comment_id
97 comment_id = ChangesetComment.query().first().comment_id
98 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
98 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
99
99
100 author = notification.created_by_user.username_and_name
100 author = notification.created_by_user.username_and_name
101 sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format(
101 sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format(
102 author, comment_type, h.show_id(commit), backend.repo_name)
102 author, comment_type, h.show_id(commit), backend.repo_name)
103 assert sbj == notification.subject
103 assert sbj == notification.subject
104
104
105 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
105 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
106 backend.repo_name, commit_id, comment_id))
106 backend.repo_name, commit_id, comment_id))
107 assert lnk in notification.body
107 assert lnk in notification.body
108
108
109 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
109 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
110 def test_create_inline(self, comment_type, backend):
110 def test_create_inline(self, comment_type, backend):
111 self.log_user()
111 self.log_user()
112 commit = backend.repo.get_commit('300')
112 commit = backend.repo.get_commit('300')
113 commit_id = commit.raw_id
113 commit_id = commit.raw_id
114 text = u'CommentOnCommit'
114 text = u'CommentOnCommit'
115 f_path = 'vcs/web/simplevcs/views/repository.py'
115 f_path = 'vcs/web/simplevcs/views/repository.py'
116 line = 'n1'
116 line = 'n1'
117
117
118 params = {'text': text, 'f_path': f_path, 'line': line,
118 params = {'text': text, 'f_path': f_path, 'line': line,
119 'comment_type': comment_type,
119 'comment_type': comment_type,
120 'csrf_token': self.csrf_token}
120 'csrf_token': self.csrf_token}
121
121
122 self.app.post(
122 self.app.post(
123 route_path('repo_commit_comment_create',
123 route_path('repo_commit_comment_create',
124 repo_name=backend.repo_name, commit_id=commit_id),
124 repo_name=backend.repo_name, commit_id=commit_id),
125 params=params)
125 params=params)
126
126
127 response = self.app.get(
127 response = self.app.get(
128 route_path('repo_commit',
128 route_path('repo_commit',
129 repo_name=backend.repo_name, commit_id=commit_id))
129 repo_name=backend.repo_name, commit_id=commit_id))
130
130
131 # test DB
131 # test DB
132 assert ChangesetComment.query().count() == 1
132 assert ChangesetComment.query().count() == 1
133 assert_comment_links(response, 0, ChangesetComment.query().count())
133 assert_comment_links(response, 0, ChangesetComment.query().count())
134
134
135 if backend.alias == 'svn':
135 if backend.alias == 'svn':
136 response.mustcontain(
136 response.mustcontain(
137 '''data-f-path="vcs/commands/summary.py" '''
137 '''data-f-path="vcs/commands/summary.py" '''
138 '''data-anchor-id="c-300-ad05457a43f8"'''
138 '''data-anchor-id="c-300-ad05457a43f8"'''
139 )
139 )
140 if backend.alias == 'git':
140 if backend.alias == 'git':
141 response.mustcontain(
141 response.mustcontain(
142 '''data-f-path="vcs/backends/hg.py" '''
142 '''data-f-path="vcs/backends/hg.py" '''
143 '''data-anchor-id="c-883e775e89ea-9c390eb52cd6"'''
143 '''data-anchor-id="c-883e775e89ea-9c390eb52cd6"'''
144 )
144 )
145
145
146 if backend.alias == 'hg':
146 if backend.alias == 'hg':
147 response.mustcontain(
147 response.mustcontain(
148 '''data-f-path="vcs/backends/hg.py" '''
148 '''data-f-path="vcs/backends/hg.py" '''
149 '''data-anchor-id="c-e58d85a3973b-9c390eb52cd6"'''
149 '''data-anchor-id="c-e58d85a3973b-9c390eb52cd6"'''
150 )
150 )
151
151
152 assert Notification.query().count() == 1
152 assert Notification.query().count() == 1
153 assert ChangesetComment.query().count() == 1
153 assert ChangesetComment.query().count() == 1
154
154
155 notification = Notification.query().all()[0]
155 notification = Notification.query().all()[0]
156 comment = ChangesetComment.query().first()
156 comment = ChangesetComment.query().first()
157 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
157 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
158
158
159 assert comment.revision == commit_id
159 assert comment.revision == commit_id
160
160
161 author = notification.created_by_user.username_and_name
161 author = notification.created_by_user.username_and_name
162 sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format(
162 sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format(
163 author, comment_type, f_path, h.show_id(commit), backend.repo_name)
163 author, comment_type, f_path, h.show_id(commit), backend.repo_name)
164
164
165 assert sbj == notification.subject
165 assert sbj == notification.subject
166
166
167 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
167 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
168 backend.repo_name, commit_id, comment.comment_id))
168 backend.repo_name, commit_id, comment.comment_id))
169 assert lnk in notification.body
169 assert lnk in notification.body
170 assert 'on line n1' in notification.body
170 assert 'on line n1' in notification.body
171
171
172 def test_create_with_mention(self, backend):
172 def test_create_with_mention(self, backend):
173 self.log_user()
173 self.log_user()
174
174
175 commit_id = backend.repo.get_commit('300').raw_id
175 commit_id = backend.repo.get_commit('300').raw_id
176 text = u'@test_regular check CommentOnCommit'
176 text = u'@test_regular check CommentOnCommit'
177
177
178 params = {'text': text, 'csrf_token': self.csrf_token}
178 params = {'text': text, 'csrf_token': self.csrf_token}
179 self.app.post(
179 self.app.post(
180 route_path('repo_commit_comment_create',
180 route_path('repo_commit_comment_create',
181 repo_name=backend.repo_name, commit_id=commit_id),
181 repo_name=backend.repo_name, commit_id=commit_id),
182 params=params)
182 params=params)
183
183
184 response = self.app.get(
184 response = self.app.get(
185 route_path('repo_commit',
185 route_path('repo_commit',
186 repo_name=backend.repo_name, commit_id=commit_id))
186 repo_name=backend.repo_name, commit_id=commit_id))
187 # test DB
187 # test DB
188 assert ChangesetComment.query().count() == 1
188 assert ChangesetComment.query().count() == 1
189 assert_comment_links(response, ChangesetComment.query().count(), 0)
189 assert_comment_links(response, ChangesetComment.query().count(), 0)
190
190
191 notification = Notification.query().one()
191 notification = Notification.query().one()
192
192
193 assert len(notification.recipients) == 2
193 assert len(notification.recipients) == 2
194 users = [x.username for x in notification.recipients]
194 users = [x.username for x in notification.recipients]
195
195
196 # test_regular gets notification by @mention
196 # test_regular gets notification by @mention
197 assert sorted(users) == [u'test_admin', u'test_regular']
197 assert sorted(users) == [u'test_admin', u'test_regular']
198
198
199 def test_create_with_status_change(self, backend):
199 def test_create_with_status_change(self, backend):
200 self.log_user()
200 self.log_user()
201 commit = backend.repo.get_commit('300')
201 commit = backend.repo.get_commit('300')
202 commit_id = commit.raw_id
202 commit_id = commit.raw_id
203 text = u'CommentOnCommit'
203 text = u'CommentOnCommit'
204 f_path = 'vcs/web/simplevcs/views/repository.py'
204 f_path = 'vcs/web/simplevcs/views/repository.py'
205 line = 'n1'
205 line = 'n1'
206
206
207 params = {'text': text, 'changeset_status': 'approved',
207 params = {'text': text, 'changeset_status': 'approved',
208 'csrf_token': self.csrf_token}
208 'csrf_token': self.csrf_token}
209
209
210 self.app.post(
210 self.app.post(
211 route_path(
211 route_path(
212 'repo_commit_comment_create',
212 'repo_commit_comment_create',
213 repo_name=backend.repo_name, commit_id=commit_id),
213 repo_name=backend.repo_name, commit_id=commit_id),
214 params=params)
214 params=params)
215
215
216 response = self.app.get(
216 response = self.app.get(
217 route_path('repo_commit',
217 route_path('repo_commit',
218 repo_name=backend.repo_name, commit_id=commit_id))
218 repo_name=backend.repo_name, commit_id=commit_id))
219
219
220 # test DB
220 # test DB
221 assert ChangesetComment.query().count() == 1
221 assert ChangesetComment.query().count() == 1
222 assert_comment_links(response, ChangesetComment.query().count(), 0)
222 assert_comment_links(response, ChangesetComment.query().count(), 0)
223
223
224 assert Notification.query().count() == 1
224 assert Notification.query().count() == 1
225 assert ChangesetComment.query().count() == 1
225 assert ChangesetComment.query().count() == 1
226
226
227 notification = Notification.query().all()[0]
227 notification = Notification.query().all()[0]
228
228
229 comment_id = ChangesetComment.query().first().comment_id
229 comment_id = ChangesetComment.query().first().comment_id
230 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
230 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
231
231
232 author = notification.created_by_user.username_and_name
232 author = notification.created_by_user.username_and_name
233 sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format(
233 sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format(
234 author, h.show_id(commit), backend.repo_name)
234 author, h.show_id(commit), backend.repo_name)
235 assert sbj == notification.subject
235 assert sbj == notification.subject
236
236
237 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
237 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
238 backend.repo_name, commit_id, comment_id))
238 backend.repo_name, commit_id, comment_id))
239 assert lnk in notification.body
239 assert lnk in notification.body
240
240
241 def test_delete(self, backend):
241 def test_delete(self, backend):
242 self.log_user()
242 self.log_user()
243 commit_id = backend.repo.get_commit('300').raw_id
243 commit_id = backend.repo.get_commit('300').raw_id
244 text = u'CommentOnCommit'
244 text = u'CommentOnCommit'
245
245
246 params = {'text': text, 'csrf_token': self.csrf_token}
246 params = {'text': text, 'csrf_token': self.csrf_token}
247 self.app.post(
247 self.app.post(
248 route_path(
248 route_path(
249 'repo_commit_comment_create',
249 'repo_commit_comment_create',
250 repo_name=backend.repo_name, commit_id=commit_id),
250 repo_name=backend.repo_name, commit_id=commit_id),
251 params=params)
251 params=params)
252
252
253 comments = ChangesetComment.query().all()
253 comments = ChangesetComment.query().all()
254 assert len(comments) == 1
254 assert len(comments) == 1
255 comment_id = comments[0].comment_id
255 comment_id = comments[0].comment_id
256
256
257 self.app.post(
257 self.app.post(
258 route_path('repo_commit_comment_delete',
258 route_path('repo_commit_comment_delete',
259 repo_name=backend.repo_name,
259 repo_name=backend.repo_name,
260 commit_id=commit_id,
260 commit_id=commit_id,
261 comment_id=comment_id),
261 comment_id=comment_id),
262 params={'csrf_token': self.csrf_token})
262 params={'csrf_token': self.csrf_token})
263
263
264 comments = ChangesetComment.query().all()
264 comments = ChangesetComment.query().all()
265 assert len(comments) == 0
265 assert len(comments) == 0
266
266
267 response = self.app.get(
267 response = self.app.get(
268 route_path('repo_commit',
268 route_path('repo_commit',
269 repo_name=backend.repo_name, commit_id=commit_id))
269 repo_name=backend.repo_name, commit_id=commit_id))
270 assert_comment_links(response, 0, 0)
270 assert_comment_links(response, 0, 0)
271
271
272 def test_edit(self, backend):
272 def test_edit(self, backend):
273 self.log_user()
273 self.log_user()
274 commit_id = backend.repo.get_commit('300').raw_id
274 commit_id = backend.repo.get_commit('300').raw_id
275 text = u'CommentOnCommit'
275 text = u'CommentOnCommit'
276
276
277 params = {'text': text, 'csrf_token': self.csrf_token}
277 params = {'text': text, 'csrf_token': self.csrf_token}
278 self.app.post(
278 self.app.post(
279 route_path(
279 route_path(
280 'repo_commit_comment_create',
280 'repo_commit_comment_create',
281 repo_name=backend.repo_name, commit_id=commit_id),
281 repo_name=backend.repo_name, commit_id=commit_id),
282 params=params)
282 params=params)
283
283
284 comments = ChangesetComment.query().all()
284 comments = ChangesetComment.query().all()
285 assert len(comments) == 1
285 assert len(comments) == 1
286 comment_id = comments[0].comment_id
286 comment_id = comments[0].comment_id
287 test_text = 'test_text'
287 test_text = 'test_text'
288 self.app.post(
288 self.app.post(
289 route_path(
289 route_path(
290 'repo_commit_comment_edit',
290 'repo_commit_comment_edit',
291 repo_name=backend.repo_name,
291 repo_name=backend.repo_name,
292 commit_id=commit_id,
292 commit_id=commit_id,
293 comment_id=comment_id,
293 comment_id=comment_id,
294 ),
294 ),
295 params={
295 params={
296 'csrf_token': self.csrf_token,
296 'csrf_token': self.csrf_token,
297 'text': test_text,
297 'text': test_text,
298 'version': '0',
298 'version': '0',
299 })
299 })
300
300
301 text_form_db = ChangesetComment.query().filter(
301 text_form_db = ChangesetComment.query().filter(
302 ChangesetComment.comment_id == comment_id).first().text
302 ChangesetComment.comment_id == comment_id).first().text
303 assert test_text == text_form_db
303 assert test_text == text_form_db
304
304
305 def test_edit_without_change(self, backend):
305 def test_edit_without_change(self, backend):
306 self.log_user()
306 self.log_user()
307 commit_id = backend.repo.get_commit('300').raw_id
307 commit_id = backend.repo.get_commit('300').raw_id
308 text = u'CommentOnCommit'
308 text = u'CommentOnCommit'
309
309
310 params = {'text': text, 'csrf_token': self.csrf_token}
310 params = {'text': text, 'csrf_token': self.csrf_token}
311 self.app.post(
311 self.app.post(
312 route_path(
312 route_path(
313 'repo_commit_comment_create',
313 'repo_commit_comment_create',
314 repo_name=backend.repo_name, commit_id=commit_id),
314 repo_name=backend.repo_name, commit_id=commit_id),
315 params=params)
315 params=params)
316
316
317 comments = ChangesetComment.query().all()
317 comments = ChangesetComment.query().all()
318 assert len(comments) == 1
318 assert len(comments) == 1
319 comment_id = comments[0].comment_id
319 comment_id = comments[0].comment_id
320
320
321 response = self.app.post(
321 response = self.app.post(
322 route_path(
322 route_path(
323 'repo_commit_comment_edit',
323 'repo_commit_comment_edit',
324 repo_name=backend.repo_name,
324 repo_name=backend.repo_name,
325 commit_id=commit_id,
325 commit_id=commit_id,
326 comment_id=comment_id,
326 comment_id=comment_id,
327 ),
327 ),
328 params={
328 params={
329 'csrf_token': self.csrf_token,
329 'csrf_token': self.csrf_token,
330 'text': text,
330 'text': text,
331 'version': '0',
331 'version': '0',
332 },
332 },
333 status=404,
333 status=404,
334 )
334 )
335 assert response.status_int == 404
335 assert response.status_int == 404
336
336
337 def test_edit_try_edit_already_edited(self, backend):
337 def test_edit_try_edit_already_edited(self, backend):
338 self.log_user()
338 self.log_user()
339 commit_id = backend.repo.get_commit('300').raw_id
339 commit_id = backend.repo.get_commit('300').raw_id
340 text = u'CommentOnCommit'
340 text = u'CommentOnCommit'
341
341
342 params = {'text': text, 'csrf_token': self.csrf_token}
342 params = {'text': text, 'csrf_token': self.csrf_token}
343 self.app.post(
343 self.app.post(
344 route_path(
344 route_path(
345 'repo_commit_comment_create',
345 'repo_commit_comment_create',
346 repo_name=backend.repo_name, commit_id=commit_id
346 repo_name=backend.repo_name, commit_id=commit_id
347 ),
347 ),
348 params=params,
348 params=params,
349 )
349 )
350
350
351 comments = ChangesetComment.query().all()
351 comments = ChangesetComment.query().all()
352 assert len(comments) == 1
352 assert len(comments) == 1
353 comment_id = comments[0].comment_id
353 comment_id = comments[0].comment_id
354 test_text = 'test_text'
354 test_text = 'test_text'
355 self.app.post(
355 self.app.post(
356 route_path(
356 route_path(
357 'repo_commit_comment_edit',
357 'repo_commit_comment_edit',
358 repo_name=backend.repo_name,
358 repo_name=backend.repo_name,
359 commit_id=commit_id,
359 commit_id=commit_id,
360 comment_id=comment_id,
360 comment_id=comment_id,
361 ),
361 ),
362 params={
362 params={
363 'csrf_token': self.csrf_token,
363 'csrf_token': self.csrf_token,
364 'text': test_text,
364 'text': test_text,
365 'version': '0',
365 'version': '0',
366 }
366 }
367 )
367 )
368 test_text_v2 = 'test_v2'
368 test_text_v2 = 'test_v2'
369 response = self.app.post(
369 response = self.app.post(
370 route_path(
370 route_path(
371 'repo_commit_comment_edit',
371 'repo_commit_comment_edit',
372 repo_name=backend.repo_name,
372 repo_name=backend.repo_name,
373 commit_id=commit_id,
373 commit_id=commit_id,
374 comment_id=comment_id,
374 comment_id=comment_id,
375 ),
375 ),
376 params={
376 params={
377 'csrf_token': self.csrf_token,
377 'csrf_token': self.csrf_token,
378 'text': test_text_v2,
378 'text': test_text_v2,
379 'version': '0',
379 'version': '0',
380 },
380 },
381 status=409,
381 status=409,
382 )
382 )
383 assert response.status_int == 409
383 assert response.status_int == 409
384
384
385 text_form_db = ChangesetComment.query().filter(
385 text_form_db = ChangesetComment.query().filter(
386 ChangesetComment.comment_id == comment_id).first().text
386 ChangesetComment.comment_id == comment_id).first().text
387
387
388 assert test_text == text_form_db
388 assert test_text == text_form_db
389 assert test_text_v2 != text_form_db
389 assert test_text_v2 != text_form_db
390
390
391 def test_edit_forbidden_for_immutable_comments(self, backend):
391 def test_edit_forbidden_for_immutable_comments(self, backend):
392 self.log_user()
392 self.log_user()
393 commit_id = backend.repo.get_commit('300').raw_id
393 commit_id = backend.repo.get_commit('300').raw_id
394 text = u'CommentOnCommit'
394 text = u'CommentOnCommit'
395
395
396 params = {'text': text, 'csrf_token': self.csrf_token, 'version': '0'}
396 params = {'text': text, 'csrf_token': self.csrf_token, 'version': '0'}
397 self.app.post(
397 self.app.post(
398 route_path(
398 route_path(
399 'repo_commit_comment_create',
399 'repo_commit_comment_create',
400 repo_name=backend.repo_name,
400 repo_name=backend.repo_name,
401 commit_id=commit_id,
401 commit_id=commit_id,
402 ),
402 ),
403 params=params
403 params=params
404 )
404 )
405
405
406 comments = ChangesetComment.query().all()
406 comments = ChangesetComment.query().all()
407 assert len(comments) == 1
407 assert len(comments) == 1
408 comment_id = comments[0].comment_id
408 comment_id = comments[0].comment_id
409
409
410 comment = ChangesetComment.get(comment_id)
410 comment = ChangesetComment.get(comment_id)
411 comment.immutable_state = ChangesetComment.OP_IMMUTABLE
411 comment.immutable_state = ChangesetComment.OP_IMMUTABLE
412 Session().add(comment)
412 Session().add(comment)
413 Session().commit()
413 Session().commit()
414
414
415 response = self.app.post(
415 response = self.app.post(
416 route_path(
416 route_path(
417 'repo_commit_comment_edit',
417 'repo_commit_comment_edit',
418 repo_name=backend.repo_name,
418 repo_name=backend.repo_name,
419 commit_id=commit_id,
419 commit_id=commit_id,
420 comment_id=comment_id,
420 comment_id=comment_id,
421 ),
421 ),
422 params={
422 params={
423 'csrf_token': self.csrf_token,
423 'csrf_token': self.csrf_token,
424 'text': 'test_text',
424 'text': 'test_text',
425 },
425 },
426 status=403,
426 status=403,
427 )
427 )
428 assert response.status_int == 403
428 assert response.status_int == 403
429
429
430 def test_delete_forbidden_for_immutable_comments(self, backend):
430 def test_delete_forbidden_for_immutable_comments(self, backend):
431 self.log_user()
431 self.log_user()
432 commit_id = backend.repo.get_commit('300').raw_id
432 commit_id = backend.repo.get_commit('300').raw_id
433 text = u'CommentOnCommit'
433 text = u'CommentOnCommit'
434
434
435 params = {'text': text, 'csrf_token': self.csrf_token}
435 params = {'text': text, 'csrf_token': self.csrf_token}
436 self.app.post(
436 self.app.post(
437 route_path(
437 route_path(
438 'repo_commit_comment_create',
438 'repo_commit_comment_create',
439 repo_name=backend.repo_name, commit_id=commit_id),
439 repo_name=backend.repo_name, commit_id=commit_id),
440 params=params)
440 params=params)
441
441
442 comments = ChangesetComment.query().all()
442 comments = ChangesetComment.query().all()
443 assert len(comments) == 1
443 assert len(comments) == 1
444 comment_id = comments[0].comment_id
444 comment_id = comments[0].comment_id
445
445
446 comment = ChangesetComment.get(comment_id)
446 comment = ChangesetComment.get(comment_id)
447 comment.immutable_state = ChangesetComment.OP_IMMUTABLE
447 comment.immutable_state = ChangesetComment.OP_IMMUTABLE
448 Session().add(comment)
448 Session().add(comment)
449 Session().commit()
449 Session().commit()
450
450
451 self.app.post(
451 self.app.post(
452 route_path('repo_commit_comment_delete',
452 route_path('repo_commit_comment_delete',
453 repo_name=backend.repo_name,
453 repo_name=backend.repo_name,
454 commit_id=commit_id,
454 commit_id=commit_id,
455 comment_id=comment_id),
455 comment_id=comment_id),
456 params={'csrf_token': self.csrf_token},
456 params={'csrf_token': self.csrf_token},
457 status=403)
457 status=403)
458
458
459 @pytest.mark.parametrize('renderer, text_input, output', [
459 @pytest.mark.parametrize('renderer, text_input, output', [
460 ('rst', 'plain text', '<p>plain text</p>'),
460 ('rst', 'plain text', '<p>plain text</p>'),
461 ('rst', 'header\n======', '<h1 class="title">header</h1>'),
461 ('rst', 'header\n======', '<h1 class="title">header</h1>'),
462 ('rst', '*italics*', '<em>italics</em>'),
462 ('rst', '*italics*', '<em>italics</em>'),
463 ('rst', '**bold**', '<strong>bold</strong>'),
463 ('rst', '**bold**', '<strong>bold</strong>'),
464 ('markdown', 'plain text', '<p>plain text</p>'),
464 ('markdown', 'plain text', '<p>plain text</p>'),
465 ('markdown', '# header', '<h1>header</h1>'),
465 ('markdown', '# header', '<h1>header</h1>'),
466 ('markdown', '*italics*', '<em>italics</em>'),
466 ('markdown', '*italics*', '<em>italics</em>'),
467 ('markdown', '**bold**', '<strong>bold</strong>'),
467 ('markdown', '**bold**', '<strong>bold</strong>'),
468 ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
468 ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
469 'md-header', 'md-italics', 'md-bold', ])
469 'md-header', 'md-italics', 'md-bold', ])
470 def test_preview(self, renderer, text_input, output, backend, xhr_header):
470 def test_preview(self, renderer, text_input, output, backend, xhr_header):
471 self.log_user()
471 self.log_user()
472 params = {
472 params = {
473 'renderer': renderer,
473 'renderer': renderer,
474 'text': text_input,
474 'text': text_input,
475 'csrf_token': self.csrf_token
475 'csrf_token': self.csrf_token
476 }
476 }
477 commit_id = '0' * 16 # fake this for tests
477 commit_id = '0' * 16 # fake this for tests
478 response = self.app.post(
478 response = self.app.post(
479 route_path('repo_commit_comment_preview',
479 route_path('repo_commit_comment_preview',
480 repo_name=backend.repo_name, commit_id=commit_id,),
480 repo_name=backend.repo_name, commit_id=commit_id,),
481 params=params,
481 params=params,
482 extra_environ=xhr_header)
482 extra_environ=xhr_header)
483
483
484 response.mustcontain(output)
484 response.mustcontain(output)
485
485
486
486
487 def assert_comment_links(response, comments, inline_comments):
487 def assert_comment_links(response, comments, inline_comments):
488 if comments == 1:
488 response.mustcontain(
489 comments_text = "%d General" % comments
489 '<span class="display-none" id="general-comments-count">{}</span>'.format(comments))
490 else:
490 response.mustcontain(
491 comments_text = "%d General" % comments
491 '<span class="display-none" id="inline-comments-count">{}</span>'.format(inline_comments))
492
493 if inline_comments == 1:
494 inline_comments_text = "%d Inline" % inline_comments
495 else:
496 inline_comments_text = "%d Inline" % inline_comments
497
492
498 if comments:
499 response.mustcontain('<a href="#comments">%s</a>,' % comments_text)
500 else:
501 response.mustcontain(comments_text)
502
493
503 if inline_comments:
494
504 response.mustcontain(
505 'id="inline-comments-counter">%s' % inline_comments_text)
506 else:
507 response.mustcontain(inline_comments_text)
@@ -1,667 +1,672 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import lxml.html
23 import lxml.html
24
24
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.tests import assert_session_flash
26 from rhodecode.tests import assert_session_flash
27 from rhodecode.tests.utils import AssertResponse, commit_change
27 from rhodecode.tests.utils import AssertResponse, commit_change
28
28
29
29
30 def route_path(name, params=None, **kwargs):
30 def route_path(name, params=None, **kwargs):
31 import urllib
31 import urllib
32
32
33 base_url = {
33 base_url = {
34 'repo_compare_select': '/{repo_name}/compare',
34 'repo_compare_select': '/{repo_name}/compare',
35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
36 }[name].format(**kwargs)
36 }[name].format(**kwargs)
37
37
38 if params:
38 if params:
39 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
39 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
40 return base_url
40 return base_url
41
41
42
42
43 @pytest.mark.usefixtures("autologin_user", "app")
43 @pytest.mark.usefixtures("autologin_user", "app")
44 class TestCompareView(object):
44 class TestCompareView(object):
45
45
46 def test_compare_index_is_reached_at_least_once(self, backend):
46 def test_compare_index_is_reached_at_least_once(self, backend):
47 repo = backend.repo
47 repo = backend.repo
48 self.app.get(
48 self.app.get(
49 route_path('repo_compare_select', repo_name=repo.repo_name))
49 route_path('repo_compare_select', repo_name=repo.repo_name))
50
50
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 def test_compare_remote_with_different_commit_indexes(self, backend):
52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 # Preparing the following repository structure:
53 # Preparing the following repository structure:
54 #
54 #
55 # Origin repository has two commits:
55 # Origin repository has two commits:
56 #
56 #
57 # 0 1
57 # 0 1
58 # A -- D
58 # A -- D
59 #
59 #
60 # The fork of it has a few more commits and "D" has a commit index
60 # The fork of it has a few more commits and "D" has a commit index
61 # which does not exist in origin.
61 # which does not exist in origin.
62 #
62 #
63 # 0 1 2 3 4
63 # 0 1 2 3 4
64 # A -- -- -- D -- E
64 # A -- -- -- D -- E
65 # \- B -- C
65 # \- B -- C
66 #
66 #
67
67
68 fork = backend.create_repo()
68 fork = backend.create_repo()
69
69
70 # prepare fork
70 # prepare fork
71 commit0 = commit_change(
71 commit0 = commit_change(
72 fork.repo_name, filename='file1', content='A',
72 fork.repo_name, filename='file1', content='A',
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74
74
75 commit1 = commit_change(
75 commit1 = commit_change(
76 fork.repo_name, filename='file1', content='B',
76 fork.repo_name, filename='file1', content='B',
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78
78
79 commit_change( # commit 2
79 commit_change( # commit 2
80 fork.repo_name, filename='file1', content='C',
80 fork.repo_name, filename='file1', content='C',
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82
82
83 commit3 = commit_change(
83 commit3 = commit_change(
84 fork.repo_name, filename='file1', content='D',
84 fork.repo_name, filename='file1', content='D',
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86
86
87 commit4 = commit_change(
87 commit4 = commit_change(
88 fork.repo_name, filename='file1', content='E',
88 fork.repo_name, filename='file1', content='E',
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90
90
91 # prepare origin repository, taking just the history up to D
91 # prepare origin repository, taking just the history up to D
92 origin = backend.create_repo()
92 origin = backend.create_repo()
93
93
94 origin_repo = origin.scm_instance(cache=False)
94 origin_repo = origin.scm_instance(cache=False)
95 origin_repo.config.clear_section('hooks')
95 origin_repo.config.clear_section('hooks')
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
98
98
99 # Verify test fixture setup
99 # Verify test fixture setup
100 # This does not work for git
100 # This does not work for git
101 if backend.alias != 'git':
101 if backend.alias != 'git':
102 assert 5 == len(fork.scm_instance().commit_ids)
102 assert 5 == len(fork.scm_instance().commit_ids)
103 assert 2 == len(origin_repo.commit_ids)
103 assert 2 == len(origin_repo.commit_ids)
104
104
105 # Comparing the revisions
105 # Comparing the revisions
106 response = self.app.get(
106 response = self.app.get(
107 route_path('repo_compare',
107 route_path('repo_compare',
108 repo_name=origin.repo_name,
108 repo_name=origin.repo_name,
109 source_ref_type="rev", source_ref=commit3.raw_id,
109 source_ref_type="rev", source_ref=commit3.raw_id,
110 target_ref_type="rev", target_ref=commit4.raw_id,
110 target_ref_type="rev", target_ref=commit4.raw_id,
111 params=dict(merge='1', target_repo=fork.repo_name)
111 params=dict(merge='1', target_repo=fork.repo_name)
112 ))
112 ))
113
113
114 compare_page = ComparePage(response)
114 compare_page = ComparePage(response)
115 compare_page.contains_commits([commit4])
115 compare_page.contains_commits([commit4])
116
116
117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
118 def test_compare_forks_on_branch_extra_commits(self, backend):
118 def test_compare_forks_on_branch_extra_commits(self, backend):
119 repo1 = backend.create_repo()
119 repo1 = backend.create_repo()
120
120
121 # commit something !
121 # commit something !
122 commit0 = commit_change(
122 commit0 = commit_change(
123 repo1.repo_name, filename='file1', content='line1\n',
123 repo1.repo_name, filename='file1', content='line1\n',
124 message='commit1', vcs_type=backend.alias, parent=None,
124 message='commit1', vcs_type=backend.alias, parent=None,
125 newfile=True)
125 newfile=True)
126
126
127 # fork this repo
127 # fork this repo
128 repo2 = backend.create_fork()
128 repo2 = backend.create_fork()
129
129
130 # add two extra commit into fork
130 # add two extra commit into fork
131 commit1 = commit_change(
131 commit1 = commit_change(
132 repo2.repo_name, filename='file1', content='line1\nline2\n',
132 repo2.repo_name, filename='file1', content='line1\nline2\n',
133 message='commit2', vcs_type=backend.alias, parent=commit0)
133 message='commit2', vcs_type=backend.alias, parent=commit0)
134
134
135 commit2 = commit_change(
135 commit2 = commit_change(
136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
137 message='commit3', vcs_type=backend.alias, parent=commit1)
137 message='commit3', vcs_type=backend.alias, parent=commit1)
138
138
139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
141
141
142 response = self.app.get(
142 response = self.app.get(
143 route_path('repo_compare',
143 route_path('repo_compare',
144 repo_name=repo1.repo_name,
144 repo_name=repo1.repo_name,
145 source_ref_type="branch", source_ref=commit_id2,
145 source_ref_type="branch", source_ref=commit_id2,
146 target_ref_type="branch", target_ref=commit_id1,
146 target_ref_type="branch", target_ref=commit_id1,
147 params=dict(merge='1', target_repo=repo2.repo_name)
147 params=dict(merge='1', target_repo=repo2.repo_name)
148 ))
148 ))
149
149
150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
152
152
153 compare_page = ComparePage(response)
153 compare_page = ComparePage(response)
154 compare_page.contains_change_summary(1, 2, 0)
154 compare_page.contains_change_summary(1, 2, 0)
155 compare_page.contains_commits([commit1, commit2])
155 compare_page.contains_commits([commit1, commit2])
156
156
157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
159
159
160 # Swap is removed when comparing branches since it's a PR feature and
160 # Swap is removed when comparing branches since it's a PR feature and
161 # it is then a preview mode
161 # it is then a preview mode
162 compare_page.swap_is_hidden()
162 compare_page.swap_is_hidden()
163 compare_page.target_source_are_disabled()
163 compare_page.target_source_are_disabled()
164
164
165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
167 repo1 = backend.create_repo()
167 repo1 = backend.create_repo()
168
168
169 # commit something !
169 # commit something !
170 commit0 = commit_change(
170 commit0 = commit_change(
171 repo1.repo_name, filename='file1', content='line1\n',
171 repo1.repo_name, filename='file1', content='line1\n',
172 message='commit1', vcs_type=backend.alias, parent=None,
172 message='commit1', vcs_type=backend.alias, parent=None,
173 newfile=True)
173 newfile=True)
174
174
175 # fork this repo
175 # fork this repo
176 repo2 = backend.create_fork()
176 repo2 = backend.create_fork()
177
177
178 # now commit something to origin repo
178 # now commit something to origin repo
179 commit_change(
179 commit_change(
180 repo1.repo_name, filename='file2', content='line1file2\n',
180 repo1.repo_name, filename='file2', content='line1file2\n',
181 message='commit2', vcs_type=backend.alias, parent=commit0,
181 message='commit2', vcs_type=backend.alias, parent=commit0,
182 newfile=True)
182 newfile=True)
183
183
184 # add two extra commit into fork
184 # add two extra commit into fork
185 commit1 = commit_change(
185 commit1 = commit_change(
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
187 message='commit2', vcs_type=backend.alias, parent=commit0)
187 message='commit2', vcs_type=backend.alias, parent=commit0)
188
188
189 commit2 = commit_change(
189 commit2 = commit_change(
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
191 message='commit3', vcs_type=backend.alias, parent=commit1)
191 message='commit3', vcs_type=backend.alias, parent=commit1)
192
192
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
195
195
196 response = self.app.get(
196 response = self.app.get(
197 route_path('repo_compare',
197 route_path('repo_compare',
198 repo_name=repo1.repo_name,
198 repo_name=repo1.repo_name,
199 source_ref_type="branch", source_ref=commit_id2,
199 source_ref_type="branch", source_ref=commit_id2,
200 target_ref_type="branch", target_ref=commit_id1,
200 target_ref_type="branch", target_ref=commit_id1,
201 params=dict(merge='1', target_repo=repo2.repo_name),
201 params=dict(merge='1', target_repo=repo2.repo_name),
202 ))
202 ))
203
203
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
206
206
207 compare_page = ComparePage(response)
207 compare_page = ComparePage(response)
208 compare_page.contains_change_summary(1, 2, 0)
208 compare_page.contains_change_summary(1, 2, 0)
209 compare_page.contains_commits([commit1, commit2])
209 compare_page.contains_commits([commit1, commit2])
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
212
212
213 # Swap is removed when comparing branches since it's a PR feature and
213 # Swap is removed when comparing branches since it's a PR feature and
214 # it is then a preview mode
214 # it is then a preview mode
215 compare_page.swap_is_hidden()
215 compare_page.swap_is_hidden()
216 compare_page.target_source_are_disabled()
216 compare_page.target_source_are_disabled()
217
217
218 @pytest.mark.xfail_backends("svn")
218 @pytest.mark.xfail_backends("svn")
219 # TODO(marcink): no svn support for compare two seperate repos
219 # TODO(marcink): no svn support for compare two seperate repos
220 def test_compare_of_unrelated_forks(self, backend):
220 def test_compare_of_unrelated_forks(self, backend):
221 orig = backend.create_repo(number_of_commits=1)
221 orig = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
223
223
224 response = self.app.get(
224 response = self.app.get(
225 route_path('repo_compare',
225 route_path('repo_compare',
226 repo_name=orig.repo_name,
226 repo_name=orig.repo_name,
227 source_ref_type="rev", source_ref="tip",
227 source_ref_type="rev", source_ref="tip",
228 target_ref_type="rev", target_ref="tip",
228 target_ref_type="rev", target_ref="tip",
229 params=dict(merge='1', target_repo=fork.repo_name),
229 params=dict(merge='1', target_repo=fork.repo_name),
230 ),
230 ),
231 status=302)
231 status=302)
232 response = response.follow()
232 response = response.follow()
233 response.mustcontain("Repositories unrelated.")
233 response.mustcontain("Repositories unrelated.")
234
234
235 @pytest.mark.xfail_backends("svn")
235 @pytest.mark.xfail_backends("svn")
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
237
237
238 # repo1:
238 # repo1:
239 # commit0:
239 # commit0:
240 # commit1:
240 # commit1:
241 # repo1-fork- in which we will cherry pick bottom commits
241 # repo1-fork- in which we will cherry pick bottom commits
242 # commit0:
242 # commit0:
243 # commit1:
243 # commit1:
244 # commit2: x
244 # commit2: x
245 # commit3: x
245 # commit3: x
246 # commit4: x
246 # commit4: x
247 # commit5:
247 # commit5:
248 # make repo1, and commit1+commit2
248 # make repo1, and commit1+commit2
249
249
250 repo1 = backend.create_repo()
250 repo1 = backend.create_repo()
251
251
252 # commit something !
252 # commit something !
253 commit0 = commit_change(
253 commit0 = commit_change(
254 repo1.repo_name, filename='file1', content='line1\n',
254 repo1.repo_name, filename='file1', content='line1\n',
255 message='commit1', vcs_type=backend.alias, parent=None,
255 message='commit1', vcs_type=backend.alias, parent=None,
256 newfile=True)
256 newfile=True)
257 commit1 = commit_change(
257 commit1 = commit_change(
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
259 message='commit2', vcs_type=backend.alias, parent=commit0)
259 message='commit2', vcs_type=backend.alias, parent=commit0)
260
260
261 # fork this repo
261 # fork this repo
262 repo2 = backend.create_fork()
262 repo2 = backend.create_fork()
263
263
264 # now make commit3-6
264 # now make commit3-6
265 commit2 = commit_change(
265 commit2 = commit_change(
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
267 message='commit3', vcs_type=backend.alias, parent=commit1)
267 message='commit3', vcs_type=backend.alias, parent=commit1)
268 commit3 = commit_change(
268 commit3 = commit_change(
269 repo1.repo_name, filename='file1',
269 repo1.repo_name, filename='file1',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
271 vcs_type=backend.alias, parent=commit2)
271 vcs_type=backend.alias, parent=commit2)
272 commit4 = commit_change(
272 commit4 = commit_change(
273 repo1.repo_name, filename='file1',
273 repo1.repo_name, filename='file1',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
275 vcs_type=backend.alias, parent=commit3)
275 vcs_type=backend.alias, parent=commit3)
276 commit_change( # commit 5
276 commit_change( # commit 5
277 repo1.repo_name, filename='file1',
277 repo1.repo_name, filename='file1',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
279 message='commit6', vcs_type=backend.alias, parent=commit4)
279 message='commit6', vcs_type=backend.alias, parent=commit4)
280
280
281 response = self.app.get(
281 response = self.app.get(
282 route_path('repo_compare',
282 route_path('repo_compare',
283 repo_name=repo2.repo_name,
283 repo_name=repo2.repo_name,
284 # parent of commit2, in target repo2
284 # parent of commit2, in target repo2
285 source_ref_type="rev", source_ref=commit1.raw_id,
285 source_ref_type="rev", source_ref=commit1.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
287 params=dict(merge='1', target_repo=repo1.repo_name),
287 params=dict(merge='1', target_repo=repo1.repo_name),
288 ))
288 ))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
291
291
292 # files
292 # files
293 compare_page = ComparePage(response)
293 compare_page = ComparePage(response)
294 compare_page.contains_change_summary(1, 3, 0)
294 compare_page.contains_change_summary(1, 3, 0)
295 compare_page.contains_commits([commit2, commit3, commit4])
295 compare_page.contains_commits([commit2, commit3, commit4])
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
298
298
299 @pytest.mark.xfail_backends("svn")
299 @pytest.mark.xfail_backends("svn")
300 def test_compare_cherry_pick_commits_from_top(self, backend):
300 def test_compare_cherry_pick_commits_from_top(self, backend):
301 # repo1:
301 # repo1:
302 # commit0:
302 # commit0:
303 # commit1:
303 # commit1:
304 # repo1-fork- in which we will cherry pick bottom commits
304 # repo1-fork- in which we will cherry pick bottom commits
305 # commit0:
305 # commit0:
306 # commit1:
306 # commit1:
307 # commit2:
307 # commit2:
308 # commit3: x
308 # commit3: x
309 # commit4: x
309 # commit4: x
310 # commit5: x
310 # commit5: x
311
311
312 # make repo1, and commit1+commit2
312 # make repo1, and commit1+commit2
313 repo1 = backend.create_repo()
313 repo1 = backend.create_repo()
314
314
315 # commit something !
315 # commit something !
316 commit0 = commit_change(
316 commit0 = commit_change(
317 repo1.repo_name, filename='file1', content='line1\n',
317 repo1.repo_name, filename='file1', content='line1\n',
318 message='commit1', vcs_type=backend.alias, parent=None,
318 message='commit1', vcs_type=backend.alias, parent=None,
319 newfile=True)
319 newfile=True)
320 commit1 = commit_change(
320 commit1 = commit_change(
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
322 message='commit2', vcs_type=backend.alias, parent=commit0)
322 message='commit2', vcs_type=backend.alias, parent=commit0)
323
323
324 # fork this repo
324 # fork this repo
325 backend.create_fork()
325 backend.create_fork()
326
326
327 # now make commit3-6
327 # now make commit3-6
328 commit2 = commit_change(
328 commit2 = commit_change(
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
330 message='commit3', vcs_type=backend.alias, parent=commit1)
330 message='commit3', vcs_type=backend.alias, parent=commit1)
331 commit3 = commit_change(
331 commit3 = commit_change(
332 repo1.repo_name, filename='file1',
332 repo1.repo_name, filename='file1',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
334 vcs_type=backend.alias, parent=commit2)
334 vcs_type=backend.alias, parent=commit2)
335 commit4 = commit_change(
335 commit4 = commit_change(
336 repo1.repo_name, filename='file1',
336 repo1.repo_name, filename='file1',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
338 vcs_type=backend.alias, parent=commit3)
338 vcs_type=backend.alias, parent=commit3)
339 commit5 = commit_change(
339 commit5 = commit_change(
340 repo1.repo_name, filename='file1',
340 repo1.repo_name, filename='file1',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
342 message='commit6', vcs_type=backend.alias, parent=commit4)
342 message='commit6', vcs_type=backend.alias, parent=commit4)
343
343
344 response = self.app.get(
344 response = self.app.get(
345 route_path('repo_compare',
345 route_path('repo_compare',
346 repo_name=repo1.repo_name,
346 repo_name=repo1.repo_name,
347 # parent of commit3, not in source repo2
347 # parent of commit3, not in source repo2
348 source_ref_type="rev", source_ref=commit2.raw_id,
348 source_ref_type="rev", source_ref=commit2.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
350 params=dict(merge='1'),))
350 params=dict(merge='1'),))
351
351
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
354
354
355 compare_page = ComparePage(response)
355 compare_page = ComparePage(response)
356 compare_page.contains_change_summary(1, 3, 0)
356 compare_page.contains_change_summary(1, 3, 0)
357 compare_page.contains_commits([commit3, commit4, commit5])
357 compare_page.contains_commits([commit3, commit4, commit5])
358
358
359 # files
359 # files
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
362
362
363 @pytest.mark.xfail_backends("svn")
363 @pytest.mark.xfail_backends("svn")
364 def test_compare_remote_branches(self, backend):
364 def test_compare_remote_branches(self, backend):
365 repo1 = backend.repo
365 repo1 = backend.repo
366 repo2 = backend.create_fork()
366 repo2 = backend.create_fork()
367
367
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
372
372
373 response = self.app.get(
373 response = self.app.get(
374 route_path('repo_compare',
374 route_path('repo_compare',
375 repo_name=repo1.repo_name,
375 repo_name=repo1.repo_name,
376 source_ref_type="rev", source_ref=commit_id1,
376 source_ref_type="rev", source_ref=commit_id1,
377 target_ref_type="rev", target_ref=commit_id2,
377 target_ref_type="rev", target_ref=commit_id2,
378 params=dict(merge='1', target_repo=repo2.repo_name),
378 params=dict(merge='1', target_repo=repo2.repo_name),
379 ))
379 ))
380
380
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383
383
384 compare_page = ComparePage(response)
384 compare_page = ComparePage(response)
385
385
386 # outgoing commits between those commits
386 # outgoing commits between those commits
387 compare_page.contains_commits(
387 compare_page.contains_commits(
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389
389
390 # files
390 # files
391 compare_page.contains_file_links_and_anchors([
391 compare_page.contains_file_links_and_anchors([
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
395 ])
395 ])
396
396
397 @pytest.mark.xfail_backends("svn")
397 @pytest.mark.xfail_backends("svn")
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 repo1 = backend.create_repo()
399 repo1 = backend.create_repo()
400 r1_name = repo1.repo_name
400 r1_name = repo1.repo_name
401
401
402 commit0 = commit_change(
402 commit0 = commit_change(
403 repo=r1_name, filename='file1',
403 repo=r1_name, filename='file1',
404 content='line1', message='commit1', vcs_type=backend.alias,
404 content='line1', message='commit1', vcs_type=backend.alias,
405 newfile=True)
405 newfile=True)
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407
407
408 # fork the repo1
408 # fork the repo1
409 repo2 = backend.create_fork()
409 repo2 = backend.create_fork()
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411
411
412 self.r2_id = repo2.repo_id
412 self.r2_id = repo2.repo_id
413 r2_name = repo2.repo_name
413 r2_name = repo2.repo_name
414
414
415 commit1 = commit_change(
415 commit1 = commit_change(
416 repo=r2_name, filename='file1-fork',
416 repo=r2_name, filename='file1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 newfile=True)
419 newfile=True)
420
420
421 commit2 = commit_change(
421 commit2 = commit_change(
422 repo=r2_name, filename='file2-fork',
422 repo=r2_name, filename='file2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
424 vcs_type=backend.alias, parent=commit1,
424 vcs_type=backend.alias, parent=commit1,
425 newfile=True)
425 newfile=True)
426
426
427 commit_change( # commit 3
427 commit_change( # commit 3
428 repo=r2_name, filename='file3-fork',
428 repo=r2_name, filename='file3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
430 vcs_type=backend.alias, parent=commit2, newfile=True)
430 vcs_type=backend.alias, parent=commit2, newfile=True)
431
431
432 # compare !
432 # compare !
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435
435
436 response = self.app.get(
436 response = self.app.get(
437 route_path('repo_compare',
437 route_path('repo_compare',
438 repo_name=r2_name,
438 repo_name=r2_name,
439 source_ref_type="branch", source_ref=commit_id1,
439 source_ref_type="branch", source_ref=commit_id1,
440 target_ref_type="branch", target_ref=commit_id2,
440 target_ref_type="branch", target_ref=commit_id2,
441 params=dict(merge='1', target_repo=r1_name),
441 params=dict(merge='1', target_repo=r1_name),
442 ))
442 ))
443
443
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
446 response.mustcontain('No files')
446 response.mustcontain('No files')
447 response.mustcontain('No commits in this compare')
447 response.mustcontain('No commits in this compare')
448
448
449 commit0 = commit_change(
449 commit0 = commit_change(
450 repo=r1_name, filename='file2',
450 repo=r1_name, filename='file2',
451 content='line1-added-after-fork', message='commit2-parent',
451 content='line1-added-after-fork', message='commit2-parent',
452 vcs_type=backend.alias, parent=None, newfile=True)
452 vcs_type=backend.alias, parent=None, newfile=True)
453
453
454 # compare !
454 # compare !
455 response = self.app.get(
455 response = self.app.get(
456 route_path('repo_compare',
456 route_path('repo_compare',
457 repo_name=r2_name,
457 repo_name=r2_name,
458 source_ref_type="branch", source_ref=commit_id1,
458 source_ref_type="branch", source_ref=commit_id1,
459 target_ref_type="branch", target_ref=commit_id2,
459 target_ref_type="branch", target_ref=commit_id2,
460 params=dict(merge='1', target_repo=r1_name),
460 params=dict(merge='1', target_repo=r1_name),
461 ))
461 ))
462
462
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
465
465
466 response.mustcontain("""commit2-parent""")
466 response.mustcontain("""commit2-parent""")
467 response.mustcontain("""line1-added-after-fork""")
467 response.mustcontain("""line1-added-after-fork""")
468 compare_page = ComparePage(response)
468 compare_page = ComparePage(response)
469 compare_page.contains_change_summary(1, 1, 0)
469 compare_page.contains_change_summary(1, 1, 0)
470
470
471 @pytest.mark.xfail_backends("svn")
471 @pytest.mark.xfail_backends("svn")
472 def test_compare_commits(self, backend, xhr_header):
472 def test_compare_commits(self, backend, xhr_header):
473 commit0 = backend.repo.get_commit(commit_idx=0)
473 commit0 = backend.repo.get_commit(commit_idx=0)
474 commit1 = backend.repo.get_commit(commit_idx=1)
474 commit1 = backend.repo.get_commit(commit_idx=1)
475
475
476 response = self.app.get(
476 response = self.app.get(
477 route_path('repo_compare',
477 route_path('repo_compare',
478 repo_name=backend.repo_name,
478 repo_name=backend.repo_name,
479 source_ref_type="rev", source_ref=commit0.raw_id,
479 source_ref_type="rev", source_ref=commit0.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
481 params=dict(merge='1')
481 params=dict(merge='1')
482 ),
482 ),
483 extra_environ=xhr_header, )
483 extra_environ=xhr_header, )
484
484
485 # outgoing commits between those commits
485 # outgoing commits between those commits
486 compare_page = ComparePage(response)
486 compare_page = ComparePage(response)
487 compare_page.contains_commits(commits=[commit1])
487 compare_page.contains_commits(commits=[commit1])
488
488
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
490 repo = backend.repo
490 repo = backend.repo
491 badrepo = 'badrepo'
491 badrepo = 'badrepo'
492
492
493 response = self.app.get(
493 response = self.app.get(
494 route_path('repo_compare',
494 route_path('repo_compare',
495 repo_name=badrepo,
495 repo_name=badrepo,
496 source_ref_type="rev", source_ref='tip',
496 source_ref_type="rev", source_ref='tip',
497 target_ref_type="rev", target_ref='tip',
497 target_ref_type="rev", target_ref='tip',
498 params=dict(merge='1', target_repo=repo.repo_name)
498 params=dict(merge='1', target_repo=repo.repo_name)
499 ),
499 ),
500 status=404)
500 status=404)
501
501
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
503 repo = backend.repo
503 repo = backend.repo
504 badrepo = 'badrepo'
504 badrepo = 'badrepo'
505
505
506 response = self.app.get(
506 response = self.app.get(
507 route_path('repo_compare',
507 route_path('repo_compare',
508 repo_name=repo.repo_name,
508 repo_name=repo.repo_name,
509 source_ref_type="rev", source_ref='tip',
509 source_ref_type="rev", source_ref='tip',
510 target_ref_type="rev", target_ref='tip',
510 target_ref_type="rev", target_ref='tip',
511 params=dict(merge='1', target_repo=badrepo),
511 params=dict(merge='1', target_repo=badrepo),
512 ),
512 ),
513 status=302)
513 status=302)
514 redirected = response.follow()
514 redirected = response.follow()
515 redirected.mustcontain(
515 redirected.mustcontain(
516 'Could not find the target repo: `{}`'.format(badrepo))
516 'Could not find the target repo: `{}`'.format(badrepo))
517
517
518 def test_compare_not_in_preview_mode(self, backend_stub):
518 def test_compare_not_in_preview_mode(self, backend_stub):
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
521
521
522 response = self.app.get(
522 response = self.app.get(
523 route_path('repo_compare',
523 route_path('repo_compare',
524 repo_name=backend_stub.repo_name,
524 repo_name=backend_stub.repo_name,
525 source_ref_type="rev", source_ref=commit0.raw_id,
525 source_ref_type="rev", source_ref=commit0.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
527 ))
527 ))
528
528
529 # outgoing commits between those commits
529 # outgoing commits between those commits
530 compare_page = ComparePage(response)
530 compare_page = ComparePage(response)
531 compare_page.swap_is_visible()
531 compare_page.swap_is_visible()
532 compare_page.target_source_are_enabled()
532 compare_page.target_source_are_enabled()
533
533
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
535 orig = backend_hg.create_repo(number_of_commits=1)
535 orig = backend_hg.create_repo(number_of_commits=1)
536 fork = backend_hg.create_fork()
536 fork = backend_hg.create_fork()
537
537
538 settings_util.create_repo_rhodecode_ui(
538 settings_util.create_repo_rhodecode_ui(
539 orig, 'extensions', value='', key='largefiles', active=False)
539 orig, 'extensions', value='', key='largefiles', active=False)
540 settings_util.create_repo_rhodecode_ui(
540 settings_util.create_repo_rhodecode_ui(
541 fork, 'extensions', value='', key='largefiles', active=True)
541 fork, 'extensions', value='', key='largefiles', active=True)
542
542
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
544 'MercurialRepository.compare')
544 'MercurialRepository.compare')
545 with mock.patch(compare_module) as compare_mock:
545 with mock.patch(compare_module) as compare_mock:
546 compare_mock.side_effect = RepositoryRequirementError()
546 compare_mock.side_effect = RepositoryRequirementError()
547
547
548 response = self.app.get(
548 response = self.app.get(
549 route_path('repo_compare',
549 route_path('repo_compare',
550 repo_name=orig.repo_name,
550 repo_name=orig.repo_name,
551 source_ref_type="rev", source_ref="tip",
551 source_ref_type="rev", source_ref="tip",
552 target_ref_type="rev", target_ref="tip",
552 target_ref_type="rev", target_ref="tip",
553 params=dict(merge='1', target_repo=fork.repo_name),
553 params=dict(merge='1', target_repo=fork.repo_name),
554 ),
554 ),
555 status=302)
555 status=302)
556
556
557 assert_session_flash(
557 assert_session_flash(
558 response,
558 response,
559 'Could not compare repos with different large file settings')
559 'Could not compare repos with different large file settings')
560
560
561
561
562 @pytest.mark.usefixtures("autologin_user")
562 @pytest.mark.usefixtures("autologin_user")
563 class TestCompareControllerSvn(object):
563 class TestCompareControllerSvn(object):
564
564
565 def test_supports_references_with_path(self, app, backend_svn):
565 def test_supports_references_with_path(self, app, backend_svn):
566 repo = backend_svn['svn-simple-layout']
566 repo = backend_svn['svn-simple-layout']
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
568 response = app.get(
568 response = app.get(
569 route_path('repo_compare',
569 route_path('repo_compare',
570 repo_name=repo.repo_name,
570 repo_name=repo.repo_name,
571 source_ref_type="tag",
571 source_ref_type="tag",
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
573 target_ref_type="tag",
573 target_ref_type="tag",
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
575 params=dict(merge='1'),
575 params=dict(merge='1'),
576 ),
576 ),
577 status=200)
577 status=200)
578
578
579 # Expecting no commits, since both paths are at the same revision
579 # Expecting no commits, since both paths are at the same revision
580 response.mustcontain('No commits in this compare')
580 response.mustcontain('No commits in this compare')
581
581
582 # Should find only one file changed when comparing those two tags
582 # Should find only one file changed when comparing those two tags
583 response.mustcontain('example.py')
583 response.mustcontain('example.py')
584 compare_page = ComparePage(response)
584 compare_page = ComparePage(response)
585 compare_page.contains_change_summary(1, 5, 1)
585 compare_page.contains_change_summary(1, 5, 1)
586
586
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
588 repo = backend_svn['svn-simple-layout']
588 repo = backend_svn['svn-simple-layout']
589 source_id = repo.get_commit(commit_idx=-6).raw_id
589 source_id = repo.get_commit(commit_idx=-6).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
591 response = app.get(
591 response = app.get(
592 route_path('repo_compare',
592 route_path('repo_compare',
593 repo_name=repo.repo_name,
593 repo_name=repo.repo_name,
594 source_ref_type="tag",
594 source_ref_type="tag",
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
596 target_ref_type="tag",
596 target_ref_type="tag",
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
598 params=dict(merge='1')
598 params=dict(merge='1')
599 ),
599 ),
600 status=200)
600 status=200)
601
601
602 # It should show commits
602 # It should show commits
603 assert 'No commits in this compare' not in response.body
603 assert 'No commits in this compare' not in response.body
604
604
605 # Should find only one file changed when comparing those two tags
605 # Should find only one file changed when comparing those two tags
606 response.mustcontain('example.py')
606 response.mustcontain('example.py')
607 compare_page = ComparePage(response)
607 compare_page = ComparePage(response)
608 compare_page.contains_change_summary(1, 5, 1)
608 compare_page.contains_change_summary(1, 5, 1)
609
609
610
610
611 class ComparePage(AssertResponse):
611 class ComparePage(AssertResponse):
612 """
612 """
613 Abstracts the page template from the tests
613 Abstracts the page template from the tests
614 """
614 """
615
615
616 def contains_file_links_and_anchors(self, files):
616 def contains_file_links_and_anchors(self, files):
617 doc = lxml.html.fromstring(self.response.body)
617 doc = lxml.html.fromstring(self.response.body)
618 for filename, file_id in files:
618 for filename, file_id in files:
619 self.contains_one_anchor(file_id)
619 self.contains_one_anchor(file_id)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
621 assert len(diffblock) == 2
621 assert len(diffblock) == 2
622 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
622 for lnk in diffblock[0].cssselect('a'):
623 if 'permalink' in lnk.text:
624 assert '#{}'.format(file_id) in lnk.attrib['href']
625 break
626 else:
627 pytest.fail('Unable to find permalink')
623
628
624 def contains_change_summary(self, files_changed, inserted, deleted):
629 def contains_change_summary(self, files_changed, inserted, deleted):
625 template = (
630 template = (
626 '{files_changed} file{plural} changed: '
631 '{files_changed} file{plural} changed: '
627 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
632 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
628 self.response.mustcontain(template.format(
633 self.response.mustcontain(template.format(
629 files_changed=files_changed,
634 files_changed=files_changed,
630 plural="s" if files_changed > 1 else "",
635 plural="s" if files_changed > 1 else "",
631 inserted=inserted,
636 inserted=inserted,
632 deleted=deleted))
637 deleted=deleted))
633
638
634 def contains_commits(self, commits, ancestors=None):
639 def contains_commits(self, commits, ancestors=None):
635 response = self.response
640 response = self.response
636
641
637 for commit in commits:
642 for commit in commits:
638 # Expecting to see the commit message in an element which
643 # Expecting to see the commit message in an element which
639 # has the ID "c-{commit.raw_id}"
644 # has the ID "c-{commit.raw_id}"
640 self.element_contains('#c-' + commit.raw_id, commit.message)
645 self.element_contains('#c-' + commit.raw_id, commit.message)
641 self.contains_one_link(
646 self.contains_one_link(
642 'r%s:%s' % (commit.idx, commit.short_id),
647 'r%s:%s' % (commit.idx, commit.short_id),
643 self._commit_url(commit))
648 self._commit_url(commit))
644
649
645 if ancestors:
650 if ancestors:
646 response.mustcontain('Ancestor')
651 response.mustcontain('Ancestor')
647 for ancestor in ancestors:
652 for ancestor in ancestors:
648 self.contains_one_link(
653 self.contains_one_link(
649 ancestor.short_id, self._commit_url(ancestor))
654 ancestor.short_id, self._commit_url(ancestor))
650
655
651 def _commit_url(self, commit):
656 def _commit_url(self, commit):
652 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
657 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
653
658
654 def swap_is_hidden(self):
659 def swap_is_hidden(self):
655 assert '<a id="btn-swap"' not in self.response.text
660 assert '<a id="btn-swap"' not in self.response.text
656
661
657 def swap_is_visible(self):
662 def swap_is_visible(self):
658 assert '<a id="btn-swap"' in self.response.text
663 assert '<a id="btn-swap"' in self.response.text
659
664
660 def target_source_are_disabled(self):
665 def target_source_are_disabled(self):
661 response = self.response
666 response = self.response
662 response.mustcontain("var enable_fields = false;")
667 response.mustcontain("var enable_fields = false;")
663 response.mustcontain('.select2("enable", enable_fields)')
668 response.mustcontain('.select2("enable", enable_fields)')
664
669
665 def target_source_are_enabled(self):
670 def target_source_are_enabled(self):
666 response = self.response
671 response = self.response
667 response.mustcontain("var enable_fields = true;")
672 response.mustcontain("var enable_fields = true;")
@@ -1,1652 +1,1658 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.comment import CommentsModel
34 from rhodecode.tests import (
34 from rhodecode.tests import (
35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'repo_commits': '/{repo_name}/commits',
44 'repo_commits': '/{repo_name}/commits',
45 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
46 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
47 'pullrequest_show_all': '/{repo_name}/pull-request',
47 'pullrequest_show_all': '/{repo_name}/pull-request',
48 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
49 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
50 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
51 'pullrequest_new': '/{repo_name}/pull-request/new',
51 'pullrequest_new': '/{repo_name}/pull-request/new',
52 'pullrequest_create': '/{repo_name}/pull-request/create',
52 'pullrequest_create': '/{repo_name}/pull-request/create',
53 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
54 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
55 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
56 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
57 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
58 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
58 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
59 }[name].format(**kwargs)
59 }[name].format(**kwargs)
60
60
61 if params:
61 if params:
62 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
62 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
63 return base_url
63 return base_url
64
64
65
65
66 @pytest.mark.usefixtures('app', 'autologin_user')
66 @pytest.mark.usefixtures('app', 'autologin_user')
67 @pytest.mark.backends("git", "hg")
67 @pytest.mark.backends("git", "hg")
68 class TestPullrequestsView(object):
68 class TestPullrequestsView(object):
69
69
70 def test_index(self, backend):
70 def test_index(self, backend):
71 self.app.get(route_path(
71 self.app.get(route_path(
72 'pullrequest_new',
72 'pullrequest_new',
73 repo_name=backend.repo_name))
73 repo_name=backend.repo_name))
74
74
75 def test_option_menu_create_pull_request_exists(self, backend):
75 def test_option_menu_create_pull_request_exists(self, backend):
76 repo_name = backend.repo_name
76 repo_name = backend.repo_name
77 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
77 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
78
78
79 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
79 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
80 'pullrequest_new', repo_name=repo_name)
80 'pullrequest_new', repo_name=repo_name)
81 response.mustcontain(create_pr_link)
81 response.mustcontain(create_pr_link)
82
82
83 def test_create_pr_form_with_raw_commit_id(self, backend):
83 def test_create_pr_form_with_raw_commit_id(self, backend):
84 repo = backend.repo
84 repo = backend.repo
85
85
86 self.app.get(
86 self.app.get(
87 route_path('pullrequest_new', repo_name=repo.repo_name,
87 route_path('pullrequest_new', repo_name=repo.repo_name,
88 commit=repo.get_commit().raw_id),
88 commit=repo.get_commit().raw_id),
89 status=200)
89 status=200)
90
90
91 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
91 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
92 @pytest.mark.parametrize('range_diff', ["0", "1"])
92 @pytest.mark.parametrize('range_diff', ["0", "1"])
93 def test_show(self, pr_util, pr_merge_enabled, range_diff):
93 def test_show(self, pr_util, pr_merge_enabled, range_diff):
94 pull_request = pr_util.create_pull_request(
94 pull_request = pr_util.create_pull_request(
95 mergeable=pr_merge_enabled, enable_notifications=False)
95 mergeable=pr_merge_enabled, enable_notifications=False)
96
96
97 response = self.app.get(route_path(
97 response = self.app.get(route_path(
98 'pullrequest_show',
98 'pullrequest_show',
99 repo_name=pull_request.target_repo.scm_instance().name,
99 repo_name=pull_request.target_repo.scm_instance().name,
100 pull_request_id=pull_request.pull_request_id,
100 pull_request_id=pull_request.pull_request_id,
101 params={'range-diff': range_diff}))
101 params={'range-diff': range_diff}))
102
102
103 for commit_id in pull_request.revisions:
103 for commit_id in pull_request.revisions:
104 response.mustcontain(commit_id)
104 response.mustcontain(commit_id)
105
105
106 response.mustcontain(pull_request.target_ref_parts.type)
106 response.mustcontain(pull_request.target_ref_parts.type)
107 response.mustcontain(pull_request.target_ref_parts.name)
107 response.mustcontain(pull_request.target_ref_parts.name)
108
108
109 response.mustcontain('class="pull-request-merge"')
109 response.mustcontain('class="pull-request-merge"')
110
110
111 if pr_merge_enabled:
111 if pr_merge_enabled:
112 response.mustcontain('Pull request reviewer approval is pending')
112 response.mustcontain('Pull request reviewer approval is pending')
113 else:
113 else:
114 response.mustcontain('Server-side pull request merging is disabled.')
114 response.mustcontain('Server-side pull request merging is disabled.')
115
115
116 if range_diff == "1":
116 if range_diff == "1":
117 response.mustcontain('Turn off: Show the diff as commit range')
117 response.mustcontain('Turn off: Show the diff as commit range')
118
118
119 def test_show_versions_of_pr(self, backend, csrf_token):
119 def test_show_versions_of_pr(self, backend, csrf_token):
120 commits = [
120 commits = [
121 {'message': 'initial-commit',
121 {'message': 'initial-commit',
122 'added': [FileNode('test-file.txt', 'LINE1\n')]},
122 'added': [FileNode('test-file.txt', 'LINE1\n')]},
123
123
124 {'message': 'commit-1',
124 {'message': 'commit-1',
125 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
125 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
126 # Above is the initial version of PR that changes a single line
126 # Above is the initial version of PR that changes a single line
127
127
128 # from now on we'll add 3x commit adding a nother line on each step
128 # from now on we'll add 3x commit adding a nother line on each step
129 {'message': 'commit-2',
129 {'message': 'commit-2',
130 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
130 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
131
131
132 {'message': 'commit-3',
132 {'message': 'commit-3',
133 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
133 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
134
134
135 {'message': 'commit-4',
135 {'message': 'commit-4',
136 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
136 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
137 ]
137 ]
138
138
139 commit_ids = backend.create_master_repo(commits)
139 commit_ids = backend.create_master_repo(commits)
140 target = backend.create_repo(heads=['initial-commit'])
140 target = backend.create_repo(heads=['initial-commit'])
141 source = backend.create_repo(heads=['commit-1'])
141 source = backend.create_repo(heads=['commit-1'])
142 source_repo_name = source.repo_name
142 source_repo_name = source.repo_name
143 target_repo_name = target.repo_name
143 target_repo_name = target.repo_name
144
144
145 target_ref = 'branch:{branch}:{commit_id}'.format(
145 target_ref = 'branch:{branch}:{commit_id}'.format(
146 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
146 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
147 source_ref = 'branch:{branch}:{commit_id}'.format(
147 source_ref = 'branch:{branch}:{commit_id}'.format(
148 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
148 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
149
149
150 response = self.app.post(
150 response = self.app.post(
151 route_path('pullrequest_create', repo_name=source.repo_name),
151 route_path('pullrequest_create', repo_name=source.repo_name),
152 [
152 [
153 ('source_repo', source.repo_name),
153 ('source_repo', source_repo_name),
154 ('source_ref', source_ref),
154 ('source_ref', source_ref),
155 ('target_repo', target.repo_name),
155 ('target_repo', target_repo_name),
156 ('target_ref', target_ref),
156 ('target_ref', target_ref),
157 ('common_ancestor', commit_ids['initial-commit']),
157 ('common_ancestor', commit_ids['initial-commit']),
158 ('pullrequest_title', 'Title'),
158 ('pullrequest_title', 'Title'),
159 ('pullrequest_desc', 'Description'),
159 ('pullrequest_desc', 'Description'),
160 ('description_renderer', 'markdown'),
160 ('description_renderer', 'markdown'),
161 ('__start__', 'review_members:sequence'),
161 ('__start__', 'review_members:sequence'),
162 ('__start__', 'reviewer:mapping'),
162 ('__start__', 'reviewer:mapping'),
163 ('user_id', '1'),
163 ('user_id', '1'),
164 ('__start__', 'reasons:sequence'),
164 ('__start__', 'reasons:sequence'),
165 ('reason', 'Some reason'),
165 ('reason', 'Some reason'),
166 ('__end__', 'reasons:sequence'),
166 ('__end__', 'reasons:sequence'),
167 ('__start__', 'rules:sequence'),
167 ('__start__', 'rules:sequence'),
168 ('__end__', 'rules:sequence'),
168 ('__end__', 'rules:sequence'),
169 ('mandatory', 'False'),
169 ('mandatory', 'False'),
170 ('__end__', 'reviewer:mapping'),
170 ('__end__', 'reviewer:mapping'),
171 ('__end__', 'review_members:sequence'),
171 ('__end__', 'review_members:sequence'),
172 ('__start__', 'revisions:sequence'),
172 ('__start__', 'revisions:sequence'),
173 ('revisions', commit_ids['commit-1']),
173 ('revisions', commit_ids['commit-1']),
174 ('__end__', 'revisions:sequence'),
174 ('__end__', 'revisions:sequence'),
175 ('user', ''),
175 ('user', ''),
176 ('csrf_token', csrf_token),
176 ('csrf_token', csrf_token),
177 ],
177 ],
178 status=302)
178 status=302)
179
179
180 location = response.headers['Location']
180 location = response.headers['Location']
181
181
182 pull_request_id = location.rsplit('/', 1)[1]
182 pull_request_id = location.rsplit('/', 1)[1]
183 assert pull_request_id != 'new'
183 assert pull_request_id != 'new'
184 pull_request = PullRequest.get(int(pull_request_id))
184 pull_request = PullRequest.get(int(pull_request_id))
185
185
186 pull_request_id = pull_request.pull_request_id
186 pull_request_id = pull_request.pull_request_id
187
187
188 # Show initial version of PR
188 # Show initial version of PR
189 response = self.app.get(
189 response = self.app.get(
190 route_path('pullrequest_show',
190 route_path('pullrequest_show',
191 repo_name=target_repo_name,
191 repo_name=target_repo_name,
192 pull_request_id=pull_request_id))
192 pull_request_id=pull_request_id))
193
193
194 response.mustcontain('commit-1')
194 response.mustcontain('commit-1')
195 response.mustcontain(no=['commit-2'])
195 response.mustcontain(no=['commit-2'])
196 response.mustcontain(no=['commit-3'])
196 response.mustcontain(no=['commit-3'])
197 response.mustcontain(no=['commit-4'])
197 response.mustcontain(no=['commit-4'])
198
198
199 response.mustcontain('cb-addition"></span><span>LINE2</span>')
199 response.mustcontain('cb-addition"></span><span>LINE2</span>')
200 response.mustcontain(no=['LINE3'])
200 response.mustcontain(no=['LINE3'])
201 response.mustcontain(no=['LINE4'])
201 response.mustcontain(no=['LINE4'])
202 response.mustcontain(no=['LINE5'])
202 response.mustcontain(no=['LINE5'])
203
203
204 # update PR #1
204 # update PR #1
205 source_repo = Repository.get_by_repo_name(source_repo_name)
205 source_repo = Repository.get_by_repo_name(source_repo_name)
206 backend.pull_heads(source_repo, heads=['commit-2'])
206 backend.pull_heads(source_repo, heads=['commit-2'])
207 response = self.app.post(
207 response = self.app.post(
208 route_path('pullrequest_update',
208 route_path('pullrequest_update',
209 repo_name=target_repo_name, pull_request_id=pull_request_id),
209 repo_name=target_repo_name, pull_request_id=pull_request_id),
210 params={'update_commits': 'true', 'csrf_token': csrf_token})
210 params={'update_commits': 'true', 'csrf_token': csrf_token})
211
211
212 # update PR #2
212 # update PR #2
213 source_repo = Repository.get_by_repo_name(source_repo_name)
213 source_repo = Repository.get_by_repo_name(source_repo_name)
214 backend.pull_heads(source_repo, heads=['commit-3'])
214 backend.pull_heads(source_repo, heads=['commit-3'])
215 response = self.app.post(
215 response = self.app.post(
216 route_path('pullrequest_update',
216 route_path('pullrequest_update',
217 repo_name=target_repo_name, pull_request_id=pull_request_id),
217 repo_name=target_repo_name, pull_request_id=pull_request_id),
218 params={'update_commits': 'true', 'csrf_token': csrf_token})
218 params={'update_commits': 'true', 'csrf_token': csrf_token})
219
219
220 # update PR #3
220 # update PR #3
221 source_repo = Repository.get_by_repo_name(source_repo_name)
221 source_repo = Repository.get_by_repo_name(source_repo_name)
222 backend.pull_heads(source_repo, heads=['commit-4'])
222 backend.pull_heads(source_repo, heads=['commit-4'])
223 response = self.app.post(
223 response = self.app.post(
224 route_path('pullrequest_update',
224 route_path('pullrequest_update',
225 repo_name=target_repo_name, pull_request_id=pull_request_id),
225 repo_name=target_repo_name, pull_request_id=pull_request_id),
226 params={'update_commits': 'true', 'csrf_token': csrf_token})
226 params={'update_commits': 'true', 'csrf_token': csrf_token})
227
227
228 # Show final version !
228 # Show final version !
229 response = self.app.get(
229 response = self.app.get(
230 route_path('pullrequest_show',
230 route_path('pullrequest_show',
231 repo_name=target_repo_name,
231 repo_name=target_repo_name,
232 pull_request_id=pull_request_id))
232 pull_request_id=pull_request_id))
233
233
234 # 3 updates, and the latest == 4
234 # 3 updates, and the latest == 4
235 response.mustcontain('4 versions available for this pull request')
235 response.mustcontain('4 versions available for this pull request')
236 response.mustcontain(no=['rhodecode diff rendering error'])
236 response.mustcontain(no=['rhodecode diff rendering error'])
237
237
238 # initial show must have 3 commits, and 3 adds
238 # initial show must have 3 commits, and 3 adds
239 response.mustcontain('commit-1')
239 response.mustcontain('commit-1')
240 response.mustcontain('commit-2')
240 response.mustcontain('commit-2')
241 response.mustcontain('commit-3')
241 response.mustcontain('commit-3')
242 response.mustcontain('commit-4')
242 response.mustcontain('commit-4')
243
243
244 response.mustcontain('cb-addition"></span><span>LINE2</span>')
244 response.mustcontain('cb-addition"></span><span>LINE2</span>')
245 response.mustcontain('cb-addition"></span><span>LINE3</span>')
245 response.mustcontain('cb-addition"></span><span>LINE3</span>')
246 response.mustcontain('cb-addition"></span><span>LINE4</span>')
246 response.mustcontain('cb-addition"></span><span>LINE4</span>')
247 response.mustcontain('cb-addition"></span><span>LINE5</span>')
247 response.mustcontain('cb-addition"></span><span>LINE5</span>')
248
248
249 # fetch versions
249 # fetch versions
250 pr = PullRequest.get(pull_request_id)
250 pr = PullRequest.get(pull_request_id)
251 versions = [x.pull_request_version_id for x in pr.versions.all()]
251 versions = [x.pull_request_version_id for x in pr.versions.all()]
252 assert len(versions) == 3
252 assert len(versions) == 3
253
253
254 # show v1,v2,v3,v4
254 # show v1,v2,v3,v4
255 def cb_line(text):
255 def cb_line(text):
256 return 'cb-addition"></span><span>{}</span>'.format(text)
256 return 'cb-addition"></span><span>{}</span>'.format(text)
257
257
258 def cb_context(text):
258 def cb_context(text):
259 return '<span class="cb-code"><span class="cb-action cb-context">' \
259 return '<span class="cb-code"><span class="cb-action cb-context">' \
260 '</span><span>{}</span></span>'.format(text)
260 '</span><span>{}</span></span>'.format(text)
261
261
262 commit_tests = {
262 commit_tests = {
263 # in response, not in response
263 # in response, not in response
264 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
264 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
265 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
265 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
266 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
266 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
267 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
267 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
268 }
268 }
269 diff_tests = {
269 diff_tests = {
270 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
270 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
271 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
271 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
272 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
272 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
273 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
273 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
274 }
274 }
275 for idx, ver in enumerate(versions, 1):
275 for idx, ver in enumerate(versions, 1):
276
276
277 response = self.app.get(
277 response = self.app.get(
278 route_path('pullrequest_show',
278 route_path('pullrequest_show',
279 repo_name=target_repo_name,
279 repo_name=target_repo_name,
280 pull_request_id=pull_request_id,
280 pull_request_id=pull_request_id,
281 params={'version': ver}))
281 params={'version': ver}))
282
282
283 response.mustcontain(no=['rhodecode diff rendering error'])
283 response.mustcontain(no=['rhodecode diff rendering error'])
284 response.mustcontain('Showing changes at v{}'.format(idx))
284 response.mustcontain('Showing changes at v{}'.format(idx))
285
285
286 yes, no = commit_tests[idx]
286 yes, no = commit_tests[idx]
287 for y in yes:
287 for y in yes:
288 response.mustcontain(y)
288 response.mustcontain(y)
289 for n in no:
289 for n in no:
290 response.mustcontain(no=n)
290 response.mustcontain(no=n)
291
291
292 yes, no = diff_tests[idx]
292 yes, no = diff_tests[idx]
293 for y in yes:
293 for y in yes:
294 response.mustcontain(cb_line(y))
294 response.mustcontain(cb_line(y))
295 for n in no:
295 for n in no:
296 response.mustcontain(no=n)
296 response.mustcontain(no=n)
297
297
298 # show diff between versions
298 # show diff between versions
299 diff_compare_tests = {
299 diff_compare_tests = {
300 1: (['LINE3'], ['LINE1', 'LINE2']),
300 1: (['LINE3'], ['LINE1', 'LINE2']),
301 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
301 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
302 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
302 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
303 }
303 }
304 for idx, ver in enumerate(versions, 1):
304 for idx, ver in enumerate(versions, 1):
305 adds, context = diff_compare_tests[idx]
305 adds, context = diff_compare_tests[idx]
306
306
307 to_ver = ver+1
307 to_ver = ver+1
308 if idx == 3:
308 if idx == 3:
309 to_ver = 'latest'
309 to_ver = 'latest'
310
310
311 response = self.app.get(
311 response = self.app.get(
312 route_path('pullrequest_show',
312 route_path('pullrequest_show',
313 repo_name=target_repo_name,
313 repo_name=target_repo_name,
314 pull_request_id=pull_request_id,
314 pull_request_id=pull_request_id,
315 params={'from_version': versions[0], 'version': to_ver}))
315 params={'from_version': versions[0], 'version': to_ver}))
316
316
317 response.mustcontain(no=['rhodecode diff rendering error'])
317 response.mustcontain(no=['rhodecode diff rendering error'])
318
318
319 for a in adds:
319 for a in adds:
320 response.mustcontain(cb_line(a))
320 response.mustcontain(cb_line(a))
321 for c in context:
321 for c in context:
322 response.mustcontain(cb_context(c))
322 response.mustcontain(cb_context(c))
323
323
324 # test version v2 -> v3
324 # test version v2 -> v3
325 response = self.app.get(
325 response = self.app.get(
326 route_path('pullrequest_show',
326 route_path('pullrequest_show',
327 repo_name=target_repo_name,
327 repo_name=target_repo_name,
328 pull_request_id=pull_request_id,
328 pull_request_id=pull_request_id,
329 params={'from_version': versions[1], 'version': versions[2]}))
329 params={'from_version': versions[1], 'version': versions[2]}))
330
330
331 response.mustcontain(cb_context('LINE1'))
331 response.mustcontain(cb_context('LINE1'))
332 response.mustcontain(cb_context('LINE2'))
332 response.mustcontain(cb_context('LINE2'))
333 response.mustcontain(cb_context('LINE3'))
333 response.mustcontain(cb_context('LINE3'))
334 response.mustcontain(cb_line('LINE4'))
334 response.mustcontain(cb_line('LINE4'))
335
335
336 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
336 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
337 # Logout
337 # Logout
338 response = self.app.post(
338 response = self.app.post(
339 h.route_path('logout'),
339 h.route_path('logout'),
340 params={'csrf_token': csrf_token})
340 params={'csrf_token': csrf_token})
341 # Login as regular user
341 # Login as regular user
342 response = self.app.post(h.route_path('login'),
342 response = self.app.post(h.route_path('login'),
343 {'username': TEST_USER_REGULAR_LOGIN,
343 {'username': TEST_USER_REGULAR_LOGIN,
344 'password': 'test12'})
344 'password': 'test12'})
345
345
346 pull_request = pr_util.create_pull_request(
346 pull_request = pr_util.create_pull_request(
347 author=TEST_USER_REGULAR_LOGIN)
347 author=TEST_USER_REGULAR_LOGIN)
348
348
349 response = self.app.get(route_path(
349 response = self.app.get(route_path(
350 'pullrequest_show',
350 'pullrequest_show',
351 repo_name=pull_request.target_repo.scm_instance().name,
351 repo_name=pull_request.target_repo.scm_instance().name,
352 pull_request_id=pull_request.pull_request_id))
352 pull_request_id=pull_request.pull_request_id))
353
353
354 response.mustcontain('Server-side pull request merging is disabled.')
354 response.mustcontain('Server-side pull request merging is disabled.')
355
355
356 assert_response = response.assert_response()
356 assert_response = response.assert_response()
357 # for regular user without a merge permissions, we don't see it
357 # for regular user without a merge permissions, we don't see it
358 assert_response.no_element_exists('#close-pull-request-action')
358 assert_response.no_element_exists('#close-pull-request-action')
359
359
360 user_util.grant_user_permission_to_repo(
360 user_util.grant_user_permission_to_repo(
361 pull_request.target_repo,
361 pull_request.target_repo,
362 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
362 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
363 'repository.write')
363 'repository.write')
364 response = self.app.get(route_path(
364 response = self.app.get(route_path(
365 'pullrequest_show',
365 'pullrequest_show',
366 repo_name=pull_request.target_repo.scm_instance().name,
366 repo_name=pull_request.target_repo.scm_instance().name,
367 pull_request_id=pull_request.pull_request_id))
367 pull_request_id=pull_request.pull_request_id))
368
368
369 response.mustcontain('Server-side pull request merging is disabled.')
369 response.mustcontain('Server-side pull request merging is disabled.')
370
370
371 assert_response = response.assert_response()
371 assert_response = response.assert_response()
372 # now regular user has a merge permissions, we have CLOSE button
372 # now regular user has a merge permissions, we have CLOSE button
373 assert_response.one_element_exists('#close-pull-request-action')
373 assert_response.one_element_exists('#close-pull-request-action')
374
374
375 def test_show_invalid_commit_id(self, pr_util):
375 def test_show_invalid_commit_id(self, pr_util):
376 # Simulating invalid revisions which will cause a lookup error
376 # Simulating invalid revisions which will cause a lookup error
377 pull_request = pr_util.create_pull_request()
377 pull_request = pr_util.create_pull_request()
378 pull_request.revisions = ['invalid']
378 pull_request.revisions = ['invalid']
379 Session().add(pull_request)
379 Session().add(pull_request)
380 Session().commit()
380 Session().commit()
381
381
382 response = self.app.get(route_path(
382 response = self.app.get(route_path(
383 'pullrequest_show',
383 'pullrequest_show',
384 repo_name=pull_request.target_repo.scm_instance().name,
384 repo_name=pull_request.target_repo.scm_instance().name,
385 pull_request_id=pull_request.pull_request_id))
385 pull_request_id=pull_request.pull_request_id))
386
386
387 for commit_id in pull_request.revisions:
387 for commit_id in pull_request.revisions:
388 response.mustcontain(commit_id)
388 response.mustcontain(commit_id)
389
389
390 def test_show_invalid_source_reference(self, pr_util):
390 def test_show_invalid_source_reference(self, pr_util):
391 pull_request = pr_util.create_pull_request()
391 pull_request = pr_util.create_pull_request()
392 pull_request.source_ref = 'branch:b:invalid'
392 pull_request.source_ref = 'branch:b:invalid'
393 Session().add(pull_request)
393 Session().add(pull_request)
394 Session().commit()
394 Session().commit()
395
395
396 self.app.get(route_path(
396 self.app.get(route_path(
397 'pullrequest_show',
397 'pullrequest_show',
398 repo_name=pull_request.target_repo.scm_instance().name,
398 repo_name=pull_request.target_repo.scm_instance().name,
399 pull_request_id=pull_request.pull_request_id))
399 pull_request_id=pull_request.pull_request_id))
400
400
401 def test_edit_title_description(self, pr_util, csrf_token):
401 def test_edit_title_description(self, pr_util, csrf_token):
402 pull_request = pr_util.create_pull_request()
402 pull_request = pr_util.create_pull_request()
403 pull_request_id = pull_request.pull_request_id
403 pull_request_id = pull_request.pull_request_id
404
404
405 response = self.app.post(
405 response = self.app.post(
406 route_path('pullrequest_update',
406 route_path('pullrequest_update',
407 repo_name=pull_request.target_repo.repo_name,
407 repo_name=pull_request.target_repo.repo_name,
408 pull_request_id=pull_request_id),
408 pull_request_id=pull_request_id),
409 params={
409 params={
410 'edit_pull_request': 'true',
410 'edit_pull_request': 'true',
411 'title': 'New title',
411 'title': 'New title',
412 'description': 'New description',
412 'description': 'New description',
413 'csrf_token': csrf_token})
413 'csrf_token': csrf_token})
414
414
415 assert_session_flash(
415 assert_session_flash(
416 response, u'Pull request title & description updated.',
416 response, u'Pull request title & description updated.',
417 category='success')
417 category='success')
418
418
419 pull_request = PullRequest.get(pull_request_id)
419 pull_request = PullRequest.get(pull_request_id)
420 assert pull_request.title == 'New title'
420 assert pull_request.title == 'New title'
421 assert pull_request.description == 'New description'
421 assert pull_request.description == 'New description'
422
422
423 def test_edit_title_description_closed(self, pr_util, csrf_token):
423 def test_edit_title_description_closed(self, pr_util, csrf_token):
424 pull_request = pr_util.create_pull_request()
424 pull_request = pr_util.create_pull_request()
425 pull_request_id = pull_request.pull_request_id
425 pull_request_id = pull_request.pull_request_id
426 repo_name = pull_request.target_repo.repo_name
426 repo_name = pull_request.target_repo.repo_name
427 pr_util.close()
427 pr_util.close()
428
428
429 response = self.app.post(
429 response = self.app.post(
430 route_path('pullrequest_update',
430 route_path('pullrequest_update',
431 repo_name=repo_name, pull_request_id=pull_request_id),
431 repo_name=repo_name, pull_request_id=pull_request_id),
432 params={
432 params={
433 'edit_pull_request': 'true',
433 'edit_pull_request': 'true',
434 'title': 'New title',
434 'title': 'New title',
435 'description': 'New description',
435 'description': 'New description',
436 'csrf_token': csrf_token}, status=200)
436 'csrf_token': csrf_token}, status=200)
437 assert_session_flash(
437 assert_session_flash(
438 response, u'Cannot update closed pull requests.',
438 response, u'Cannot update closed pull requests.',
439 category='error')
439 category='error')
440
440
441 def test_update_invalid_source_reference(self, pr_util, csrf_token):
441 def test_update_invalid_source_reference(self, pr_util, csrf_token):
442 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
442 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
443
443
444 pull_request = pr_util.create_pull_request()
444 pull_request = pr_util.create_pull_request()
445 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
445 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
446 Session().add(pull_request)
446 Session().add(pull_request)
447 Session().commit()
447 Session().commit()
448
448
449 pull_request_id = pull_request.pull_request_id
449 pull_request_id = pull_request.pull_request_id
450
450
451 response = self.app.post(
451 response = self.app.post(
452 route_path('pullrequest_update',
452 route_path('pullrequest_update',
453 repo_name=pull_request.target_repo.repo_name,
453 repo_name=pull_request.target_repo.repo_name,
454 pull_request_id=pull_request_id),
454 pull_request_id=pull_request_id),
455 params={'update_commits': 'true', 'csrf_token': csrf_token})
455 params={'update_commits': 'true', 'csrf_token': csrf_token})
456
456
457 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
457 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
458 UpdateFailureReason.MISSING_SOURCE_REF])
458 UpdateFailureReason.MISSING_SOURCE_REF])
459 assert_session_flash(response, expected_msg, category='error')
459 assert_session_flash(response, expected_msg, category='error')
460
460
461 def test_missing_target_reference(self, pr_util, csrf_token):
461 def test_missing_target_reference(self, pr_util, csrf_token):
462 from rhodecode.lib.vcs.backends.base import MergeFailureReason
462 from rhodecode.lib.vcs.backends.base import MergeFailureReason
463 pull_request = pr_util.create_pull_request(
463 pull_request = pr_util.create_pull_request(
464 approved=True, mergeable=True)
464 approved=True, mergeable=True)
465 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
465 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
466 pull_request.target_ref = unicode_reference
466 pull_request.target_ref = unicode_reference
467 Session().add(pull_request)
467 Session().add(pull_request)
468 Session().commit()
468 Session().commit()
469
469
470 pull_request_id = pull_request.pull_request_id
470 pull_request_id = pull_request.pull_request_id
471 pull_request_url = route_path(
471 pull_request_url = route_path(
472 'pullrequest_show',
472 'pullrequest_show',
473 repo_name=pull_request.target_repo.repo_name,
473 repo_name=pull_request.target_repo.repo_name,
474 pull_request_id=pull_request_id)
474 pull_request_id=pull_request_id)
475
475
476 response = self.app.get(pull_request_url)
476 response = self.app.get(pull_request_url)
477 target_ref_id = 'invalid-branch'
477 target_ref_id = 'invalid-branch'
478 merge_resp = MergeResponse(
478 merge_resp = MergeResponse(
479 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
479 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
480 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
480 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
481 response.assert_response().element_contains(
481 response.assert_response().element_contains(
482 'div[data-role="merge-message"]', merge_resp.merge_status_message)
482 'div[data-role="merge-message"]', merge_resp.merge_status_message)
483
483
484 def test_comment_and_close_pull_request_custom_message_approved(
484 def test_comment_and_close_pull_request_custom_message_approved(
485 self, pr_util, csrf_token, xhr_header):
485 self, pr_util, csrf_token, xhr_header):
486
486
487 pull_request = pr_util.create_pull_request(approved=True)
487 pull_request = pr_util.create_pull_request(approved=True)
488 pull_request_id = pull_request.pull_request_id
488 pull_request_id = pull_request.pull_request_id
489 author = pull_request.user_id
489 author = pull_request.user_id
490 repo = pull_request.target_repo.repo_id
490 repo = pull_request.target_repo.repo_id
491
491
492 self.app.post(
492 self.app.post(
493 route_path('pullrequest_comment_create',
493 route_path('pullrequest_comment_create',
494 repo_name=pull_request.target_repo.scm_instance().name,
494 repo_name=pull_request.target_repo.scm_instance().name,
495 pull_request_id=pull_request_id),
495 pull_request_id=pull_request_id),
496 params={
496 params={
497 'close_pull_request': '1',
497 'close_pull_request': '1',
498 'text': 'Closing a PR',
498 'text': 'Closing a PR',
499 'csrf_token': csrf_token},
499 'csrf_token': csrf_token},
500 extra_environ=xhr_header,)
500 extra_environ=xhr_header,)
501
501
502 journal = UserLog.query()\
502 journal = UserLog.query()\
503 .filter(UserLog.user_id == author)\
503 .filter(UserLog.user_id == author)\
504 .filter(UserLog.repository_id == repo) \
504 .filter(UserLog.repository_id == repo) \
505 .order_by(UserLog.user_log_id.asc()) \
505 .order_by(UserLog.user_log_id.asc()) \
506 .all()
506 .all()
507 assert journal[-1].action == 'repo.pull_request.close'
507 assert journal[-1].action == 'repo.pull_request.close'
508
508
509 pull_request = PullRequest.get(pull_request_id)
509 pull_request = PullRequest.get(pull_request_id)
510 assert pull_request.is_closed()
510 assert pull_request.is_closed()
511
511
512 status = ChangesetStatusModel().get_status(
512 status = ChangesetStatusModel().get_status(
513 pull_request.source_repo, pull_request=pull_request)
513 pull_request.source_repo, pull_request=pull_request)
514 assert status == ChangesetStatus.STATUS_APPROVED
514 assert status == ChangesetStatus.STATUS_APPROVED
515 comments = ChangesetComment().query() \
515 comments = ChangesetComment().query() \
516 .filter(ChangesetComment.pull_request == pull_request) \
516 .filter(ChangesetComment.pull_request == pull_request) \
517 .order_by(ChangesetComment.comment_id.asc())\
517 .order_by(ChangesetComment.comment_id.asc())\
518 .all()
518 .all()
519 assert comments[-1].text == 'Closing a PR'
519 assert comments[-1].text == 'Closing a PR'
520
520
521 def test_comment_force_close_pull_request_rejected(
521 def test_comment_force_close_pull_request_rejected(
522 self, pr_util, csrf_token, xhr_header):
522 self, pr_util, csrf_token, xhr_header):
523 pull_request = pr_util.create_pull_request()
523 pull_request = pr_util.create_pull_request()
524 pull_request_id = pull_request.pull_request_id
524 pull_request_id = pull_request.pull_request_id
525 PullRequestModel().update_reviewers(
525 PullRequestModel().update_reviewers(
526 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
526 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
527 pull_request.author)
527 pull_request.author)
528 author = pull_request.user_id
528 author = pull_request.user_id
529 repo = pull_request.target_repo.repo_id
529 repo = pull_request.target_repo.repo_id
530
530
531 self.app.post(
531 self.app.post(
532 route_path('pullrequest_comment_create',
532 route_path('pullrequest_comment_create',
533 repo_name=pull_request.target_repo.scm_instance().name,
533 repo_name=pull_request.target_repo.scm_instance().name,
534 pull_request_id=pull_request_id),
534 pull_request_id=pull_request_id),
535 params={
535 params={
536 'close_pull_request': '1',
536 'close_pull_request': '1',
537 'csrf_token': csrf_token},
537 'csrf_token': csrf_token},
538 extra_environ=xhr_header)
538 extra_environ=xhr_header)
539
539
540 pull_request = PullRequest.get(pull_request_id)
540 pull_request = PullRequest.get(pull_request_id)
541
541
542 journal = UserLog.query()\
542 journal = UserLog.query()\
543 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
543 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
544 .order_by(UserLog.user_log_id.asc()) \
544 .order_by(UserLog.user_log_id.asc()) \
545 .all()
545 .all()
546 assert journal[-1].action == 'repo.pull_request.close'
546 assert journal[-1].action == 'repo.pull_request.close'
547
547
548 # check only the latest status, not the review status
548 # check only the latest status, not the review status
549 status = ChangesetStatusModel().get_status(
549 status = ChangesetStatusModel().get_status(
550 pull_request.source_repo, pull_request=pull_request)
550 pull_request.source_repo, pull_request=pull_request)
551 assert status == ChangesetStatus.STATUS_REJECTED
551 assert status == ChangesetStatus.STATUS_REJECTED
552
552
553 def test_comment_and_close_pull_request(
553 def test_comment_and_close_pull_request(
554 self, pr_util, csrf_token, xhr_header):
554 self, pr_util, csrf_token, xhr_header):
555 pull_request = pr_util.create_pull_request()
555 pull_request = pr_util.create_pull_request()
556 pull_request_id = pull_request.pull_request_id
556 pull_request_id = pull_request.pull_request_id
557
557
558 response = self.app.post(
558 response = self.app.post(
559 route_path('pullrequest_comment_create',
559 route_path('pullrequest_comment_create',
560 repo_name=pull_request.target_repo.scm_instance().name,
560 repo_name=pull_request.target_repo.scm_instance().name,
561 pull_request_id=pull_request.pull_request_id),
561 pull_request_id=pull_request.pull_request_id),
562 params={
562 params={
563 'close_pull_request': 'true',
563 'close_pull_request': 'true',
564 'csrf_token': csrf_token},
564 'csrf_token': csrf_token},
565 extra_environ=xhr_header)
565 extra_environ=xhr_header)
566
566
567 assert response.json
567 assert response.json
568
568
569 pull_request = PullRequest.get(pull_request_id)
569 pull_request = PullRequest.get(pull_request_id)
570 assert pull_request.is_closed()
570 assert pull_request.is_closed()
571
571
572 # check only the latest status, not the review status
572 # check only the latest status, not the review status
573 status = ChangesetStatusModel().get_status(
573 status = ChangesetStatusModel().get_status(
574 pull_request.source_repo, pull_request=pull_request)
574 pull_request.source_repo, pull_request=pull_request)
575 assert status == ChangesetStatus.STATUS_REJECTED
575 assert status == ChangesetStatus.STATUS_REJECTED
576
576
577 def test_comment_and_close_pull_request_try_edit_comment(
577 def test_comment_and_close_pull_request_try_edit_comment(
578 self, pr_util, csrf_token, xhr_header
578 self, pr_util, csrf_token, xhr_header
579 ):
579 ):
580 pull_request = pr_util.create_pull_request()
580 pull_request = pr_util.create_pull_request()
581 pull_request_id = pull_request.pull_request_id
581 pull_request_id = pull_request.pull_request_id
582 target_scm = pull_request.target_repo.scm_instance()
582 target_scm = pull_request.target_repo.scm_instance()
583 target_scm_name = target_scm.name
583 target_scm_name = target_scm.name
584
584
585 response = self.app.post(
585 response = self.app.post(
586 route_path(
586 route_path(
587 'pullrequest_comment_create',
587 'pullrequest_comment_create',
588 repo_name=target_scm_name,
588 repo_name=target_scm_name,
589 pull_request_id=pull_request_id,
589 pull_request_id=pull_request_id,
590 ),
590 ),
591 params={
591 params={
592 'close_pull_request': 'true',
592 'close_pull_request': 'true',
593 'csrf_token': csrf_token,
593 'csrf_token': csrf_token,
594 },
594 },
595 extra_environ=xhr_header)
595 extra_environ=xhr_header)
596
596
597 assert response.json
597 assert response.json
598
598
599 pull_request = PullRequest.get(pull_request_id)
599 pull_request = PullRequest.get(pull_request_id)
600 target_scm = pull_request.target_repo.scm_instance()
600 target_scm = pull_request.target_repo.scm_instance()
601 target_scm_name = target_scm.name
601 target_scm_name = target_scm.name
602 assert pull_request.is_closed()
602 assert pull_request.is_closed()
603
603
604 # check only the latest status, not the review status
604 # check only the latest status, not the review status
605 status = ChangesetStatusModel().get_status(
605 status = ChangesetStatusModel().get_status(
606 pull_request.source_repo, pull_request=pull_request)
606 pull_request.source_repo, pull_request=pull_request)
607 assert status == ChangesetStatus.STATUS_REJECTED
607 assert status == ChangesetStatus.STATUS_REJECTED
608
608
609 comment_id = response.json.get('comment_id', None)
609 comment_id = response.json.get('comment_id', None)
610 test_text = 'test'
610 test_text = 'test'
611 response = self.app.post(
611 response = self.app.post(
612 route_path(
612 route_path(
613 'pullrequest_comment_edit',
613 'pullrequest_comment_edit',
614 repo_name=target_scm_name,
614 repo_name=target_scm_name,
615 pull_request_id=pull_request_id,
615 pull_request_id=pull_request_id,
616 comment_id=comment_id,
616 comment_id=comment_id,
617 ),
617 ),
618 extra_environ=xhr_header,
618 extra_environ=xhr_header,
619 params={
619 params={
620 'csrf_token': csrf_token,
620 'csrf_token': csrf_token,
621 'text': test_text,
621 'text': test_text,
622 },
622 },
623 status=403,
623 status=403,
624 )
624 )
625 assert response.status_int == 403
625 assert response.status_int == 403
626
626
627 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
627 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
628 pull_request = pr_util.create_pull_request()
628 pull_request = pr_util.create_pull_request()
629 target_scm = pull_request.target_repo.scm_instance()
629 target_scm = pull_request.target_repo.scm_instance()
630 target_scm_name = target_scm.name
630 target_scm_name = target_scm.name
631
631
632 response = self.app.post(
632 response = self.app.post(
633 route_path(
633 route_path(
634 'pullrequest_comment_create',
634 'pullrequest_comment_create',
635 repo_name=target_scm_name,
635 repo_name=target_scm_name,
636 pull_request_id=pull_request.pull_request_id),
636 pull_request_id=pull_request.pull_request_id),
637 params={
637 params={
638 'csrf_token': csrf_token,
638 'csrf_token': csrf_token,
639 'text': 'init',
639 'text': 'init',
640 },
640 },
641 extra_environ=xhr_header,
641 extra_environ=xhr_header,
642 )
642 )
643 assert response.json
643 assert response.json
644
644
645 comment_id = response.json.get('comment_id', None)
645 comment_id = response.json.get('comment_id', None)
646 assert comment_id
646 assert comment_id
647 test_text = 'test'
647 test_text = 'test'
648 self.app.post(
648 self.app.post(
649 route_path(
649 route_path(
650 'pullrequest_comment_edit',
650 'pullrequest_comment_edit',
651 repo_name=target_scm_name,
651 repo_name=target_scm_name,
652 pull_request_id=pull_request.pull_request_id,
652 pull_request_id=pull_request.pull_request_id,
653 comment_id=comment_id,
653 comment_id=comment_id,
654 ),
654 ),
655 extra_environ=xhr_header,
655 extra_environ=xhr_header,
656 params={
656 params={
657 'csrf_token': csrf_token,
657 'csrf_token': csrf_token,
658 'text': test_text,
658 'text': test_text,
659 'version': '0',
659 'version': '0',
660 },
660 },
661
661
662 )
662 )
663 text_form_db = ChangesetComment.query().filter(
663 text_form_db = ChangesetComment.query().filter(
664 ChangesetComment.comment_id == comment_id).first().text
664 ChangesetComment.comment_id == comment_id).first().text
665 assert test_text == text_form_db
665 assert test_text == text_form_db
666
666
667 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
667 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
668 pull_request = pr_util.create_pull_request()
668 pull_request = pr_util.create_pull_request()
669 target_scm = pull_request.target_repo.scm_instance()
669 target_scm = pull_request.target_repo.scm_instance()
670 target_scm_name = target_scm.name
670 target_scm_name = target_scm.name
671
671
672 response = self.app.post(
672 response = self.app.post(
673 route_path(
673 route_path(
674 'pullrequest_comment_create',
674 'pullrequest_comment_create',
675 repo_name=target_scm_name,
675 repo_name=target_scm_name,
676 pull_request_id=pull_request.pull_request_id),
676 pull_request_id=pull_request.pull_request_id),
677 params={
677 params={
678 'csrf_token': csrf_token,
678 'csrf_token': csrf_token,
679 'text': 'init',
679 'text': 'init',
680 },
680 },
681 extra_environ=xhr_header,
681 extra_environ=xhr_header,
682 )
682 )
683 assert response.json
683 assert response.json
684
684
685 comment_id = response.json.get('comment_id', None)
685 comment_id = response.json.get('comment_id', None)
686 assert comment_id
686 assert comment_id
687 test_text = 'init'
687 test_text = 'init'
688 response = self.app.post(
688 response = self.app.post(
689 route_path(
689 route_path(
690 'pullrequest_comment_edit',
690 'pullrequest_comment_edit',
691 repo_name=target_scm_name,
691 repo_name=target_scm_name,
692 pull_request_id=pull_request.pull_request_id,
692 pull_request_id=pull_request.pull_request_id,
693 comment_id=comment_id,
693 comment_id=comment_id,
694 ),
694 ),
695 extra_environ=xhr_header,
695 extra_environ=xhr_header,
696 params={
696 params={
697 'csrf_token': csrf_token,
697 'csrf_token': csrf_token,
698 'text': test_text,
698 'text': test_text,
699 'version': '0',
699 'version': '0',
700 },
700 },
701 status=404,
701 status=404,
702
702
703 )
703 )
704 assert response.status_int == 404
704 assert response.status_int == 404
705
705
706 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
706 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
707 pull_request = pr_util.create_pull_request()
707 pull_request = pr_util.create_pull_request()
708 target_scm = pull_request.target_repo.scm_instance()
708 target_scm = pull_request.target_repo.scm_instance()
709 target_scm_name = target_scm.name
709 target_scm_name = target_scm.name
710
710
711 response = self.app.post(
711 response = self.app.post(
712 route_path(
712 route_path(
713 'pullrequest_comment_create',
713 'pullrequest_comment_create',
714 repo_name=target_scm_name,
714 repo_name=target_scm_name,
715 pull_request_id=pull_request.pull_request_id),
715 pull_request_id=pull_request.pull_request_id),
716 params={
716 params={
717 'csrf_token': csrf_token,
717 'csrf_token': csrf_token,
718 'text': 'init',
718 'text': 'init',
719 },
719 },
720 extra_environ=xhr_header,
720 extra_environ=xhr_header,
721 )
721 )
722 assert response.json
722 assert response.json
723 comment_id = response.json.get('comment_id', None)
723 comment_id = response.json.get('comment_id', None)
724 assert comment_id
724 assert comment_id
725
725
726 test_text = 'test'
726 test_text = 'test'
727 self.app.post(
727 self.app.post(
728 route_path(
728 route_path(
729 'pullrequest_comment_edit',
729 'pullrequest_comment_edit',
730 repo_name=target_scm_name,
730 repo_name=target_scm_name,
731 pull_request_id=pull_request.pull_request_id,
731 pull_request_id=pull_request.pull_request_id,
732 comment_id=comment_id,
732 comment_id=comment_id,
733 ),
733 ),
734 extra_environ=xhr_header,
734 extra_environ=xhr_header,
735 params={
735 params={
736 'csrf_token': csrf_token,
736 'csrf_token': csrf_token,
737 'text': test_text,
737 'text': test_text,
738 'version': '0',
738 'version': '0',
739 },
739 },
740
740
741 )
741 )
742 test_text_v2 = 'test_v2'
742 test_text_v2 = 'test_v2'
743 response = self.app.post(
743 response = self.app.post(
744 route_path(
744 route_path(
745 'pullrequest_comment_edit',
745 'pullrequest_comment_edit',
746 repo_name=target_scm_name,
746 repo_name=target_scm_name,
747 pull_request_id=pull_request.pull_request_id,
747 pull_request_id=pull_request.pull_request_id,
748 comment_id=comment_id,
748 comment_id=comment_id,
749 ),
749 ),
750 extra_environ=xhr_header,
750 extra_environ=xhr_header,
751 params={
751 params={
752 'csrf_token': csrf_token,
752 'csrf_token': csrf_token,
753 'text': test_text_v2,
753 'text': test_text_v2,
754 'version': '0',
754 'version': '0',
755 },
755 },
756 status=409,
756 status=409,
757 )
757 )
758 assert response.status_int == 409
758 assert response.status_int == 409
759
759
760 text_form_db = ChangesetComment.query().filter(
760 text_form_db = ChangesetComment.query().filter(
761 ChangesetComment.comment_id == comment_id).first().text
761 ChangesetComment.comment_id == comment_id).first().text
762
762
763 assert test_text == text_form_db
763 assert test_text == text_form_db
764 assert test_text_v2 != text_form_db
764 assert test_text_v2 != text_form_db
765
765
766 def test_comment_and_comment_edit_permissions_forbidden(
766 def test_comment_and_comment_edit_permissions_forbidden(
767 self, autologin_regular_user, user_regular, user_admin, pr_util,
767 self, autologin_regular_user, user_regular, user_admin, pr_util,
768 csrf_token, xhr_header):
768 csrf_token, xhr_header):
769 pull_request = pr_util.create_pull_request(
769 pull_request = pr_util.create_pull_request(
770 author=user_admin.username, enable_notifications=False)
770 author=user_admin.username, enable_notifications=False)
771 comment = CommentsModel().create(
771 comment = CommentsModel().create(
772 text='test',
772 text='test',
773 repo=pull_request.target_repo.scm_instance().name,
773 repo=pull_request.target_repo.scm_instance().name,
774 user=user_admin,
774 user=user_admin,
775 pull_request=pull_request,
775 pull_request=pull_request,
776 )
776 )
777 response = self.app.post(
777 response = self.app.post(
778 route_path(
778 route_path(
779 'pullrequest_comment_edit',
779 'pullrequest_comment_edit',
780 repo_name=pull_request.target_repo.scm_instance().name,
780 repo_name=pull_request.target_repo.scm_instance().name,
781 pull_request_id=pull_request.pull_request_id,
781 pull_request_id=pull_request.pull_request_id,
782 comment_id=comment.comment_id,
782 comment_id=comment.comment_id,
783 ),
783 ),
784 extra_environ=xhr_header,
784 extra_environ=xhr_header,
785 params={
785 params={
786 'csrf_token': csrf_token,
786 'csrf_token': csrf_token,
787 'text': 'test_text',
787 'text': 'test_text',
788 },
788 },
789 status=403,
789 status=403,
790 )
790 )
791 assert response.status_int == 403
791 assert response.status_int == 403
792
792
793 def test_create_pull_request(self, backend, csrf_token):
793 def test_create_pull_request(self, backend, csrf_token):
794 commits = [
794 commits = [
795 {'message': 'ancestor'},
795 {'message': 'ancestor'},
796 {'message': 'change'},
796 {'message': 'change'},
797 {'message': 'change2'},
797 {'message': 'change2'},
798 ]
798 ]
799 commit_ids = backend.create_master_repo(commits)
799 commit_ids = backend.create_master_repo(commits)
800 target = backend.create_repo(heads=['ancestor'])
800 target = backend.create_repo(heads=['ancestor'])
801 source = backend.create_repo(heads=['change2'])
801 source = backend.create_repo(heads=['change2'])
802
802
803 response = self.app.post(
803 response = self.app.post(
804 route_path('pullrequest_create', repo_name=source.repo_name),
804 route_path('pullrequest_create', repo_name=source.repo_name),
805 [
805 [
806 ('source_repo', source.repo_name),
806 ('source_repo', source.repo_name),
807 ('source_ref', 'branch:default:' + commit_ids['change2']),
807 ('source_ref', 'branch:default:' + commit_ids['change2']),
808 ('target_repo', target.repo_name),
808 ('target_repo', target.repo_name),
809 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
809 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
810 ('common_ancestor', commit_ids['ancestor']),
810 ('common_ancestor', commit_ids['ancestor']),
811 ('pullrequest_title', 'Title'),
811 ('pullrequest_title', 'Title'),
812 ('pullrequest_desc', 'Description'),
812 ('pullrequest_desc', 'Description'),
813 ('description_renderer', 'markdown'),
813 ('description_renderer', 'markdown'),
814 ('__start__', 'review_members:sequence'),
814 ('__start__', 'review_members:sequence'),
815 ('__start__', 'reviewer:mapping'),
815 ('__start__', 'reviewer:mapping'),
816 ('user_id', '1'),
816 ('user_id', '1'),
817 ('__start__', 'reasons:sequence'),
817 ('__start__', 'reasons:sequence'),
818 ('reason', 'Some reason'),
818 ('reason', 'Some reason'),
819 ('__end__', 'reasons:sequence'),
819 ('__end__', 'reasons:sequence'),
820 ('__start__', 'rules:sequence'),
820 ('__start__', 'rules:sequence'),
821 ('__end__', 'rules:sequence'),
821 ('__end__', 'rules:sequence'),
822 ('mandatory', 'False'),
822 ('mandatory', 'False'),
823 ('__end__', 'reviewer:mapping'),
823 ('__end__', 'reviewer:mapping'),
824 ('__end__', 'review_members:sequence'),
824 ('__end__', 'review_members:sequence'),
825 ('__start__', 'revisions:sequence'),
825 ('__start__', 'revisions:sequence'),
826 ('revisions', commit_ids['change']),
826 ('revisions', commit_ids['change']),
827 ('revisions', commit_ids['change2']),
827 ('revisions', commit_ids['change2']),
828 ('__end__', 'revisions:sequence'),
828 ('__end__', 'revisions:sequence'),
829 ('user', ''),
829 ('user', ''),
830 ('csrf_token', csrf_token),
830 ('csrf_token', csrf_token),
831 ],
831 ],
832 status=302)
832 status=302)
833
833
834 location = response.headers['Location']
834 location = response.headers['Location']
835 pull_request_id = location.rsplit('/', 1)[1]
835 pull_request_id = location.rsplit('/', 1)[1]
836 assert pull_request_id != 'new'
836 assert pull_request_id != 'new'
837 pull_request = PullRequest.get(int(pull_request_id))
837 pull_request = PullRequest.get(int(pull_request_id))
838
838
839 # check that we have now both revisions
839 # check that we have now both revisions
840 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
840 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
841 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
841 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
842 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
842 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
843 assert pull_request.target_ref == expected_target_ref
843 assert pull_request.target_ref == expected_target_ref
844
844
845 def test_reviewer_notifications(self, backend, csrf_token):
845 def test_reviewer_notifications(self, backend, csrf_token):
846 # We have to use the app.post for this test so it will create the
846 # We have to use the app.post for this test so it will create the
847 # notifications properly with the new PR
847 # notifications properly with the new PR
848 commits = [
848 commits = [
849 {'message': 'ancestor',
849 {'message': 'ancestor',
850 'added': [FileNode('file_A', content='content_of_ancestor')]},
850 'added': [FileNode('file_A', content='content_of_ancestor')]},
851 {'message': 'change',
851 {'message': 'change',
852 'added': [FileNode('file_a', content='content_of_change')]},
852 'added': [FileNode('file_a', content='content_of_change')]},
853 {'message': 'change-child'},
853 {'message': 'change-child'},
854 {'message': 'ancestor-child', 'parents': ['ancestor'],
854 {'message': 'ancestor-child', 'parents': ['ancestor'],
855 'added': [
855 'added': [
856 FileNode('file_B', content='content_of_ancestor_child')]},
856 FileNode('file_B', content='content_of_ancestor_child')]},
857 {'message': 'ancestor-child-2'},
857 {'message': 'ancestor-child-2'},
858 ]
858 ]
859 commit_ids = backend.create_master_repo(commits)
859 commit_ids = backend.create_master_repo(commits)
860 target = backend.create_repo(heads=['ancestor-child'])
860 target = backend.create_repo(heads=['ancestor-child'])
861 source = backend.create_repo(heads=['change'])
861 source = backend.create_repo(heads=['change'])
862
862
863 response = self.app.post(
863 response = self.app.post(
864 route_path('pullrequest_create', repo_name=source.repo_name),
864 route_path('pullrequest_create', repo_name=source.repo_name),
865 [
865 [
866 ('source_repo', source.repo_name),
866 ('source_repo', source.repo_name),
867 ('source_ref', 'branch:default:' + commit_ids['change']),
867 ('source_ref', 'branch:default:' + commit_ids['change']),
868 ('target_repo', target.repo_name),
868 ('target_repo', target.repo_name),
869 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
869 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
870 ('common_ancestor', commit_ids['ancestor']),
870 ('common_ancestor', commit_ids['ancestor']),
871 ('pullrequest_title', 'Title'),
871 ('pullrequest_title', 'Title'),
872 ('pullrequest_desc', 'Description'),
872 ('pullrequest_desc', 'Description'),
873 ('description_renderer', 'markdown'),
873 ('description_renderer', 'markdown'),
874 ('__start__', 'review_members:sequence'),
874 ('__start__', 'review_members:sequence'),
875 ('__start__', 'reviewer:mapping'),
875 ('__start__', 'reviewer:mapping'),
876 ('user_id', '2'),
876 ('user_id', '2'),
877 ('__start__', 'reasons:sequence'),
877 ('__start__', 'reasons:sequence'),
878 ('reason', 'Some reason'),
878 ('reason', 'Some reason'),
879 ('__end__', 'reasons:sequence'),
879 ('__end__', 'reasons:sequence'),
880 ('__start__', 'rules:sequence'),
880 ('__start__', 'rules:sequence'),
881 ('__end__', 'rules:sequence'),
881 ('__end__', 'rules:sequence'),
882 ('mandatory', 'False'),
882 ('mandatory', 'False'),
883 ('__end__', 'reviewer:mapping'),
883 ('__end__', 'reviewer:mapping'),
884 ('__end__', 'review_members:sequence'),
884 ('__end__', 'review_members:sequence'),
885 ('__start__', 'revisions:sequence'),
885 ('__start__', 'revisions:sequence'),
886 ('revisions', commit_ids['change']),
886 ('revisions', commit_ids['change']),
887 ('__end__', 'revisions:sequence'),
887 ('__end__', 'revisions:sequence'),
888 ('user', ''),
888 ('user', ''),
889 ('csrf_token', csrf_token),
889 ('csrf_token', csrf_token),
890 ],
890 ],
891 status=302)
891 status=302)
892
892
893 location = response.headers['Location']
893 location = response.headers['Location']
894
894
895 pull_request_id = location.rsplit('/', 1)[1]
895 pull_request_id = location.rsplit('/', 1)[1]
896 assert pull_request_id != 'new'
896 assert pull_request_id != 'new'
897 pull_request = PullRequest.get(int(pull_request_id))
897 pull_request = PullRequest.get(int(pull_request_id))
898
898
899 # Check that a notification was made
899 # Check that a notification was made
900 notifications = Notification.query()\
900 notifications = Notification.query()\
901 .filter(Notification.created_by == pull_request.author.user_id,
901 .filter(Notification.created_by == pull_request.author.user_id,
902 Notification.type_ == Notification.TYPE_PULL_REQUEST,
902 Notification.type_ == Notification.TYPE_PULL_REQUEST,
903 Notification.subject.contains(
903 Notification.subject.contains(
904 "requested a pull request review. !%s" % pull_request_id))
904 "requested a pull request review. !%s" % pull_request_id))
905 assert len(notifications.all()) == 1
905 assert len(notifications.all()) == 1
906
906
907 # Change reviewers and check that a notification was made
907 # Change reviewers and check that a notification was made
908 PullRequestModel().update_reviewers(
908 PullRequestModel().update_reviewers(
909 pull_request.pull_request_id, [(1, [], False, [])],
909 pull_request.pull_request_id, [(1, [], False, [])],
910 pull_request.author)
910 pull_request.author)
911 assert len(notifications.all()) == 2
911 assert len(notifications.all()) == 2
912
912
913 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
913 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
914 csrf_token):
914 csrf_token):
915 commits = [
915 commits = [
916 {'message': 'ancestor',
916 {'message': 'ancestor',
917 'added': [FileNode('file_A', content='content_of_ancestor')]},
917 'added': [FileNode('file_A', content='content_of_ancestor')]},
918 {'message': 'change',
918 {'message': 'change',
919 'added': [FileNode('file_a', content='content_of_change')]},
919 'added': [FileNode('file_a', content='content_of_change')]},
920 {'message': 'change-child'},
920 {'message': 'change-child'},
921 {'message': 'ancestor-child', 'parents': ['ancestor'],
921 {'message': 'ancestor-child', 'parents': ['ancestor'],
922 'added': [
922 'added': [
923 FileNode('file_B', content='content_of_ancestor_child')]},
923 FileNode('file_B', content='content_of_ancestor_child')]},
924 {'message': 'ancestor-child-2'},
924 {'message': 'ancestor-child-2'},
925 ]
925 ]
926 commit_ids = backend.create_master_repo(commits)
926 commit_ids = backend.create_master_repo(commits)
927 target = backend.create_repo(heads=['ancestor-child'])
927 target = backend.create_repo(heads=['ancestor-child'])
928 source = backend.create_repo(heads=['change'])
928 source = backend.create_repo(heads=['change'])
929
929
930 response = self.app.post(
930 response = self.app.post(
931 route_path('pullrequest_create', repo_name=source.repo_name),
931 route_path('pullrequest_create', repo_name=source.repo_name),
932 [
932 [
933 ('source_repo', source.repo_name),
933 ('source_repo', source.repo_name),
934 ('source_ref', 'branch:default:' + commit_ids['change']),
934 ('source_ref', 'branch:default:' + commit_ids['change']),
935 ('target_repo', target.repo_name),
935 ('target_repo', target.repo_name),
936 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
936 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
937 ('common_ancestor', commit_ids['ancestor']),
937 ('common_ancestor', commit_ids['ancestor']),
938 ('pullrequest_title', 'Title'),
938 ('pullrequest_title', 'Title'),
939 ('pullrequest_desc', 'Description'),
939 ('pullrequest_desc', 'Description'),
940 ('description_renderer', 'markdown'),
940 ('description_renderer', 'markdown'),
941 ('__start__', 'review_members:sequence'),
941 ('__start__', 'review_members:sequence'),
942 ('__start__', 'reviewer:mapping'),
942 ('__start__', 'reviewer:mapping'),
943 ('user_id', '1'),
943 ('user_id', '1'),
944 ('__start__', 'reasons:sequence'),
944 ('__start__', 'reasons:sequence'),
945 ('reason', 'Some reason'),
945 ('reason', 'Some reason'),
946 ('__end__', 'reasons:sequence'),
946 ('__end__', 'reasons:sequence'),
947 ('__start__', 'rules:sequence'),
947 ('__start__', 'rules:sequence'),
948 ('__end__', 'rules:sequence'),
948 ('__end__', 'rules:sequence'),
949 ('mandatory', 'False'),
949 ('mandatory', 'False'),
950 ('__end__', 'reviewer:mapping'),
950 ('__end__', 'reviewer:mapping'),
951 ('__end__', 'review_members:sequence'),
951 ('__end__', 'review_members:sequence'),
952 ('__start__', 'revisions:sequence'),
952 ('__start__', 'revisions:sequence'),
953 ('revisions', commit_ids['change']),
953 ('revisions', commit_ids['change']),
954 ('__end__', 'revisions:sequence'),
954 ('__end__', 'revisions:sequence'),
955 ('user', ''),
955 ('user', ''),
956 ('csrf_token', csrf_token),
956 ('csrf_token', csrf_token),
957 ],
957 ],
958 status=302)
958 status=302)
959
959
960 location = response.headers['Location']
960 location = response.headers['Location']
961
961
962 pull_request_id = location.rsplit('/', 1)[1]
962 pull_request_id = location.rsplit('/', 1)[1]
963 assert pull_request_id != 'new'
963 assert pull_request_id != 'new'
964 pull_request = PullRequest.get(int(pull_request_id))
964 pull_request = PullRequest.get(int(pull_request_id))
965
965
966 # target_ref has to point to the ancestor's commit_id in order to
966 # target_ref has to point to the ancestor's commit_id in order to
967 # show the correct diff
967 # show the correct diff
968 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
968 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
969 assert pull_request.target_ref == expected_target_ref
969 assert pull_request.target_ref == expected_target_ref
970
970
971 # Check generated diff contents
971 # Check generated diff contents
972 response = response.follow()
972 response = response.follow()
973 response.mustcontain(no=['content_of_ancestor'])
973 response.mustcontain(no=['content_of_ancestor'])
974 response.mustcontain(no=['content_of_ancestor-child'])
974 response.mustcontain(no=['content_of_ancestor-child'])
975 response.mustcontain('content_of_change')
975 response.mustcontain('content_of_change')
976
976
977 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
977 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
978 # Clear any previous calls to rcextensions
978 # Clear any previous calls to rcextensions
979 rhodecode.EXTENSIONS.calls.clear()
979 rhodecode.EXTENSIONS.calls.clear()
980
980
981 pull_request = pr_util.create_pull_request(
981 pull_request = pr_util.create_pull_request(
982 approved=True, mergeable=True)
982 approved=True, mergeable=True)
983 pull_request_id = pull_request.pull_request_id
983 pull_request_id = pull_request.pull_request_id
984 repo_name = pull_request.target_repo.scm_instance().name,
984 repo_name = pull_request.target_repo.scm_instance().name,
985
985
986 url = route_path('pullrequest_merge',
986 url = route_path('pullrequest_merge',
987 repo_name=str(repo_name[0]),
987 repo_name=str(repo_name[0]),
988 pull_request_id=pull_request_id)
988 pull_request_id=pull_request_id)
989 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
989 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
990
990
991 pull_request = PullRequest.get(pull_request_id)
991 pull_request = PullRequest.get(pull_request_id)
992
992
993 assert response.status_int == 200
993 assert response.status_int == 200
994 assert pull_request.is_closed()
994 assert pull_request.is_closed()
995 assert_pull_request_status(
995 assert_pull_request_status(
996 pull_request, ChangesetStatus.STATUS_APPROVED)
996 pull_request, ChangesetStatus.STATUS_APPROVED)
997
997
998 # Check the relevant log entries were added
998 # Check the relevant log entries were added
999 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
999 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
1000 actions = [log.action for log in user_logs]
1000 actions = [log.action for log in user_logs]
1001 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
1001 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
1002 expected_actions = [
1002 expected_actions = [
1003 u'repo.pull_request.close',
1003 u'repo.pull_request.close',
1004 u'repo.pull_request.merge',
1004 u'repo.pull_request.merge',
1005 u'repo.pull_request.comment.create'
1005 u'repo.pull_request.comment.create'
1006 ]
1006 ]
1007 assert actions == expected_actions
1007 assert actions == expected_actions
1008
1008
1009 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1009 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1010 actions = [log for log in user_logs]
1010 actions = [log for log in user_logs]
1011 assert actions[-1].action == 'user.push'
1011 assert actions[-1].action == 'user.push'
1012 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
1012 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
1013
1013
1014 # Check post_push rcextension was really executed
1014 # Check post_push rcextension was really executed
1015 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1015 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1016 assert len(push_calls) == 1
1016 assert len(push_calls) == 1
1017 unused_last_call_args, last_call_kwargs = push_calls[0]
1017 unused_last_call_args, last_call_kwargs = push_calls[0]
1018 assert last_call_kwargs['action'] == 'push'
1018 assert last_call_kwargs['action'] == 'push'
1019 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1019 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1020
1020
1021 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1021 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1022 pull_request = pr_util.create_pull_request(mergeable=False)
1022 pull_request = pr_util.create_pull_request(mergeable=False)
1023 pull_request_id = pull_request.pull_request_id
1023 pull_request_id = pull_request.pull_request_id
1024 pull_request = PullRequest.get(pull_request_id)
1024 pull_request = PullRequest.get(pull_request_id)
1025
1025
1026 response = self.app.post(
1026 response = self.app.post(
1027 route_path('pullrequest_merge',
1027 route_path('pullrequest_merge',
1028 repo_name=pull_request.target_repo.scm_instance().name,
1028 repo_name=pull_request.target_repo.scm_instance().name,
1029 pull_request_id=pull_request.pull_request_id),
1029 pull_request_id=pull_request.pull_request_id),
1030 params={'csrf_token': csrf_token}).follow()
1030 params={'csrf_token': csrf_token}).follow()
1031
1031
1032 assert response.status_int == 200
1032 assert response.status_int == 200
1033 response.mustcontain(
1033 response.mustcontain(
1034 'Merge is not currently possible because of below failed checks.')
1034 'Merge is not currently possible because of below failed checks.')
1035 response.mustcontain('Server-side pull request merging is disabled.')
1035 response.mustcontain('Server-side pull request merging is disabled.')
1036
1036
1037 @pytest.mark.skip_backends('svn')
1037 @pytest.mark.skip_backends('svn')
1038 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1038 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1039 pull_request = pr_util.create_pull_request(mergeable=True)
1039 pull_request = pr_util.create_pull_request(mergeable=True)
1040 pull_request_id = pull_request.pull_request_id
1040 pull_request_id = pull_request.pull_request_id
1041 repo_name = pull_request.target_repo.scm_instance().name
1041 repo_name = pull_request.target_repo.scm_instance().name
1042
1042
1043 response = self.app.post(
1043 response = self.app.post(
1044 route_path('pullrequest_merge',
1044 route_path('pullrequest_merge',
1045 repo_name=repo_name, pull_request_id=pull_request_id),
1045 repo_name=repo_name, pull_request_id=pull_request_id),
1046 params={'csrf_token': csrf_token}).follow()
1046 params={'csrf_token': csrf_token}).follow()
1047
1047
1048 assert response.status_int == 200
1048 assert response.status_int == 200
1049
1049
1050 response.mustcontain(
1050 response.mustcontain(
1051 'Merge is not currently possible because of below failed checks.')
1051 'Merge is not currently possible because of below failed checks.')
1052 response.mustcontain('Pull request reviewer approval is pending.')
1052 response.mustcontain('Pull request reviewer approval is pending.')
1053
1053
1054 def test_merge_pull_request_renders_failure_reason(
1054 def test_merge_pull_request_renders_failure_reason(
1055 self, user_regular, csrf_token, pr_util):
1055 self, user_regular, csrf_token, pr_util):
1056 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1056 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1057 pull_request_id = pull_request.pull_request_id
1057 pull_request_id = pull_request.pull_request_id
1058 repo_name = pull_request.target_repo.scm_instance().name
1058 repo_name = pull_request.target_repo.scm_instance().name
1059
1059
1060 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1060 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1061 MergeFailureReason.PUSH_FAILED,
1061 MergeFailureReason.PUSH_FAILED,
1062 metadata={'target': 'shadow repo',
1062 metadata={'target': 'shadow repo',
1063 'merge_commit': 'xxx'})
1063 'merge_commit': 'xxx'})
1064 model_patcher = mock.patch.multiple(
1064 model_patcher = mock.patch.multiple(
1065 PullRequestModel,
1065 PullRequestModel,
1066 merge_repo=mock.Mock(return_value=merge_resp),
1066 merge_repo=mock.Mock(return_value=merge_resp),
1067 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1067 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1068
1068
1069 with model_patcher:
1069 with model_patcher:
1070 response = self.app.post(
1070 response = self.app.post(
1071 route_path('pullrequest_merge',
1071 route_path('pullrequest_merge',
1072 repo_name=repo_name,
1072 repo_name=repo_name,
1073 pull_request_id=pull_request_id),
1073 pull_request_id=pull_request_id),
1074 params={'csrf_token': csrf_token}, status=302)
1074 params={'csrf_token': csrf_token}, status=302)
1075
1075
1076 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1076 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1077 metadata={'target': 'shadow repo',
1077 metadata={'target': 'shadow repo',
1078 'merge_commit': 'xxx'})
1078 'merge_commit': 'xxx'})
1079 assert_session_flash(response, merge_resp.merge_status_message)
1079 assert_session_flash(response, merge_resp.merge_status_message)
1080
1080
1081 def test_update_source_revision(self, backend, csrf_token):
1081 def test_update_source_revision(self, backend, csrf_token):
1082 commits = [
1082 commits = [
1083 {'message': 'ancestor'},
1083 {'message': 'ancestor'},
1084 {'message': 'change'},
1084 {'message': 'change'},
1085 {'message': 'change-2'},
1085 {'message': 'change-2'},
1086 ]
1086 ]
1087 commit_ids = backend.create_master_repo(commits)
1087 commit_ids = backend.create_master_repo(commits)
1088 target = backend.create_repo(heads=['ancestor'])
1088 target = backend.create_repo(heads=['ancestor'])
1089 source = backend.create_repo(heads=['change'])
1089 source = backend.create_repo(heads=['change'])
1090
1090
1091 # create pr from a in source to A in target
1091 # create pr from a in source to A in target
1092 pull_request = PullRequest()
1092 pull_request = PullRequest()
1093
1093
1094 pull_request.source_repo = source
1094 pull_request.source_repo = source
1095 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1095 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1096 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1096 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1097
1097
1098 pull_request.target_repo = target
1098 pull_request.target_repo = target
1099 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1099 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1100 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1100 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1101
1101
1102 pull_request.revisions = [commit_ids['change']]
1102 pull_request.revisions = [commit_ids['change']]
1103 pull_request.title = u"Test"
1103 pull_request.title = u"Test"
1104 pull_request.description = u"Description"
1104 pull_request.description = u"Description"
1105 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1105 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1106 pull_request.pull_request_state = PullRequest.STATE_CREATED
1106 pull_request.pull_request_state = PullRequest.STATE_CREATED
1107 Session().add(pull_request)
1107 Session().add(pull_request)
1108 Session().commit()
1108 Session().commit()
1109 pull_request_id = pull_request.pull_request_id
1109 pull_request_id = pull_request.pull_request_id
1110
1110
1111 # source has ancestor - change - change-2
1111 # source has ancestor - change - change-2
1112 backend.pull_heads(source, heads=['change-2'])
1112 backend.pull_heads(source, heads=['change-2'])
1113 target_repo_name = target.repo_name
1113
1114
1114 # update PR
1115 # update PR
1115 self.app.post(
1116 self.app.post(
1116 route_path('pullrequest_update',
1117 route_path('pullrequest_update',
1117 repo_name=target.repo_name, pull_request_id=pull_request_id),
1118 repo_name=target_repo_name, pull_request_id=pull_request_id),
1118 params={'update_commits': 'true', 'csrf_token': csrf_token})
1119 params={'update_commits': 'true', 'csrf_token': csrf_token})
1119
1120
1120 response = self.app.get(
1121 response = self.app.get(
1121 route_path('pullrequest_show',
1122 route_path('pullrequest_show',
1122 repo_name=target.repo_name,
1123 repo_name=target_repo_name,
1123 pull_request_id=pull_request.pull_request_id))
1124 pull_request_id=pull_request.pull_request_id))
1124
1125
1125 assert response.status_int == 200
1126 assert response.status_int == 200
1126 response.mustcontain('Pull request updated to')
1127 response.mustcontain('Pull request updated to')
1127 response.mustcontain('with 1 added, 0 removed commits.')
1128 response.mustcontain('with 1 added, 0 removed commits.')
1128
1129
1129 # check that we have now both revisions
1130 # check that we have now both revisions
1130 pull_request = PullRequest.get(pull_request_id)
1131 pull_request = PullRequest.get(pull_request_id)
1131 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1132 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1132
1133
1133 def test_update_target_revision(self, backend, csrf_token):
1134 def test_update_target_revision(self, backend, csrf_token):
1134 commits = [
1135 commits = [
1135 {'message': 'ancestor'},
1136 {'message': 'ancestor'},
1136 {'message': 'change'},
1137 {'message': 'change'},
1137 {'message': 'ancestor-new', 'parents': ['ancestor']},
1138 {'message': 'ancestor-new', 'parents': ['ancestor']},
1138 {'message': 'change-rebased'},
1139 {'message': 'change-rebased'},
1139 ]
1140 ]
1140 commit_ids = backend.create_master_repo(commits)
1141 commit_ids = backend.create_master_repo(commits)
1141 target = backend.create_repo(heads=['ancestor'])
1142 target = backend.create_repo(heads=['ancestor'])
1142 source = backend.create_repo(heads=['change'])
1143 source = backend.create_repo(heads=['change'])
1143
1144
1144 # create pr from a in source to A in target
1145 # create pr from a in source to A in target
1145 pull_request = PullRequest()
1146 pull_request = PullRequest()
1146
1147
1147 pull_request.source_repo = source
1148 pull_request.source_repo = source
1148 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1149 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1149 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1150 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1150
1151
1151 pull_request.target_repo = target
1152 pull_request.target_repo = target
1152 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1153 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1153 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1154 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1154
1155
1155 pull_request.revisions = [commit_ids['change']]
1156 pull_request.revisions = [commit_ids['change']]
1156 pull_request.title = u"Test"
1157 pull_request.title = u"Test"
1157 pull_request.description = u"Description"
1158 pull_request.description = u"Description"
1158 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1159 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1159 pull_request.pull_request_state = PullRequest.STATE_CREATED
1160 pull_request.pull_request_state = PullRequest.STATE_CREATED
1160
1161
1161 Session().add(pull_request)
1162 Session().add(pull_request)
1162 Session().commit()
1163 Session().commit()
1163 pull_request_id = pull_request.pull_request_id
1164 pull_request_id = pull_request.pull_request_id
1164
1165
1165 # target has ancestor - ancestor-new
1166 # target has ancestor - ancestor-new
1166 # source has ancestor - ancestor-new - change-rebased
1167 # source has ancestor - ancestor-new - change-rebased
1167 backend.pull_heads(target, heads=['ancestor-new'])
1168 backend.pull_heads(target, heads=['ancestor-new'])
1168 backend.pull_heads(source, heads=['change-rebased'])
1169 backend.pull_heads(source, heads=['change-rebased'])
1170 target_repo_name = target.repo_name
1169
1171
1170 # update PR
1172 # update PR
1171 url = route_path('pullrequest_update',
1173 url = route_path('pullrequest_update',
1172 repo_name=target.repo_name,
1174 repo_name=target_repo_name,
1173 pull_request_id=pull_request_id)
1175 pull_request_id=pull_request_id)
1174 self.app.post(url,
1176 self.app.post(url,
1175 params={'update_commits': 'true', 'csrf_token': csrf_token},
1177 params={'update_commits': 'true', 'csrf_token': csrf_token},
1176 status=200)
1178 status=200)
1177
1179
1178 # check that we have now both revisions
1180 # check that we have now both revisions
1179 pull_request = PullRequest.get(pull_request_id)
1181 pull_request = PullRequest.get(pull_request_id)
1180 assert pull_request.revisions == [commit_ids['change-rebased']]
1182 assert pull_request.revisions == [commit_ids['change-rebased']]
1181 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1183 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1182 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1184 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1183
1185
1184 response = self.app.get(
1186 response = self.app.get(
1185 route_path('pullrequest_show',
1187 route_path('pullrequest_show',
1186 repo_name=target.repo_name,
1188 repo_name=target_repo_name,
1187 pull_request_id=pull_request.pull_request_id))
1189 pull_request_id=pull_request.pull_request_id))
1188 assert response.status_int == 200
1190 assert response.status_int == 200
1189 response.mustcontain('Pull request updated to')
1191 response.mustcontain('Pull request updated to')
1190 response.mustcontain('with 1 added, 1 removed commits.')
1192 response.mustcontain('with 1 added, 1 removed commits.')
1191
1193
1192 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1194 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1193 backend = backend_git
1195 backend = backend_git
1194 commits = [
1196 commits = [
1195 {'message': 'master-commit-1'},
1197 {'message': 'master-commit-1'},
1196 {'message': 'master-commit-2-change-1'},
1198 {'message': 'master-commit-2-change-1'},
1197 {'message': 'master-commit-3-change-2'},
1199 {'message': 'master-commit-3-change-2'},
1198
1200
1199 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1201 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1200 {'message': 'feat-commit-2'},
1202 {'message': 'feat-commit-2'},
1201 ]
1203 ]
1202 commit_ids = backend.create_master_repo(commits)
1204 commit_ids = backend.create_master_repo(commits)
1203 target = backend.create_repo(heads=['master-commit-3-change-2'])
1205 target = backend.create_repo(heads=['master-commit-3-change-2'])
1204 source = backend.create_repo(heads=['feat-commit-2'])
1206 source = backend.create_repo(heads=['feat-commit-2'])
1205
1207
1206 # create pr from a in source to A in target
1208 # create pr from a in source to A in target
1207 pull_request = PullRequest()
1209 pull_request = PullRequest()
1208 pull_request.source_repo = source
1210 pull_request.source_repo = source
1209
1211
1210 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1212 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1211 branch=backend.default_branch_name,
1213 branch=backend.default_branch_name,
1212 commit_id=commit_ids['master-commit-3-change-2'])
1214 commit_id=commit_ids['master-commit-3-change-2'])
1213
1215
1214 pull_request.target_repo = target
1216 pull_request.target_repo = target
1215 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1217 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1216 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1218 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1217
1219
1218 pull_request.revisions = [
1220 pull_request.revisions = [
1219 commit_ids['feat-commit-1'],
1221 commit_ids['feat-commit-1'],
1220 commit_ids['feat-commit-2']
1222 commit_ids['feat-commit-2']
1221 ]
1223 ]
1222 pull_request.title = u"Test"
1224 pull_request.title = u"Test"
1223 pull_request.description = u"Description"
1225 pull_request.description = u"Description"
1224 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1226 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1225 pull_request.pull_request_state = PullRequest.STATE_CREATED
1227 pull_request.pull_request_state = PullRequest.STATE_CREATED
1226 Session().add(pull_request)
1228 Session().add(pull_request)
1227 Session().commit()
1229 Session().commit()
1228 pull_request_id = pull_request.pull_request_id
1230 pull_request_id = pull_request.pull_request_id
1229
1231
1230 # PR is created, now we simulate a force-push into target,
1232 # PR is created, now we simulate a force-push into target,
1231 # that drops a 2 last commits
1233 # that drops a 2 last commits
1232 vcsrepo = target.scm_instance()
1234 vcsrepo = target.scm_instance()
1233 vcsrepo.config.clear_section('hooks')
1235 vcsrepo.config.clear_section('hooks')
1234 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1236 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1237 target_repo_name = target.repo_name
1235
1238
1236 # update PR
1239 # update PR
1237 url = route_path('pullrequest_update',
1240 url = route_path('pullrequest_update',
1238 repo_name=target.repo_name,
1241 repo_name=target_repo_name,
1239 pull_request_id=pull_request_id)
1242 pull_request_id=pull_request_id)
1240 self.app.post(url,
1243 self.app.post(url,
1241 params={'update_commits': 'true', 'csrf_token': csrf_token},
1244 params={'update_commits': 'true', 'csrf_token': csrf_token},
1242 status=200)
1245 status=200)
1243
1246
1244 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
1247 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1245 assert response.status_int == 200
1248 assert response.status_int == 200
1246 response.mustcontain('Pull request updated to')
1249 response.mustcontain('Pull request updated to')
1247 response.mustcontain('with 0 added, 0 removed commits.')
1250 response.mustcontain('with 0 added, 0 removed commits.')
1248
1251
1249 def test_update_of_ancestor_reference(self, backend, csrf_token):
1252 def test_update_of_ancestor_reference(self, backend, csrf_token):
1250 commits = [
1253 commits = [
1251 {'message': 'ancestor'},
1254 {'message': 'ancestor'},
1252 {'message': 'change'},
1255 {'message': 'change'},
1253 {'message': 'change-2'},
1256 {'message': 'change-2'},
1254 {'message': 'ancestor-new', 'parents': ['ancestor']},
1257 {'message': 'ancestor-new', 'parents': ['ancestor']},
1255 {'message': 'change-rebased'},
1258 {'message': 'change-rebased'},
1256 ]
1259 ]
1257 commit_ids = backend.create_master_repo(commits)
1260 commit_ids = backend.create_master_repo(commits)
1258 target = backend.create_repo(heads=['ancestor'])
1261 target = backend.create_repo(heads=['ancestor'])
1259 source = backend.create_repo(heads=['change'])
1262 source = backend.create_repo(heads=['change'])
1260
1263
1261 # create pr from a in source to A in target
1264 # create pr from a in source to A in target
1262 pull_request = PullRequest()
1265 pull_request = PullRequest()
1263 pull_request.source_repo = source
1266 pull_request.source_repo = source
1264
1267
1265 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1268 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1266 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1269 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1267 pull_request.target_repo = target
1270 pull_request.target_repo = target
1268 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1271 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1269 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1272 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1270 pull_request.revisions = [commit_ids['change']]
1273 pull_request.revisions = [commit_ids['change']]
1271 pull_request.title = u"Test"
1274 pull_request.title = u"Test"
1272 pull_request.description = u"Description"
1275 pull_request.description = u"Description"
1273 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1276 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1274 pull_request.pull_request_state = PullRequest.STATE_CREATED
1277 pull_request.pull_request_state = PullRequest.STATE_CREATED
1275 Session().add(pull_request)
1278 Session().add(pull_request)
1276 Session().commit()
1279 Session().commit()
1277 pull_request_id = pull_request.pull_request_id
1280 pull_request_id = pull_request.pull_request_id
1278
1281
1279 # target has ancestor - ancestor-new
1282 # target has ancestor - ancestor-new
1280 # source has ancestor - ancestor-new - change-rebased
1283 # source has ancestor - ancestor-new - change-rebased
1281 backend.pull_heads(target, heads=['ancestor-new'])
1284 backend.pull_heads(target, heads=['ancestor-new'])
1282 backend.pull_heads(source, heads=['change-rebased'])
1285 backend.pull_heads(source, heads=['change-rebased'])
1286 target_repo_name = target.repo_name
1283
1287
1284 # update PR
1288 # update PR
1285 self.app.post(
1289 self.app.post(
1286 route_path('pullrequest_update',
1290 route_path('pullrequest_update',
1287 repo_name=target.repo_name, pull_request_id=pull_request_id),
1291 repo_name=target_repo_name, pull_request_id=pull_request_id),
1288 params={'update_commits': 'true', 'csrf_token': csrf_token},
1292 params={'update_commits': 'true', 'csrf_token': csrf_token},
1289 status=200)
1293 status=200)
1290
1294
1291 # Expect the target reference to be updated correctly
1295 # Expect the target reference to be updated correctly
1292 pull_request = PullRequest.get(pull_request_id)
1296 pull_request = PullRequest.get(pull_request_id)
1293 assert pull_request.revisions == [commit_ids['change-rebased']]
1297 assert pull_request.revisions == [commit_ids['change-rebased']]
1294 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1298 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1295 branch=backend.default_branch_name,
1299 branch=backend.default_branch_name,
1296 commit_id=commit_ids['ancestor-new'])
1300 commit_id=commit_ids['ancestor-new'])
1297 assert pull_request.target_ref == expected_target_ref
1301 assert pull_request.target_ref == expected_target_ref
1298
1302
1299 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1303 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1300 branch_name = 'development'
1304 branch_name = 'development'
1301 commits = [
1305 commits = [
1302 {'message': 'initial-commit'},
1306 {'message': 'initial-commit'},
1303 {'message': 'old-feature'},
1307 {'message': 'old-feature'},
1304 {'message': 'new-feature', 'branch': branch_name},
1308 {'message': 'new-feature', 'branch': branch_name},
1305 ]
1309 ]
1306 repo = backend_git.create_repo(commits)
1310 repo = backend_git.create_repo(commits)
1307 repo_name = repo.repo_name
1311 repo_name = repo.repo_name
1308 commit_ids = backend_git.commit_ids
1312 commit_ids = backend_git.commit_ids
1309
1313
1310 pull_request = PullRequest()
1314 pull_request = PullRequest()
1311 pull_request.source_repo = repo
1315 pull_request.source_repo = repo
1312 pull_request.target_repo = repo
1316 pull_request.target_repo = repo
1313 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1317 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1314 branch=branch_name, commit_id=commit_ids['new-feature'])
1318 branch=branch_name, commit_id=commit_ids['new-feature'])
1315 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1319 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1316 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1320 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1317 pull_request.revisions = [commit_ids['new-feature']]
1321 pull_request.revisions = [commit_ids['new-feature']]
1318 pull_request.title = u"Test"
1322 pull_request.title = u"Test"
1319 pull_request.description = u"Description"
1323 pull_request.description = u"Description"
1320 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1324 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1321 pull_request.pull_request_state = PullRequest.STATE_CREATED
1325 pull_request.pull_request_state = PullRequest.STATE_CREATED
1322 Session().add(pull_request)
1326 Session().add(pull_request)
1323 Session().commit()
1327 Session().commit()
1324
1328
1325 pull_request_id = pull_request.pull_request_id
1329 pull_request_id = pull_request.pull_request_id
1326
1330
1327 vcs = repo.scm_instance()
1331 vcs = repo.scm_instance()
1328 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1332 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1329 # NOTE(marcink): run GC to ensure the commits are gone
1333 # NOTE(marcink): run GC to ensure the commits are gone
1330 vcs.run_gc()
1334 vcs.run_gc()
1331
1335
1332 response = self.app.get(route_path(
1336 response = self.app.get(route_path(
1333 'pullrequest_show',
1337 'pullrequest_show',
1334 repo_name=repo_name,
1338 repo_name=repo_name,
1335 pull_request_id=pull_request_id))
1339 pull_request_id=pull_request_id))
1336
1340
1337 assert response.status_int == 200
1341 assert response.status_int == 200
1338
1342
1339 response.assert_response().element_contains(
1343 response.assert_response().element_contains(
1340 '#changeset_compare_view_content .alert strong',
1344 '#changeset_compare_view_content .alert strong',
1341 'Missing commits')
1345 'Missing commits')
1342 response.assert_response().element_contains(
1346 response.assert_response().element_contains(
1343 '#changeset_compare_view_content .alert',
1347 '#changeset_compare_view_content .alert',
1344 'This pull request cannot be displayed, because one or more'
1348 'This pull request cannot be displayed, because one or more'
1345 ' commits no longer exist in the source repository.')
1349 ' commits no longer exist in the source repository.')
1346
1350
1347 def test_strip_commits_from_pull_request(
1351 def test_strip_commits_from_pull_request(
1348 self, backend, pr_util, csrf_token):
1352 self, backend, pr_util, csrf_token):
1349 commits = [
1353 commits = [
1350 {'message': 'initial-commit'},
1354 {'message': 'initial-commit'},
1351 {'message': 'old-feature'},
1355 {'message': 'old-feature'},
1352 {'message': 'new-feature', 'parents': ['initial-commit']},
1356 {'message': 'new-feature', 'parents': ['initial-commit']},
1353 ]
1357 ]
1354 pull_request = pr_util.create_pull_request(
1358 pull_request = pr_util.create_pull_request(
1355 commits, target_head='initial-commit', source_head='new-feature',
1359 commits, target_head='initial-commit', source_head='new-feature',
1356 revisions=['new-feature'])
1360 revisions=['new-feature'])
1357
1361
1358 vcs = pr_util.source_repository.scm_instance()
1362 vcs = pr_util.source_repository.scm_instance()
1359 if backend.alias == 'git':
1363 if backend.alias == 'git':
1360 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1364 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1361 else:
1365 else:
1362 vcs.strip(pr_util.commit_ids['new-feature'])
1366 vcs.strip(pr_util.commit_ids['new-feature'])
1363
1367
1364 response = self.app.get(route_path(
1368 response = self.app.get(route_path(
1365 'pullrequest_show',
1369 'pullrequest_show',
1366 repo_name=pr_util.target_repository.repo_name,
1370 repo_name=pr_util.target_repository.repo_name,
1367 pull_request_id=pull_request.pull_request_id))
1371 pull_request_id=pull_request.pull_request_id))
1368
1372
1369 assert response.status_int == 200
1373 assert response.status_int == 200
1370
1374
1371 response.assert_response().element_contains(
1375 response.assert_response().element_contains(
1372 '#changeset_compare_view_content .alert strong',
1376 '#changeset_compare_view_content .alert strong',
1373 'Missing commits')
1377 'Missing commits')
1374 response.assert_response().element_contains(
1378 response.assert_response().element_contains(
1375 '#changeset_compare_view_content .alert',
1379 '#changeset_compare_view_content .alert',
1376 'This pull request cannot be displayed, because one or more'
1380 'This pull request cannot be displayed, because one or more'
1377 ' commits no longer exist in the source repository.')
1381 ' commits no longer exist in the source repository.')
1378 response.assert_response().element_contains(
1382 response.assert_response().element_contains(
1379 '#update_commits',
1383 '#update_commits',
1380 'Update commits')
1384 'Update commits')
1381
1385
1382 def test_strip_commits_and_update(
1386 def test_strip_commits_and_update(
1383 self, backend, pr_util, csrf_token):
1387 self, backend, pr_util, csrf_token):
1384 commits = [
1388 commits = [
1385 {'message': 'initial-commit'},
1389 {'message': 'initial-commit'},
1386 {'message': 'old-feature'},
1390 {'message': 'old-feature'},
1387 {'message': 'new-feature', 'parents': ['old-feature']},
1391 {'message': 'new-feature', 'parents': ['old-feature']},
1388 ]
1392 ]
1389 pull_request = pr_util.create_pull_request(
1393 pull_request = pr_util.create_pull_request(
1390 commits, target_head='old-feature', source_head='new-feature',
1394 commits, target_head='old-feature', source_head='new-feature',
1391 revisions=['new-feature'], mergeable=True)
1395 revisions=['new-feature'], mergeable=True)
1396 pr_id = pull_request.pull_request_id
1397 target_repo_name = pull_request.target_repo.repo_name
1392
1398
1393 vcs = pr_util.source_repository.scm_instance()
1399 vcs = pr_util.source_repository.scm_instance()
1394 if backend.alias == 'git':
1400 if backend.alias == 'git':
1395 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1401 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1396 else:
1402 else:
1397 vcs.strip(pr_util.commit_ids['new-feature'])
1403 vcs.strip(pr_util.commit_ids['new-feature'])
1398
1404
1399 url = route_path('pullrequest_update',
1405 url = route_path('pullrequest_update',
1400 repo_name=pull_request.target_repo.repo_name,
1406 repo_name=target_repo_name,
1401 pull_request_id=pull_request.pull_request_id)
1407 pull_request_id=pr_id)
1402 response = self.app.post(url,
1408 response = self.app.post(url,
1403 params={'update_commits': 'true',
1409 params={'update_commits': 'true',
1404 'csrf_token': csrf_token})
1410 'csrf_token': csrf_token})
1405
1411
1406 assert response.status_int == 200
1412 assert response.status_int == 200
1407 assert response.body == '{"response": true, "redirect_url": null}'
1413 assert response.body == '{"response": true, "redirect_url": null}'
1408
1414
1409 # Make sure that after update, it won't raise 500 errors
1415 # Make sure that after update, it won't raise 500 errors
1410 response = self.app.get(route_path(
1416 response = self.app.get(route_path(
1411 'pullrequest_show',
1417 'pullrequest_show',
1412 repo_name=pr_util.target_repository.repo_name,
1418 repo_name=target_repo_name,
1413 pull_request_id=pull_request.pull_request_id))
1419 pull_request_id=pr_id))
1414
1420
1415 assert response.status_int == 200
1421 assert response.status_int == 200
1416 response.assert_response().element_contains(
1422 response.assert_response().element_contains(
1417 '#changeset_compare_view_content .alert strong',
1423 '#changeset_compare_view_content .alert strong',
1418 'Missing commits')
1424 'Missing commits')
1419
1425
1420 def test_branch_is_a_link(self, pr_util):
1426 def test_branch_is_a_link(self, pr_util):
1421 pull_request = pr_util.create_pull_request()
1427 pull_request = pr_util.create_pull_request()
1422 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1428 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1423 pull_request.target_ref = 'branch:target:abcdef1234567890'
1429 pull_request.target_ref = 'branch:target:abcdef1234567890'
1424 Session().add(pull_request)
1430 Session().add(pull_request)
1425 Session().commit()
1431 Session().commit()
1426
1432
1427 response = self.app.get(route_path(
1433 response = self.app.get(route_path(
1428 'pullrequest_show',
1434 'pullrequest_show',
1429 repo_name=pull_request.target_repo.scm_instance().name,
1435 repo_name=pull_request.target_repo.scm_instance().name,
1430 pull_request_id=pull_request.pull_request_id))
1436 pull_request_id=pull_request.pull_request_id))
1431 assert response.status_int == 200
1437 assert response.status_int == 200
1432
1438
1433 source = response.assert_response().get_element('.pr-source-info')
1439 source = response.assert_response().get_element('.pr-source-info')
1434 source_parent = source.getparent()
1440 source_parent = source.getparent()
1435 assert len(source_parent) == 1
1441 assert len(source_parent) == 1
1436
1442
1437 target = response.assert_response().get_element('.pr-target-info')
1443 target = response.assert_response().get_element('.pr-target-info')
1438 target_parent = target.getparent()
1444 target_parent = target.getparent()
1439 assert len(target_parent) == 1
1445 assert len(target_parent) == 1
1440
1446
1441 expected_origin_link = route_path(
1447 expected_origin_link = route_path(
1442 'repo_commits',
1448 'repo_commits',
1443 repo_name=pull_request.source_repo.scm_instance().name,
1449 repo_name=pull_request.source_repo.scm_instance().name,
1444 params=dict(branch='origin'))
1450 params=dict(branch='origin'))
1445 expected_target_link = route_path(
1451 expected_target_link = route_path(
1446 'repo_commits',
1452 'repo_commits',
1447 repo_name=pull_request.target_repo.scm_instance().name,
1453 repo_name=pull_request.target_repo.scm_instance().name,
1448 params=dict(branch='target'))
1454 params=dict(branch='target'))
1449 assert source_parent.attrib['href'] == expected_origin_link
1455 assert source_parent.attrib['href'] == expected_origin_link
1450 assert target_parent.attrib['href'] == expected_target_link
1456 assert target_parent.attrib['href'] == expected_target_link
1451
1457
1452 def test_bookmark_is_not_a_link(self, pr_util):
1458 def test_bookmark_is_not_a_link(self, pr_util):
1453 pull_request = pr_util.create_pull_request()
1459 pull_request = pr_util.create_pull_request()
1454 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1460 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1455 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1461 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1456 Session().add(pull_request)
1462 Session().add(pull_request)
1457 Session().commit()
1463 Session().commit()
1458
1464
1459 response = self.app.get(route_path(
1465 response = self.app.get(route_path(
1460 'pullrequest_show',
1466 'pullrequest_show',
1461 repo_name=pull_request.target_repo.scm_instance().name,
1467 repo_name=pull_request.target_repo.scm_instance().name,
1462 pull_request_id=pull_request.pull_request_id))
1468 pull_request_id=pull_request.pull_request_id))
1463 assert response.status_int == 200
1469 assert response.status_int == 200
1464
1470
1465 source = response.assert_response().get_element('.pr-source-info')
1471 source = response.assert_response().get_element('.pr-source-info')
1466 assert source.text.strip() == 'bookmark:origin'
1472 assert source.text.strip() == 'bookmark:origin'
1467 assert source.getparent().attrib.get('href') is None
1473 assert source.getparent().attrib.get('href') is None
1468
1474
1469 target = response.assert_response().get_element('.pr-target-info')
1475 target = response.assert_response().get_element('.pr-target-info')
1470 assert target.text.strip() == 'bookmark:target'
1476 assert target.text.strip() == 'bookmark:target'
1471 assert target.getparent().attrib.get('href') is None
1477 assert target.getparent().attrib.get('href') is None
1472
1478
1473 def test_tag_is_not_a_link(self, pr_util):
1479 def test_tag_is_not_a_link(self, pr_util):
1474 pull_request = pr_util.create_pull_request()
1480 pull_request = pr_util.create_pull_request()
1475 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1481 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1476 pull_request.target_ref = 'tag:target:abcdef1234567890'
1482 pull_request.target_ref = 'tag:target:abcdef1234567890'
1477 Session().add(pull_request)
1483 Session().add(pull_request)
1478 Session().commit()
1484 Session().commit()
1479
1485
1480 response = self.app.get(route_path(
1486 response = self.app.get(route_path(
1481 'pullrequest_show',
1487 'pullrequest_show',
1482 repo_name=pull_request.target_repo.scm_instance().name,
1488 repo_name=pull_request.target_repo.scm_instance().name,
1483 pull_request_id=pull_request.pull_request_id))
1489 pull_request_id=pull_request.pull_request_id))
1484 assert response.status_int == 200
1490 assert response.status_int == 200
1485
1491
1486 source = response.assert_response().get_element('.pr-source-info')
1492 source = response.assert_response().get_element('.pr-source-info')
1487 assert source.text.strip() == 'tag:origin'
1493 assert source.text.strip() == 'tag:origin'
1488 assert source.getparent().attrib.get('href') is None
1494 assert source.getparent().attrib.get('href') is None
1489
1495
1490 target = response.assert_response().get_element('.pr-target-info')
1496 target = response.assert_response().get_element('.pr-target-info')
1491 assert target.text.strip() == 'tag:target'
1497 assert target.text.strip() == 'tag:target'
1492 assert target.getparent().attrib.get('href') is None
1498 assert target.getparent().attrib.get('href') is None
1493
1499
1494 @pytest.mark.parametrize('mergeable', [True, False])
1500 @pytest.mark.parametrize('mergeable', [True, False])
1495 def test_shadow_repository_link(
1501 def test_shadow_repository_link(
1496 self, mergeable, pr_util, http_host_only_stub):
1502 self, mergeable, pr_util, http_host_only_stub):
1497 """
1503 """
1498 Check that the pull request summary page displays a link to the shadow
1504 Check that the pull request summary page displays a link to the shadow
1499 repository if the pull request is mergeable. If it is not mergeable
1505 repository if the pull request is mergeable. If it is not mergeable
1500 the link should not be displayed.
1506 the link should not be displayed.
1501 """
1507 """
1502 pull_request = pr_util.create_pull_request(
1508 pull_request = pr_util.create_pull_request(
1503 mergeable=mergeable, enable_notifications=False)
1509 mergeable=mergeable, enable_notifications=False)
1504 target_repo = pull_request.target_repo.scm_instance()
1510 target_repo = pull_request.target_repo.scm_instance()
1505 pr_id = pull_request.pull_request_id
1511 pr_id = pull_request.pull_request_id
1506 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1512 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1507 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1513 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1508
1514
1509 response = self.app.get(route_path(
1515 response = self.app.get(route_path(
1510 'pullrequest_show',
1516 'pullrequest_show',
1511 repo_name=target_repo.name,
1517 repo_name=target_repo.name,
1512 pull_request_id=pr_id))
1518 pull_request_id=pr_id))
1513
1519
1514 if mergeable:
1520 if mergeable:
1515 response.assert_response().element_value_contains(
1521 response.assert_response().element_value_contains(
1516 'input.pr-mergeinfo', shadow_url)
1522 'input.pr-mergeinfo', shadow_url)
1517 response.assert_response().element_value_contains(
1523 response.assert_response().element_value_contains(
1518 'input.pr-mergeinfo ', 'pr-merge')
1524 'input.pr-mergeinfo ', 'pr-merge')
1519 else:
1525 else:
1520 response.assert_response().no_element_exists('.pr-mergeinfo')
1526 response.assert_response().no_element_exists('.pr-mergeinfo')
1521
1527
1522
1528
1523 @pytest.mark.usefixtures('app')
1529 @pytest.mark.usefixtures('app')
1524 @pytest.mark.backends("git", "hg")
1530 @pytest.mark.backends("git", "hg")
1525 class TestPullrequestsControllerDelete(object):
1531 class TestPullrequestsControllerDelete(object):
1526 def test_pull_request_delete_button_permissions_admin(
1532 def test_pull_request_delete_button_permissions_admin(
1527 self, autologin_user, user_admin, pr_util):
1533 self, autologin_user, user_admin, pr_util):
1528 pull_request = pr_util.create_pull_request(
1534 pull_request = pr_util.create_pull_request(
1529 author=user_admin.username, enable_notifications=False)
1535 author=user_admin.username, enable_notifications=False)
1530
1536
1531 response = self.app.get(route_path(
1537 response = self.app.get(route_path(
1532 'pullrequest_show',
1538 'pullrequest_show',
1533 repo_name=pull_request.target_repo.scm_instance().name,
1539 repo_name=pull_request.target_repo.scm_instance().name,
1534 pull_request_id=pull_request.pull_request_id))
1540 pull_request_id=pull_request.pull_request_id))
1535
1541
1536 response.mustcontain('id="delete_pullrequest"')
1542 response.mustcontain('id="delete_pullrequest"')
1537 response.mustcontain('Confirm to delete this pull request')
1543 response.mustcontain('Confirm to delete this pull request')
1538
1544
1539 def test_pull_request_delete_button_permissions_owner(
1545 def test_pull_request_delete_button_permissions_owner(
1540 self, autologin_regular_user, user_regular, pr_util):
1546 self, autologin_regular_user, user_regular, pr_util):
1541 pull_request = pr_util.create_pull_request(
1547 pull_request = pr_util.create_pull_request(
1542 author=user_regular.username, enable_notifications=False)
1548 author=user_regular.username, enable_notifications=False)
1543
1549
1544 response = self.app.get(route_path(
1550 response = self.app.get(route_path(
1545 'pullrequest_show',
1551 'pullrequest_show',
1546 repo_name=pull_request.target_repo.scm_instance().name,
1552 repo_name=pull_request.target_repo.scm_instance().name,
1547 pull_request_id=pull_request.pull_request_id))
1553 pull_request_id=pull_request.pull_request_id))
1548
1554
1549 response.mustcontain('id="delete_pullrequest"')
1555 response.mustcontain('id="delete_pullrequest"')
1550 response.mustcontain('Confirm to delete this pull request')
1556 response.mustcontain('Confirm to delete this pull request')
1551
1557
1552 def test_pull_request_delete_button_permissions_forbidden(
1558 def test_pull_request_delete_button_permissions_forbidden(
1553 self, autologin_regular_user, user_regular, user_admin, pr_util):
1559 self, autologin_regular_user, user_regular, user_admin, pr_util):
1554 pull_request = pr_util.create_pull_request(
1560 pull_request = pr_util.create_pull_request(
1555 author=user_admin.username, enable_notifications=False)
1561 author=user_admin.username, enable_notifications=False)
1556
1562
1557 response = self.app.get(route_path(
1563 response = self.app.get(route_path(
1558 'pullrequest_show',
1564 'pullrequest_show',
1559 repo_name=pull_request.target_repo.scm_instance().name,
1565 repo_name=pull_request.target_repo.scm_instance().name,
1560 pull_request_id=pull_request.pull_request_id))
1566 pull_request_id=pull_request.pull_request_id))
1561 response.mustcontain(no=['id="delete_pullrequest"'])
1567 response.mustcontain(no=['id="delete_pullrequest"'])
1562 response.mustcontain(no=['Confirm to delete this pull request'])
1568 response.mustcontain(no=['Confirm to delete this pull request'])
1563
1569
1564 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1570 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1565 self, autologin_regular_user, user_regular, user_admin, pr_util,
1571 self, autologin_regular_user, user_regular, user_admin, pr_util,
1566 user_util):
1572 user_util):
1567
1573
1568 pull_request = pr_util.create_pull_request(
1574 pull_request = pr_util.create_pull_request(
1569 author=user_admin.username, enable_notifications=False)
1575 author=user_admin.username, enable_notifications=False)
1570
1576
1571 user_util.grant_user_permission_to_repo(
1577 user_util.grant_user_permission_to_repo(
1572 pull_request.target_repo, user_regular,
1578 pull_request.target_repo, user_regular,
1573 'repository.write')
1579 'repository.write')
1574
1580
1575 response = self.app.get(route_path(
1581 response = self.app.get(route_path(
1576 'pullrequest_show',
1582 'pullrequest_show',
1577 repo_name=pull_request.target_repo.scm_instance().name,
1583 repo_name=pull_request.target_repo.scm_instance().name,
1578 pull_request_id=pull_request.pull_request_id))
1584 pull_request_id=pull_request.pull_request_id))
1579
1585
1580 response.mustcontain('id="open_edit_pullrequest"')
1586 response.mustcontain('id="open_edit_pullrequest"')
1581 response.mustcontain('id="delete_pullrequest"')
1587 response.mustcontain('id="delete_pullrequest"')
1582 response.mustcontain(no=['Confirm to delete this pull request'])
1588 response.mustcontain(no=['Confirm to delete this pull request'])
1583
1589
1584 def test_delete_comment_returns_404_if_comment_does_not_exist(
1590 def test_delete_comment_returns_404_if_comment_does_not_exist(
1585 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1591 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1586
1592
1587 pull_request = pr_util.create_pull_request(
1593 pull_request = pr_util.create_pull_request(
1588 author=user_admin.username, enable_notifications=False)
1594 author=user_admin.username, enable_notifications=False)
1589
1595
1590 self.app.post(
1596 self.app.post(
1591 route_path(
1597 route_path(
1592 'pullrequest_comment_delete',
1598 'pullrequest_comment_delete',
1593 repo_name=pull_request.target_repo.scm_instance().name,
1599 repo_name=pull_request.target_repo.scm_instance().name,
1594 pull_request_id=pull_request.pull_request_id,
1600 pull_request_id=pull_request.pull_request_id,
1595 comment_id=1024404),
1601 comment_id=1024404),
1596 extra_environ=xhr_header,
1602 extra_environ=xhr_header,
1597 params={'csrf_token': csrf_token},
1603 params={'csrf_token': csrf_token},
1598 status=404
1604 status=404
1599 )
1605 )
1600
1606
1601 def test_delete_comment(
1607 def test_delete_comment(
1602 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1608 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1603
1609
1604 pull_request = pr_util.create_pull_request(
1610 pull_request = pr_util.create_pull_request(
1605 author=user_admin.username, enable_notifications=False)
1611 author=user_admin.username, enable_notifications=False)
1606 comment = pr_util.create_comment()
1612 comment = pr_util.create_comment()
1607 comment_id = comment.comment_id
1613 comment_id = comment.comment_id
1608
1614
1609 response = self.app.post(
1615 response = self.app.post(
1610 route_path(
1616 route_path(
1611 'pullrequest_comment_delete',
1617 'pullrequest_comment_delete',
1612 repo_name=pull_request.target_repo.scm_instance().name,
1618 repo_name=pull_request.target_repo.scm_instance().name,
1613 pull_request_id=pull_request.pull_request_id,
1619 pull_request_id=pull_request.pull_request_id,
1614 comment_id=comment_id),
1620 comment_id=comment_id),
1615 extra_environ=xhr_header,
1621 extra_environ=xhr_header,
1616 params={'csrf_token': csrf_token},
1622 params={'csrf_token': csrf_token},
1617 status=200
1623 status=200
1618 )
1624 )
1619 assert response.body == 'true'
1625 assert response.body == 'true'
1620
1626
1621 @pytest.mark.parametrize('url_type', [
1627 @pytest.mark.parametrize('url_type', [
1622 'pullrequest_new',
1628 'pullrequest_new',
1623 'pullrequest_create',
1629 'pullrequest_create',
1624 'pullrequest_update',
1630 'pullrequest_update',
1625 'pullrequest_merge',
1631 'pullrequest_merge',
1626 ])
1632 ])
1627 def test_pull_request_is_forbidden_on_archived_repo(
1633 def test_pull_request_is_forbidden_on_archived_repo(
1628 self, autologin_user, backend, xhr_header, user_util, url_type):
1634 self, autologin_user, backend, xhr_header, user_util, url_type):
1629
1635
1630 # create a temporary repo
1636 # create a temporary repo
1631 source = user_util.create_repo(repo_type=backend.alias)
1637 source = user_util.create_repo(repo_type=backend.alias)
1632 repo_name = source.repo_name
1638 repo_name = source.repo_name
1633 repo = Repository.get_by_repo_name(repo_name)
1639 repo = Repository.get_by_repo_name(repo_name)
1634 repo.archived = True
1640 repo.archived = True
1635 Session().commit()
1641 Session().commit()
1636
1642
1637 response = self.app.get(
1643 response = self.app.get(
1638 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1644 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1639
1645
1640 msg = 'Action not supported for archived repository.'
1646 msg = 'Action not supported for archived repository.'
1641 assert_session_flash(response, msg)
1647 assert_session_flash(response, msg)
1642
1648
1643
1649
1644 def assert_pull_request_status(pull_request, expected_status):
1650 def assert_pull_request_status(pull_request, expected_status):
1645 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1651 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1646 assert status == expected_status
1652 assert status == expected_status
1647
1653
1648
1654
1649 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1655 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1650 @pytest.mark.usefixtures("autologin_user")
1656 @pytest.mark.usefixtures("autologin_user")
1651 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1657 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1652 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
1658 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,723 +1,781 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
22 import logging
21 import logging
22 import collections
23
23
24 from pyramid.httpexceptions import (
24 from pyramid.httpexceptions import (
25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from pyramid.renderers import render
27 from pyramid.renderers import render
28 from pyramid.response import Response
28 from pyramid.response import Response
29
29
30 from rhodecode.apps._base import RepoAppView
30 from rhodecode.apps._base import RepoAppView
31 from rhodecode.apps.file_store import utils as store_utils
31 from rhodecode.apps.file_store import utils as store_utils
32 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
32 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
33
33
34 from rhodecode.lib import diffs, codeblocks
34 from rhodecode.lib import diffs, codeblocks
35 from rhodecode.lib.auth import (
35 from rhodecode.lib.auth import (
36 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
36 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
37
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.compat import OrderedDict
38 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.diffs import (
39 from rhodecode.lib.diffs import (
40 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
40 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
41 get_diff_whitespace_flag)
41 get_diff_whitespace_flag)
42 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
42 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
43 import rhodecode.lib.helpers as h
43 import rhodecode.lib.helpers as h
44 from rhodecode.lib.utils2 import safe_unicode, str2bool
44 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict
45 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.lib.vcs.backends.base import EmptyCommit
46 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
47 RepositoryError, CommitDoesNotExistError)
47 RepositoryError, CommitDoesNotExistError)
48 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
48 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
49 ChangesetCommentHistory
49 ChangesetCommentHistory
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 def _update_with_GET(params, request):
58 def _update_with_GET(params, request):
59 for k in ['diff1', 'diff2', 'diff']:
59 for k in ['diff1', 'diff2', 'diff']:
60 params[k] += request.GET.getall(k)
60 params[k] += request.GET.getall(k)
61
61
62
62
63 class RepoCommitsView(RepoAppView):
63 class RepoCommitsView(RepoAppView):
64 def load_default_context(self):
64 def load_default_context(self):
65 c = self._get_local_tmpl_context(include_app_defaults=True)
65 c = self._get_local_tmpl_context(include_app_defaults=True)
66 c.rhodecode_repo = self.rhodecode_vcs_repo
66 c.rhodecode_repo = self.rhodecode_vcs_repo
67
67
68 return c
68 return c
69
69
70 def _is_diff_cache_enabled(self, target_repo):
70 def _is_diff_cache_enabled(self, target_repo):
71 caching_enabled = self._get_general_setting(
71 caching_enabled = self._get_general_setting(
72 target_repo, 'rhodecode_diff_cache')
72 target_repo, 'rhodecode_diff_cache')
73 log.debug('Diff caching enabled: %s', caching_enabled)
73 log.debug('Diff caching enabled: %s', caching_enabled)
74 return caching_enabled
74 return caching_enabled
75
75
76 def _commit(self, commit_id_range, method):
76 def _commit(self, commit_id_range, method):
77 _ = self.request.translate
77 _ = self.request.translate
78 c = self.load_default_context()
78 c = self.load_default_context()
79 c.fulldiff = self.request.GET.get('fulldiff')
79 c.fulldiff = self.request.GET.get('fulldiff')
80
80
81 # fetch global flags of ignore ws or context lines
81 # fetch global flags of ignore ws or context lines
82 diff_context = get_diff_context(self.request)
82 diff_context = get_diff_context(self.request)
83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84
84
85 # diff_limit will cut off the whole diff if the limit is applied
85 # diff_limit will cut off the whole diff if the limit is applied
86 # otherwise it will just hide the big files from the front-end
86 # otherwise it will just hide the big files from the front-end
87 diff_limit = c.visual.cut_off_limit_diff
87 diff_limit = c.visual.cut_off_limit_diff
88 file_limit = c.visual.cut_off_limit_file
88 file_limit = c.visual.cut_off_limit_file
89
89
90 # get ranges of commit ids if preset
90 # get ranges of commit ids if preset
91 commit_range = commit_id_range.split('...')[:2]
91 commit_range = commit_id_range.split('...')[:2]
92
92
93 try:
93 try:
94 pre_load = ['affected_files', 'author', 'branch', 'date',
94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 'message', 'parents']
95 'message', 'parents']
96 if self.rhodecode_vcs_repo.alias == 'hg':
96 if self.rhodecode_vcs_repo.alias == 'hg':
97 pre_load += ['hidden', 'obsolete', 'phase']
97 pre_load += ['hidden', 'obsolete', 'phase']
98
98
99 if len(commit_range) == 2:
99 if len(commit_range) == 2:
100 commits = self.rhodecode_vcs_repo.get_commits(
100 commits = self.rhodecode_vcs_repo.get_commits(
101 start_id=commit_range[0], end_id=commit_range[1],
101 start_id=commit_range[0], end_id=commit_range[1],
102 pre_load=pre_load, translate_tags=False)
102 pre_load=pre_load, translate_tags=False)
103 commits = list(commits)
103 commits = list(commits)
104 else:
104 else:
105 commits = [self.rhodecode_vcs_repo.get_commit(
105 commits = [self.rhodecode_vcs_repo.get_commit(
106 commit_id=commit_id_range, pre_load=pre_load)]
106 commit_id=commit_id_range, pre_load=pre_load)]
107
107
108 c.commit_ranges = commits
108 c.commit_ranges = commits
109 if not c.commit_ranges:
109 if not c.commit_ranges:
110 raise RepositoryError('The commit range returned an empty result')
110 raise RepositoryError('The commit range returned an empty result')
111 except CommitDoesNotExistError as e:
111 except CommitDoesNotExistError as e:
112 msg = _('No such commit exists. Org exception: `{}`').format(e)
112 msg = _('No such commit exists. Org exception: `{}`').format(e)
113 h.flash(msg, category='error')
113 h.flash(msg, category='error')
114 raise HTTPNotFound()
114 raise HTTPNotFound()
115 except Exception:
115 except Exception:
116 log.exception("General failure")
116 log.exception("General failure")
117 raise HTTPNotFound()
117 raise HTTPNotFound()
118 single_commit = len(c.commit_ranges) == 1
118
119
119 c.changes = OrderedDict()
120 c.changes = OrderedDict()
120 c.lines_added = 0
121 c.lines_added = 0
121 c.lines_deleted = 0
122 c.lines_deleted = 0
122
123
123 # auto collapse if we have more than limit
124 # auto collapse if we have more than limit
124 collapse_limit = diffs.DiffProcessor._collapse_commits_over
125 collapse_limit = diffs.DiffProcessor._collapse_commits_over
125 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
126 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
126
127
127 c.commit_statuses = ChangesetStatus.STATUSES
128 c.commit_statuses = ChangesetStatus.STATUSES
128 c.inline_comments = []
129 c.inline_comments = []
129 c.files = []
130 c.files = []
130
131
131 c.statuses = []
132 c.comments = []
132 c.comments = []
133 c.unresolved_comments = []
133 c.unresolved_comments = []
134 c.resolved_comments = []
134 c.resolved_comments = []
135 if len(c.commit_ranges) == 1:
135
136 # Single commit
137 if single_commit:
136 commit = c.commit_ranges[0]
138 commit = c.commit_ranges[0]
137 c.comments = CommentsModel().get_comments(
139 c.comments = CommentsModel().get_comments(
138 self.db_repo.repo_id,
140 self.db_repo.repo_id,
139 revision=commit.raw_id)
141 revision=commit.raw_id)
140 c.statuses.append(ChangesetStatusModel().get_status(
142
141 self.db_repo.repo_id, commit.raw_id))
142 # comments from PR
143 # comments from PR
143 statuses = ChangesetStatusModel().get_statuses(
144 statuses = ChangesetStatusModel().get_statuses(
144 self.db_repo.repo_id, commit.raw_id,
145 self.db_repo.repo_id, commit.raw_id,
145 with_revisions=True)
146 with_revisions=True)
146 prs = set(st.pull_request for st in statuses
147
147 if st.pull_request is not None)
148 prs = set()
149 reviewers = list()
150 reviewers_duplicates = set() # to not have duplicates from multiple votes
151 for c_status in statuses:
152
153 # extract associated pull-requests from votes
154 if c_status.pull_request:
155 prs.add(c_status.pull_request)
156
157 # extract reviewers
158 _user_id = c_status.author.user_id
159 if _user_id not in reviewers_duplicates:
160 reviewers.append(
161 StrictAttributeDict({
162 'user': c_status.author,
163
164 # fake attributed for commit, page that we don't have
165 # but we share the display with PR page
166 'mandatory': False,
167 'reasons': [],
168 'rule_user_group_data': lambda: None
169 })
170 )
171 reviewers_duplicates.add(_user_id)
172
173 c.allowed_reviewers = reviewers
148 # from associated statuses, check the pull requests, and
174 # from associated statuses, check the pull requests, and
149 # show comments from them
175 # show comments from them
150 for pr in prs:
176 for pr in prs:
151 c.comments.extend(pr.comments)
177 c.comments.extend(pr.comments)
152
178
153 c.unresolved_comments = CommentsModel()\
179 c.unresolved_comments = CommentsModel()\
154 .get_commit_unresolved_todos(commit.raw_id)
180 .get_commit_unresolved_todos(commit.raw_id)
155 c.resolved_comments = CommentsModel()\
181 c.resolved_comments = CommentsModel()\
156 .get_commit_resolved_todos(commit.raw_id)
182 .get_commit_resolved_todos(commit.raw_id)
157
183
184 c.inline_comments_flat = CommentsModel()\
185 .get_commit_inline_comments(commit.raw_id)
186
187 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
188 statuses, reviewers)
189
190 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
191
192 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
193
194 for review_obj, member, reasons, mandatory, status in review_statuses:
195 member_reviewer = h.reviewer_as_json(
196 member, reasons=reasons, mandatory=mandatory,
197 user_group=None
198 )
199
200 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
201 member_reviewer['review_status'] = current_review_status
202 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
203 member_reviewer['allowed_to_update'] = False
204 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
205
206 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
207
208 # NOTE(marcink): this uses the same voting logic as in pull-requests
209 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
210 c.commit_broadcast_channel = u'/repo${}$/commit/{}'.format(
211 c.repo_name,
212 commit.raw_id
213 )
214
158 diff = None
215 diff = None
159 # Iterate over ranges (default commit view is always one commit)
216 # Iterate over ranges (default commit view is always one commit)
160 for commit in c.commit_ranges:
217 for commit in c.commit_ranges:
161 c.changes[commit.raw_id] = []
218 c.changes[commit.raw_id] = []
162
219
163 commit2 = commit
220 commit2 = commit
164 commit1 = commit.first_parent
221 commit1 = commit.first_parent
165
222
166 if method == 'show':
223 if method == 'show':
167 inline_comments = CommentsModel().get_inline_comments(
224 inline_comments = CommentsModel().get_inline_comments(
168 self.db_repo.repo_id, revision=commit.raw_id)
225 self.db_repo.repo_id, revision=commit.raw_id)
169 c.inline_cnt = CommentsModel().get_inline_comments_count(
226 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
170 inline_comments)
227 inline_comments))
171 c.inline_comments = inline_comments
228 c.inline_comments = inline_comments
172
229
173 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
230 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
174 self.db_repo)
231 self.db_repo)
175 cache_file_path = diff_cache_exist(
232 cache_file_path = diff_cache_exist(
176 cache_path, 'diff', commit.raw_id,
233 cache_path, 'diff', commit.raw_id,
177 hide_whitespace_changes, diff_context, c.fulldiff)
234 hide_whitespace_changes, diff_context, c.fulldiff)
178
235
179 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
236 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
180 force_recache = str2bool(self.request.GET.get('force_recache'))
237 force_recache = str2bool(self.request.GET.get('force_recache'))
181
238
182 cached_diff = None
239 cached_diff = None
183 if caching_enabled:
240 if caching_enabled:
184 cached_diff = load_cached_diff(cache_file_path)
241 cached_diff = load_cached_diff(cache_file_path)
185
242
186 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
243 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
187 if not force_recache and has_proper_diff_cache:
244 if not force_recache and has_proper_diff_cache:
188 diffset = cached_diff['diff']
245 diffset = cached_diff['diff']
189 else:
246 else:
190 vcs_diff = self.rhodecode_vcs_repo.get_diff(
247 vcs_diff = self.rhodecode_vcs_repo.get_diff(
191 commit1, commit2,
248 commit1, commit2,
192 ignore_whitespace=hide_whitespace_changes,
249 ignore_whitespace=hide_whitespace_changes,
193 context=diff_context)
250 context=diff_context)
194
251
195 diff_processor = diffs.DiffProcessor(
252 diff_processor = diffs.DiffProcessor(
196 vcs_diff, format='newdiff', diff_limit=diff_limit,
253 vcs_diff, format='newdiff', diff_limit=diff_limit,
197 file_limit=file_limit, show_full_diff=c.fulldiff)
254 file_limit=file_limit, show_full_diff=c.fulldiff)
198
255
199 _parsed = diff_processor.prepare()
256 _parsed = diff_processor.prepare()
200
257
201 diffset = codeblocks.DiffSet(
258 diffset = codeblocks.DiffSet(
202 repo_name=self.db_repo_name,
259 repo_name=self.db_repo_name,
203 source_node_getter=codeblocks.diffset_node_getter(commit1),
260 source_node_getter=codeblocks.diffset_node_getter(commit1),
204 target_node_getter=codeblocks.diffset_node_getter(commit2))
261 target_node_getter=codeblocks.diffset_node_getter(commit2))
205
262
206 diffset = self.path_filter.render_patchset_filtered(
263 diffset = self.path_filter.render_patchset_filtered(
207 diffset, _parsed, commit1.raw_id, commit2.raw_id)
264 diffset, _parsed, commit1.raw_id, commit2.raw_id)
208
265
209 # save cached diff
266 # save cached diff
210 if caching_enabled:
267 if caching_enabled:
211 cache_diff(cache_file_path, diffset, None)
268 cache_diff(cache_file_path, diffset, None)
212
269
213 c.limited_diff = diffset.limited_diff
270 c.limited_diff = diffset.limited_diff
214 c.changes[commit.raw_id] = diffset
271 c.changes[commit.raw_id] = diffset
215 else:
272 else:
216 # TODO(marcink): no cache usage here...
273 # TODO(marcink): no cache usage here...
217 _diff = self.rhodecode_vcs_repo.get_diff(
274 _diff = self.rhodecode_vcs_repo.get_diff(
218 commit1, commit2,
275 commit1, commit2,
219 ignore_whitespace=hide_whitespace_changes, context=diff_context)
276 ignore_whitespace=hide_whitespace_changes, context=diff_context)
220 diff_processor = diffs.DiffProcessor(
277 diff_processor = diffs.DiffProcessor(
221 _diff, format='newdiff', diff_limit=diff_limit,
278 _diff, format='newdiff', diff_limit=diff_limit,
222 file_limit=file_limit, show_full_diff=c.fulldiff)
279 file_limit=file_limit, show_full_diff=c.fulldiff)
223 # downloads/raw we only need RAW diff nothing else
280 # downloads/raw we only need RAW diff nothing else
224 diff = self.path_filter.get_raw_patch(diff_processor)
281 diff = self.path_filter.get_raw_patch(diff_processor)
225 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
282 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
226
283
227 # sort comments by how they were generated
284 # sort comments by how they were generated
228 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
285 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
286 c.at_version_num = None
229
287
230 if len(c.commit_ranges) == 1:
288 if len(c.commit_ranges) == 1:
231 c.commit = c.commit_ranges[0]
289 c.commit = c.commit_ranges[0]
232 c.parent_tmpl = ''.join(
290 c.parent_tmpl = ''.join(
233 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
291 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
234
292
235 if method == 'download':
293 if method == 'download':
236 response = Response(diff)
294 response = Response(diff)
237 response.content_type = 'text/plain'
295 response.content_type = 'text/plain'
238 response.content_disposition = (
296 response.content_disposition = (
239 'attachment; filename=%s.diff' % commit_id_range[:12])
297 'attachment; filename=%s.diff' % commit_id_range[:12])
240 return response
298 return response
241 elif method == 'patch':
299 elif method == 'patch':
242 c.diff = safe_unicode(diff)
300 c.diff = safe_unicode(diff)
243 patch = render(
301 patch = render(
244 'rhodecode:templates/changeset/patch_changeset.mako',
302 'rhodecode:templates/changeset/patch_changeset.mako',
245 self._get_template_context(c), self.request)
303 self._get_template_context(c), self.request)
246 response = Response(patch)
304 response = Response(patch)
247 response.content_type = 'text/plain'
305 response.content_type = 'text/plain'
248 return response
306 return response
249 elif method == 'raw':
307 elif method == 'raw':
250 response = Response(diff)
308 response = Response(diff)
251 response.content_type = 'text/plain'
309 response.content_type = 'text/plain'
252 return response
310 return response
253 elif method == 'show':
311 elif method == 'show':
254 if len(c.commit_ranges) == 1:
312 if len(c.commit_ranges) == 1:
255 html = render(
313 html = render(
256 'rhodecode:templates/changeset/changeset.mako',
314 'rhodecode:templates/changeset/changeset.mako',
257 self._get_template_context(c), self.request)
315 self._get_template_context(c), self.request)
258 return Response(html)
316 return Response(html)
259 else:
317 else:
260 c.ancestor = None
318 c.ancestor = None
261 c.target_repo = self.db_repo
319 c.target_repo = self.db_repo
262 html = render(
320 html = render(
263 'rhodecode:templates/changeset/changeset_range.mako',
321 'rhodecode:templates/changeset/changeset_range.mako',
264 self._get_template_context(c), self.request)
322 self._get_template_context(c), self.request)
265 return Response(html)
323 return Response(html)
266
324
267 raise HTTPBadRequest()
325 raise HTTPBadRequest()
268
326
269 @LoginRequired()
327 @LoginRequired()
270 @HasRepoPermissionAnyDecorator(
328 @HasRepoPermissionAnyDecorator(
271 'repository.read', 'repository.write', 'repository.admin')
329 'repository.read', 'repository.write', 'repository.admin')
272 @view_config(
330 @view_config(
273 route_name='repo_commit', request_method='GET',
331 route_name='repo_commit', request_method='GET',
274 renderer=None)
332 renderer=None)
275 def repo_commit_show(self):
333 def repo_commit_show(self):
276 commit_id = self.request.matchdict['commit_id']
334 commit_id = self.request.matchdict['commit_id']
277 return self._commit(commit_id, method='show')
335 return self._commit(commit_id, method='show')
278
336
279 @LoginRequired()
337 @LoginRequired()
280 @HasRepoPermissionAnyDecorator(
338 @HasRepoPermissionAnyDecorator(
281 'repository.read', 'repository.write', 'repository.admin')
339 'repository.read', 'repository.write', 'repository.admin')
282 @view_config(
340 @view_config(
283 route_name='repo_commit_raw', request_method='GET',
341 route_name='repo_commit_raw', request_method='GET',
284 renderer=None)
342 renderer=None)
285 @view_config(
343 @view_config(
286 route_name='repo_commit_raw_deprecated', request_method='GET',
344 route_name='repo_commit_raw_deprecated', request_method='GET',
287 renderer=None)
345 renderer=None)
288 def repo_commit_raw(self):
346 def repo_commit_raw(self):
289 commit_id = self.request.matchdict['commit_id']
347 commit_id = self.request.matchdict['commit_id']
290 return self._commit(commit_id, method='raw')
348 return self._commit(commit_id, method='raw')
291
349
292 @LoginRequired()
350 @LoginRequired()
293 @HasRepoPermissionAnyDecorator(
351 @HasRepoPermissionAnyDecorator(
294 'repository.read', 'repository.write', 'repository.admin')
352 'repository.read', 'repository.write', 'repository.admin')
295 @view_config(
353 @view_config(
296 route_name='repo_commit_patch', request_method='GET',
354 route_name='repo_commit_patch', request_method='GET',
297 renderer=None)
355 renderer=None)
298 def repo_commit_patch(self):
356 def repo_commit_patch(self):
299 commit_id = self.request.matchdict['commit_id']
357 commit_id = self.request.matchdict['commit_id']
300 return self._commit(commit_id, method='patch')
358 return self._commit(commit_id, method='patch')
301
359
302 @LoginRequired()
360 @LoginRequired()
303 @HasRepoPermissionAnyDecorator(
361 @HasRepoPermissionAnyDecorator(
304 'repository.read', 'repository.write', 'repository.admin')
362 'repository.read', 'repository.write', 'repository.admin')
305 @view_config(
363 @view_config(
306 route_name='repo_commit_download', request_method='GET',
364 route_name='repo_commit_download', request_method='GET',
307 renderer=None)
365 renderer=None)
308 def repo_commit_download(self):
366 def repo_commit_download(self):
309 commit_id = self.request.matchdict['commit_id']
367 commit_id = self.request.matchdict['commit_id']
310 return self._commit(commit_id, method='download')
368 return self._commit(commit_id, method='download')
311
369
312 @LoginRequired()
370 @LoginRequired()
313 @NotAnonymous()
371 @NotAnonymous()
314 @HasRepoPermissionAnyDecorator(
372 @HasRepoPermissionAnyDecorator(
315 'repository.read', 'repository.write', 'repository.admin')
373 'repository.read', 'repository.write', 'repository.admin')
316 @CSRFRequired()
374 @CSRFRequired()
317 @view_config(
375 @view_config(
318 route_name='repo_commit_comment_create', request_method='POST',
376 route_name='repo_commit_comment_create', request_method='POST',
319 renderer='json_ext')
377 renderer='json_ext')
320 def repo_commit_comment_create(self):
378 def repo_commit_comment_create(self):
321 _ = self.request.translate
379 _ = self.request.translate
322 commit_id = self.request.matchdict['commit_id']
380 commit_id = self.request.matchdict['commit_id']
323
381
324 c = self.load_default_context()
382 c = self.load_default_context()
325 status = self.request.POST.get('changeset_status', None)
383 status = self.request.POST.get('changeset_status', None)
326 text = self.request.POST.get('text')
384 text = self.request.POST.get('text')
327 comment_type = self.request.POST.get('comment_type')
385 comment_type = self.request.POST.get('comment_type')
328 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
386 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
329
387
330 if status:
388 if status:
331 text = text or (_('Status change %(transition_icon)s %(status)s')
389 text = text or (_('Status change %(transition_icon)s %(status)s')
332 % {'transition_icon': '>',
390 % {'transition_icon': '>',
333 'status': ChangesetStatus.get_status_lbl(status)})
391 'status': ChangesetStatus.get_status_lbl(status)})
334
392
335 multi_commit_ids = []
393 multi_commit_ids = []
336 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
394 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
337 if _commit_id not in ['', None, EmptyCommit.raw_id]:
395 if _commit_id not in ['', None, EmptyCommit.raw_id]:
338 if _commit_id not in multi_commit_ids:
396 if _commit_id not in multi_commit_ids:
339 multi_commit_ids.append(_commit_id)
397 multi_commit_ids.append(_commit_id)
340
398
341 commit_ids = multi_commit_ids or [commit_id]
399 commit_ids = multi_commit_ids or [commit_id]
342
400
343 comment = None
401 comment = None
344 for current_id in filter(None, commit_ids):
402 for current_id in filter(None, commit_ids):
345 comment = CommentsModel().create(
403 comment = CommentsModel().create(
346 text=text,
404 text=text,
347 repo=self.db_repo.repo_id,
405 repo=self.db_repo.repo_id,
348 user=self._rhodecode_db_user.user_id,
406 user=self._rhodecode_db_user.user_id,
349 commit_id=current_id,
407 commit_id=current_id,
350 f_path=self.request.POST.get('f_path'),
408 f_path=self.request.POST.get('f_path'),
351 line_no=self.request.POST.get('line'),
409 line_no=self.request.POST.get('line'),
352 status_change=(ChangesetStatus.get_status_lbl(status)
410 status_change=(ChangesetStatus.get_status_lbl(status)
353 if status else None),
411 if status else None),
354 status_change_type=status,
412 status_change_type=status,
355 comment_type=comment_type,
413 comment_type=comment_type,
356 resolves_comment_id=resolves_comment_id,
414 resolves_comment_id=resolves_comment_id,
357 auth_user=self._rhodecode_user
415 auth_user=self._rhodecode_user
358 )
416 )
359
417
360 # get status if set !
418 # get status if set !
361 if status:
419 if status:
362 # if latest status was from pull request and it's closed
420 # if latest status was from pull request and it's closed
363 # disallow changing status !
421 # disallow changing status !
364 # dont_allow_on_closed_pull_request = True !
422 # dont_allow_on_closed_pull_request = True !
365
423
366 try:
424 try:
367 ChangesetStatusModel().set_status(
425 ChangesetStatusModel().set_status(
368 self.db_repo.repo_id,
426 self.db_repo.repo_id,
369 status,
427 status,
370 self._rhodecode_db_user.user_id,
428 self._rhodecode_db_user.user_id,
371 comment,
429 comment,
372 revision=current_id,
430 revision=current_id,
373 dont_allow_on_closed_pull_request=True
431 dont_allow_on_closed_pull_request=True
374 )
432 )
375 except StatusChangeOnClosedPullRequestError:
433 except StatusChangeOnClosedPullRequestError:
376 msg = _('Changing the status of a commit associated with '
434 msg = _('Changing the status of a commit associated with '
377 'a closed pull request is not allowed')
435 'a closed pull request is not allowed')
378 log.exception(msg)
436 log.exception(msg)
379 h.flash(msg, category='warning')
437 h.flash(msg, category='warning')
380 raise HTTPFound(h.route_path(
438 raise HTTPFound(h.route_path(
381 'repo_commit', repo_name=self.db_repo_name,
439 'repo_commit', repo_name=self.db_repo_name,
382 commit_id=current_id))
440 commit_id=current_id))
383
441
384 commit = self.db_repo.get_commit(current_id)
442 commit = self.db_repo.get_commit(current_id)
385 CommentsModel().trigger_commit_comment_hook(
443 CommentsModel().trigger_commit_comment_hook(
386 self.db_repo, self._rhodecode_user, 'create',
444 self.db_repo, self._rhodecode_user, 'create',
387 data={'comment': comment, 'commit': commit})
445 data={'comment': comment, 'commit': commit})
388
446
389 # finalize, commit and redirect
447 # finalize, commit and redirect
390 Session().commit()
448 Session().commit()
391
449
392 data = {
450 data = {
393 'target_id': h.safeid(h.safe_unicode(
451 'target_id': h.safeid(h.safe_unicode(
394 self.request.POST.get('f_path'))),
452 self.request.POST.get('f_path'))),
395 }
453 }
396 if comment:
454 if comment:
397 c.co = comment
455 c.co = comment
456 c.at_version_num = 0
398 rendered_comment = render(
457 rendered_comment = render(
399 'rhodecode:templates/changeset/changeset_comment_block.mako',
458 'rhodecode:templates/changeset/changeset_comment_block.mako',
400 self._get_template_context(c), self.request)
459 self._get_template_context(c), self.request)
401
460
402 data.update(comment.get_dict())
461 data.update(comment.get_dict())
403 data.update({'rendered_text': rendered_comment})
462 data.update({'rendered_text': rendered_comment})
404
463
405 return data
464 return data
406
465
407 @LoginRequired()
466 @LoginRequired()
408 @NotAnonymous()
467 @NotAnonymous()
409 @HasRepoPermissionAnyDecorator(
468 @HasRepoPermissionAnyDecorator(
410 'repository.read', 'repository.write', 'repository.admin')
469 'repository.read', 'repository.write', 'repository.admin')
411 @CSRFRequired()
470 @CSRFRequired()
412 @view_config(
471 @view_config(
413 route_name='repo_commit_comment_preview', request_method='POST',
472 route_name='repo_commit_comment_preview', request_method='POST',
414 renderer='string', xhr=True)
473 renderer='string', xhr=True)
415 def repo_commit_comment_preview(self):
474 def repo_commit_comment_preview(self):
416 # Technically a CSRF token is not needed as no state changes with this
475 # Technically a CSRF token is not needed as no state changes with this
417 # call. However, as this is a POST is better to have it, so automated
476 # call. However, as this is a POST is better to have it, so automated
418 # tools don't flag it as potential CSRF.
477 # tools don't flag it as potential CSRF.
419 # Post is required because the payload could be bigger than the maximum
478 # Post is required because the payload could be bigger than the maximum
420 # allowed by GET.
479 # allowed by GET.
421
480
422 text = self.request.POST.get('text')
481 text = self.request.POST.get('text')
423 renderer = self.request.POST.get('renderer') or 'rst'
482 renderer = self.request.POST.get('renderer') or 'rst'
424 if text:
483 if text:
425 return h.render(text, renderer=renderer, mentions=True,
484 return h.render(text, renderer=renderer, mentions=True,
426 repo_name=self.db_repo_name)
485 repo_name=self.db_repo_name)
427 return ''
486 return ''
428
487
429 @LoginRequired()
488 @LoginRequired()
430 @NotAnonymous()
431 @HasRepoPermissionAnyDecorator(
489 @HasRepoPermissionAnyDecorator(
432 'repository.read', 'repository.write', 'repository.admin')
490 'repository.read', 'repository.write', 'repository.admin')
433 @CSRFRequired()
491 @CSRFRequired()
434 @view_config(
492 @view_config(
435 route_name='repo_commit_comment_history_view', request_method='POST',
493 route_name='repo_commit_comment_history_view', request_method='POST',
436 renderer='string', xhr=True)
494 renderer='string', xhr=True)
437 def repo_commit_comment_history_view(self):
495 def repo_commit_comment_history_view(self):
438 c = self.load_default_context()
496 c = self.load_default_context()
439
497
440 comment_history_id = self.request.matchdict['comment_history_id']
498 comment_history_id = self.request.matchdict['comment_history_id']
441 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
499 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
442 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
500 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
443
501
444 if is_repo_comment:
502 if is_repo_comment:
445 c.comment_history = comment_history
503 c.comment_history = comment_history
446
504
447 rendered_comment = render(
505 rendered_comment = render(
448 'rhodecode:templates/changeset/comment_history.mako',
506 'rhodecode:templates/changeset/comment_history.mako',
449 self._get_template_context(c)
507 self._get_template_context(c)
450 , self.request)
508 , self.request)
451 return rendered_comment
509 return rendered_comment
452 else:
510 else:
453 log.warning('No permissions for user %s to show comment_history_id: %s',
511 log.warning('No permissions for user %s to show comment_history_id: %s',
454 self._rhodecode_db_user, comment_history_id)
512 self._rhodecode_db_user, comment_history_id)
455 raise HTTPNotFound()
513 raise HTTPNotFound()
456
514
457 @LoginRequired()
515 @LoginRequired()
458 @NotAnonymous()
516 @NotAnonymous()
459 @HasRepoPermissionAnyDecorator(
517 @HasRepoPermissionAnyDecorator(
460 'repository.read', 'repository.write', 'repository.admin')
518 'repository.read', 'repository.write', 'repository.admin')
461 @CSRFRequired()
519 @CSRFRequired()
462 @view_config(
520 @view_config(
463 route_name='repo_commit_comment_attachment_upload', request_method='POST',
521 route_name='repo_commit_comment_attachment_upload', request_method='POST',
464 renderer='json_ext', xhr=True)
522 renderer='json_ext', xhr=True)
465 def repo_commit_comment_attachment_upload(self):
523 def repo_commit_comment_attachment_upload(self):
466 c = self.load_default_context()
524 c = self.load_default_context()
467 upload_key = 'attachment'
525 upload_key = 'attachment'
468
526
469 file_obj = self.request.POST.get(upload_key)
527 file_obj = self.request.POST.get(upload_key)
470
528
471 if file_obj is None:
529 if file_obj is None:
472 self.request.response.status = 400
530 self.request.response.status = 400
473 return {'store_fid': None,
531 return {'store_fid': None,
474 'access_path': None,
532 'access_path': None,
475 'error': '{} data field is missing'.format(upload_key)}
533 'error': '{} data field is missing'.format(upload_key)}
476
534
477 if not hasattr(file_obj, 'filename'):
535 if not hasattr(file_obj, 'filename'):
478 self.request.response.status = 400
536 self.request.response.status = 400
479 return {'store_fid': None,
537 return {'store_fid': None,
480 'access_path': None,
538 'access_path': None,
481 'error': 'filename cannot be read from the data field'}
539 'error': 'filename cannot be read from the data field'}
482
540
483 filename = file_obj.filename
541 filename = file_obj.filename
484 file_display_name = filename
542 file_display_name = filename
485
543
486 metadata = {
544 metadata = {
487 'user_uploaded': {'username': self._rhodecode_user.username,
545 'user_uploaded': {'username': self._rhodecode_user.username,
488 'user_id': self._rhodecode_user.user_id,
546 'user_id': self._rhodecode_user.user_id,
489 'ip': self._rhodecode_user.ip_addr}}
547 'ip': self._rhodecode_user.ip_addr}}
490
548
491 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
549 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
492 allowed_extensions = [
550 allowed_extensions = [
493 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
551 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
494 '.pptx', '.txt', '.xlsx', '.zip']
552 '.pptx', '.txt', '.xlsx', '.zip']
495 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
553 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
496
554
497 try:
555 try:
498 storage = store_utils.get_file_storage(self.request.registry.settings)
556 storage = store_utils.get_file_storage(self.request.registry.settings)
499 store_uid, metadata = storage.save_file(
557 store_uid, metadata = storage.save_file(
500 file_obj.file, filename, extra_metadata=metadata,
558 file_obj.file, filename, extra_metadata=metadata,
501 extensions=allowed_extensions, max_filesize=max_file_size)
559 extensions=allowed_extensions, max_filesize=max_file_size)
502 except FileNotAllowedException:
560 except FileNotAllowedException:
503 self.request.response.status = 400
561 self.request.response.status = 400
504 permitted_extensions = ', '.join(allowed_extensions)
562 permitted_extensions = ', '.join(allowed_extensions)
505 error_msg = 'File `{}` is not allowed. ' \
563 error_msg = 'File `{}` is not allowed. ' \
506 'Only following extensions are permitted: {}'.format(
564 'Only following extensions are permitted: {}'.format(
507 filename, permitted_extensions)
565 filename, permitted_extensions)
508 return {'store_fid': None,
566 return {'store_fid': None,
509 'access_path': None,
567 'access_path': None,
510 'error': error_msg}
568 'error': error_msg}
511 except FileOverSizeException:
569 except FileOverSizeException:
512 self.request.response.status = 400
570 self.request.response.status = 400
513 limit_mb = h.format_byte_size_binary(max_file_size)
571 limit_mb = h.format_byte_size_binary(max_file_size)
514 return {'store_fid': None,
572 return {'store_fid': None,
515 'access_path': None,
573 'access_path': None,
516 'error': 'File {} is exceeding allowed limit of {}.'.format(
574 'error': 'File {} is exceeding allowed limit of {}.'.format(
517 filename, limit_mb)}
575 filename, limit_mb)}
518
576
519 try:
577 try:
520 entry = FileStore.create(
578 entry = FileStore.create(
521 file_uid=store_uid, filename=metadata["filename"],
579 file_uid=store_uid, filename=metadata["filename"],
522 file_hash=metadata["sha256"], file_size=metadata["size"],
580 file_hash=metadata["sha256"], file_size=metadata["size"],
523 file_display_name=file_display_name,
581 file_display_name=file_display_name,
524 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
582 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
525 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
583 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
526 scope_repo_id=self.db_repo.repo_id
584 scope_repo_id=self.db_repo.repo_id
527 )
585 )
528 Session().add(entry)
586 Session().add(entry)
529 Session().commit()
587 Session().commit()
530 log.debug('Stored upload in DB as %s', entry)
588 log.debug('Stored upload in DB as %s', entry)
531 except Exception:
589 except Exception:
532 log.exception('Failed to store file %s', filename)
590 log.exception('Failed to store file %s', filename)
533 self.request.response.status = 400
591 self.request.response.status = 400
534 return {'store_fid': None,
592 return {'store_fid': None,
535 'access_path': None,
593 'access_path': None,
536 'error': 'File {} failed to store in DB.'.format(filename)}
594 'error': 'File {} failed to store in DB.'.format(filename)}
537
595
538 Session().commit()
596 Session().commit()
539
597
540 return {
598 return {
541 'store_fid': store_uid,
599 'store_fid': store_uid,
542 'access_path': h.route_path(
600 'access_path': h.route_path(
543 'download_file', fid=store_uid),
601 'download_file', fid=store_uid),
544 'fqn_access_path': h.route_url(
602 'fqn_access_path': h.route_url(
545 'download_file', fid=store_uid),
603 'download_file', fid=store_uid),
546 'repo_access_path': h.route_path(
604 'repo_access_path': h.route_path(
547 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
605 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
548 'repo_fqn_access_path': h.route_url(
606 'repo_fqn_access_path': h.route_url(
549 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
607 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
550 }
608 }
551
609
552 @LoginRequired()
610 @LoginRequired()
553 @NotAnonymous()
611 @NotAnonymous()
554 @HasRepoPermissionAnyDecorator(
612 @HasRepoPermissionAnyDecorator(
555 'repository.read', 'repository.write', 'repository.admin')
613 'repository.read', 'repository.write', 'repository.admin')
556 @CSRFRequired()
614 @CSRFRequired()
557 @view_config(
615 @view_config(
558 route_name='repo_commit_comment_delete', request_method='POST',
616 route_name='repo_commit_comment_delete', request_method='POST',
559 renderer='json_ext')
617 renderer='json_ext')
560 def repo_commit_comment_delete(self):
618 def repo_commit_comment_delete(self):
561 commit_id = self.request.matchdict['commit_id']
619 commit_id = self.request.matchdict['commit_id']
562 comment_id = self.request.matchdict['comment_id']
620 comment_id = self.request.matchdict['comment_id']
563
621
564 comment = ChangesetComment.get_or_404(comment_id)
622 comment = ChangesetComment.get_or_404(comment_id)
565 if not comment:
623 if not comment:
566 log.debug('Comment with id:%s not found, skipping', comment_id)
624 log.debug('Comment with id:%s not found, skipping', comment_id)
567 # comment already deleted in another call probably
625 # comment already deleted in another call probably
568 return True
626 return True
569
627
570 if comment.immutable:
628 if comment.immutable:
571 # don't allow deleting comments that are immutable
629 # don't allow deleting comments that are immutable
572 raise HTTPForbidden()
630 raise HTTPForbidden()
573
631
574 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
632 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
575 super_admin = h.HasPermissionAny('hg.admin')()
633 super_admin = h.HasPermissionAny('hg.admin')()
576 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
634 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
577 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
635 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
578 comment_repo_admin = is_repo_admin and is_repo_comment
636 comment_repo_admin = is_repo_admin and is_repo_comment
579
637
580 if super_admin or comment_owner or comment_repo_admin:
638 if super_admin or comment_owner or comment_repo_admin:
581 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
639 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
582 Session().commit()
640 Session().commit()
583 return True
641 return True
584 else:
642 else:
585 log.warning('No permissions for user %s to delete comment_id: %s',
643 log.warning('No permissions for user %s to delete comment_id: %s',
586 self._rhodecode_db_user, comment_id)
644 self._rhodecode_db_user, comment_id)
587 raise HTTPNotFound()
645 raise HTTPNotFound()
588
646
589 @LoginRequired()
647 @LoginRequired()
590 @NotAnonymous()
648 @NotAnonymous()
591 @HasRepoPermissionAnyDecorator(
649 @HasRepoPermissionAnyDecorator(
592 'repository.read', 'repository.write', 'repository.admin')
650 'repository.read', 'repository.write', 'repository.admin')
593 @CSRFRequired()
651 @CSRFRequired()
594 @view_config(
652 @view_config(
595 route_name='repo_commit_comment_edit', request_method='POST',
653 route_name='repo_commit_comment_edit', request_method='POST',
596 renderer='json_ext')
654 renderer='json_ext')
597 def repo_commit_comment_edit(self):
655 def repo_commit_comment_edit(self):
598 self.load_default_context()
656 self.load_default_context()
599
657
600 comment_id = self.request.matchdict['comment_id']
658 comment_id = self.request.matchdict['comment_id']
601 comment = ChangesetComment.get_or_404(comment_id)
659 comment = ChangesetComment.get_or_404(comment_id)
602
660
603 if comment.immutable:
661 if comment.immutable:
604 # don't allow deleting comments that are immutable
662 # don't allow deleting comments that are immutable
605 raise HTTPForbidden()
663 raise HTTPForbidden()
606
664
607 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
665 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
608 super_admin = h.HasPermissionAny('hg.admin')()
666 super_admin = h.HasPermissionAny('hg.admin')()
609 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
667 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
610 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
668 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
611 comment_repo_admin = is_repo_admin and is_repo_comment
669 comment_repo_admin = is_repo_admin and is_repo_comment
612
670
613 if super_admin or comment_owner or comment_repo_admin:
671 if super_admin or comment_owner or comment_repo_admin:
614 text = self.request.POST.get('text')
672 text = self.request.POST.get('text')
615 version = self.request.POST.get('version')
673 version = self.request.POST.get('version')
616 if text == comment.text:
674 if text == comment.text:
617 log.warning(
675 log.warning(
618 'Comment(repo): '
676 'Comment(repo): '
619 'Trying to create new version '
677 'Trying to create new version '
620 'with the same comment body {}'.format(
678 'with the same comment body {}'.format(
621 comment_id,
679 comment_id,
622 )
680 )
623 )
681 )
624 raise HTTPNotFound()
682 raise HTTPNotFound()
625
683
626 if version.isdigit():
684 if version.isdigit():
627 version = int(version)
685 version = int(version)
628 else:
686 else:
629 log.warning(
687 log.warning(
630 'Comment(repo): Wrong version type {} {} '
688 'Comment(repo): Wrong version type {} {} '
631 'for comment {}'.format(
689 'for comment {}'.format(
632 version,
690 version,
633 type(version),
691 type(version),
634 comment_id,
692 comment_id,
635 )
693 )
636 )
694 )
637 raise HTTPNotFound()
695 raise HTTPNotFound()
638
696
639 try:
697 try:
640 comment_history = CommentsModel().edit(
698 comment_history = CommentsModel().edit(
641 comment_id=comment_id,
699 comment_id=comment_id,
642 text=text,
700 text=text,
643 auth_user=self._rhodecode_user,
701 auth_user=self._rhodecode_user,
644 version=version,
702 version=version,
645 )
703 )
646 except CommentVersionMismatch:
704 except CommentVersionMismatch:
647 raise HTTPConflict()
705 raise HTTPConflict()
648
706
649 if not comment_history:
707 if not comment_history:
650 raise HTTPNotFound()
708 raise HTTPNotFound()
651
709
652 commit_id = self.request.matchdict['commit_id']
710 commit_id = self.request.matchdict['commit_id']
653 commit = self.db_repo.get_commit(commit_id)
711 commit = self.db_repo.get_commit(commit_id)
654 CommentsModel().trigger_commit_comment_hook(
712 CommentsModel().trigger_commit_comment_hook(
655 self.db_repo, self._rhodecode_user, 'edit',
713 self.db_repo, self._rhodecode_user, 'edit',
656 data={'comment': comment, 'commit': commit})
714 data={'comment': comment, 'commit': commit})
657
715
658 Session().commit()
716 Session().commit()
659 return {
717 return {
660 'comment_history_id': comment_history.comment_history_id,
718 'comment_history_id': comment_history.comment_history_id,
661 'comment_id': comment.comment_id,
719 'comment_id': comment.comment_id,
662 'comment_version': comment_history.version,
720 'comment_version': comment_history.version,
663 'comment_author_username': comment_history.author.username,
721 'comment_author_username': comment_history.author.username,
664 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
722 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
665 'comment_created_on': h.age_component(comment_history.created_on,
723 'comment_created_on': h.age_component(comment_history.created_on,
666 time_is_local=True),
724 time_is_local=True),
667 }
725 }
668 else:
726 else:
669 log.warning('No permissions for user %s to edit comment_id: %s',
727 log.warning('No permissions for user %s to edit comment_id: %s',
670 self._rhodecode_db_user, comment_id)
728 self._rhodecode_db_user, comment_id)
671 raise HTTPNotFound()
729 raise HTTPNotFound()
672
730
673 @LoginRequired()
731 @LoginRequired()
674 @HasRepoPermissionAnyDecorator(
732 @HasRepoPermissionAnyDecorator(
675 'repository.read', 'repository.write', 'repository.admin')
733 'repository.read', 'repository.write', 'repository.admin')
676 @view_config(
734 @view_config(
677 route_name='repo_commit_data', request_method='GET',
735 route_name='repo_commit_data', request_method='GET',
678 renderer='json_ext', xhr=True)
736 renderer='json_ext', xhr=True)
679 def repo_commit_data(self):
737 def repo_commit_data(self):
680 commit_id = self.request.matchdict['commit_id']
738 commit_id = self.request.matchdict['commit_id']
681 self.load_default_context()
739 self.load_default_context()
682
740
683 try:
741 try:
684 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
742 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
685 except CommitDoesNotExistError as e:
743 except CommitDoesNotExistError as e:
686 return EmptyCommit(message=str(e))
744 return EmptyCommit(message=str(e))
687
745
688 @LoginRequired()
746 @LoginRequired()
689 @HasRepoPermissionAnyDecorator(
747 @HasRepoPermissionAnyDecorator(
690 'repository.read', 'repository.write', 'repository.admin')
748 'repository.read', 'repository.write', 'repository.admin')
691 @view_config(
749 @view_config(
692 route_name='repo_commit_children', request_method='GET',
750 route_name='repo_commit_children', request_method='GET',
693 renderer='json_ext', xhr=True)
751 renderer='json_ext', xhr=True)
694 def repo_commit_children(self):
752 def repo_commit_children(self):
695 commit_id = self.request.matchdict['commit_id']
753 commit_id = self.request.matchdict['commit_id']
696 self.load_default_context()
754 self.load_default_context()
697
755
698 try:
756 try:
699 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
757 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
700 children = commit.children
758 children = commit.children
701 except CommitDoesNotExistError:
759 except CommitDoesNotExistError:
702 children = []
760 children = []
703
761
704 result = {"results": children}
762 result = {"results": children}
705 return result
763 return result
706
764
707 @LoginRequired()
765 @LoginRequired()
708 @HasRepoPermissionAnyDecorator(
766 @HasRepoPermissionAnyDecorator(
709 'repository.read', 'repository.write', 'repository.admin')
767 'repository.read', 'repository.write', 'repository.admin')
710 @view_config(
768 @view_config(
711 route_name='repo_commit_parents', request_method='GET',
769 route_name='repo_commit_parents', request_method='GET',
712 renderer='json_ext')
770 renderer='json_ext')
713 def repo_commit_parents(self):
771 def repo_commit_parents(self):
714 commit_id = self.request.matchdict['commit_id']
772 commit_id = self.request.matchdict['commit_id']
715 self.load_default_context()
773 self.load_default_context()
716
774
717 try:
775 try:
718 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
776 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
719 parents = commit.parents
777 parents = commit.parents
720 except CommitDoesNotExistError:
778 except CommitDoesNotExistError:
721 parents = []
779 parents = []
722 result = {"results": parents}
780 result = {"results": parents}
723 return result
781 return result
@@ -1,1637 +1,1757 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
45 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository)
49 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository)
50 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.forms import PullRequestForm
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59
59
60 def load_default_context(self):
60 def load_default_context(self):
61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64 # backward compat., we use for OLD PRs a plain renderer
64 # backward compat., we use for OLD PRs a plain renderer
65 c.renderer = 'plain'
65 c.renderer = 'plain'
66 return c
66 return c
67
67
68 def _get_pull_requests_list(
68 def _get_pull_requests_list(
69 self, repo_name, source, filter_type, opened_by, statuses):
69 self, repo_name, source, filter_type, opened_by, statuses):
70
70
71 draw, start, limit = self._extract_chunk(self.request)
71 draw, start, limit = self._extract_chunk(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
73 _render = self.request.get_partial_renderer(
73 _render = self.request.get_partial_renderer(
74 'rhodecode:templates/data_table/_dt_elements.mako')
74 'rhodecode:templates/data_table/_dt_elements.mako')
75
75
76 # pagination
76 # pagination
77
77
78 if filter_type == 'awaiting_review':
78 if filter_type == 'awaiting_review':
79 pull_requests = PullRequestModel().get_awaiting_review(
79 pull_requests = PullRequestModel().get_awaiting_review(
80 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 repo_name, search_q=search_q, source=source, opened_by=opened_by,
81 statuses=statuses, offset=start, length=limit,
81 statuses=statuses, offset=start, length=limit,
82 order_by=order_by, order_dir=order_dir)
82 order_by=order_by, order_dir=order_dir)
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
84 repo_name, search_q=search_q, source=source, statuses=statuses,
84 repo_name, search_q=search_q, source=source, statuses=statuses,
85 opened_by=opened_by)
85 opened_by=opened_by)
86 elif filter_type == 'awaiting_my_review':
86 elif filter_type == 'awaiting_my_review':
87 pull_requests = PullRequestModel().get_awaiting_my_review(
87 pull_requests = PullRequestModel().get_awaiting_my_review(
88 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 repo_name, search_q=search_q, source=source, opened_by=opened_by,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
90 offset=start, length=limit, order_by=order_by,
90 offset=start, length=limit, order_by=order_by,
91 order_dir=order_dir)
91 order_dir=order_dir)
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
93 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
94 statuses=statuses, opened_by=opened_by)
94 statuses=statuses, opened_by=opened_by)
95 else:
95 else:
96 pull_requests = PullRequestModel().get_all(
96 pull_requests = PullRequestModel().get_all(
97 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 repo_name, search_q=search_q, source=source, opened_by=opened_by,
98 statuses=statuses, offset=start, length=limit,
98 statuses=statuses, offset=start, length=limit,
99 order_by=order_by, order_dir=order_dir)
99 order_by=order_by, order_dir=order_dir)
100 pull_requests_total_count = PullRequestModel().count_all(
100 pull_requests_total_count = PullRequestModel().count_all(
101 repo_name, search_q=search_q, source=source, statuses=statuses,
101 repo_name, search_q=search_q, source=source, statuses=statuses,
102 opened_by=opened_by)
102 opened_by=opened_by)
103
103
104 data = []
104 data = []
105 comments_model = CommentsModel()
105 comments_model = CommentsModel()
106 for pr in pull_requests:
106 for pr in pull_requests:
107 comments = comments_model.get_all_comments(
107 comments = comments_model.get_all_comments(
108 self.db_repo.repo_id, pull_request=pr)
108 self.db_repo.repo_id, pull_request=pr)
109
109
110 data.append({
110 data.append({
111 'name': _render('pullrequest_name',
111 'name': _render('pullrequest_name',
112 pr.pull_request_id, pr.pull_request_state,
112 pr.pull_request_id, pr.pull_request_state,
113 pr.work_in_progress, pr.target_repo.repo_name),
113 pr.work_in_progress, pr.target_repo.repo_name),
114 'name_raw': pr.pull_request_id,
114 'name_raw': pr.pull_request_id,
115 'status': _render('pullrequest_status',
115 'status': _render('pullrequest_status',
116 pr.calculated_review_status()),
116 pr.calculated_review_status()),
117 'title': _render('pullrequest_title', pr.title, pr.description),
117 'title': _render('pullrequest_title', pr.title, pr.description),
118 'description': h.escape(pr.description),
118 'description': h.escape(pr.description),
119 'updated_on': _render('pullrequest_updated_on',
119 'updated_on': _render('pullrequest_updated_on',
120 h.datetime_to_time(pr.updated_on)),
120 h.datetime_to_time(pr.updated_on)),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
122 'created_on': _render('pullrequest_updated_on',
122 'created_on': _render('pullrequest_updated_on',
123 h.datetime_to_time(pr.created_on)),
123 h.datetime_to_time(pr.created_on)),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
125 'state': pr.pull_request_state,
125 'state': pr.pull_request_state,
126 'author': _render('pullrequest_author',
126 'author': _render('pullrequest_author',
127 pr.author.full_contact, ),
127 pr.author.full_contact, ),
128 'author_raw': pr.author.full_name,
128 'author_raw': pr.author.full_name,
129 'comments': _render('pullrequest_comments', len(comments)),
129 'comments': _render('pullrequest_comments', len(comments)),
130 'comments_raw': len(comments),
130 'comments_raw': len(comments),
131 'closed': pr.is_closed(),
131 'closed': pr.is_closed(),
132 })
132 })
133
133
134 data = ({
134 data = ({
135 'draw': draw,
135 'draw': draw,
136 'data': data,
136 'data': data,
137 'recordsTotal': pull_requests_total_count,
137 'recordsTotal': pull_requests_total_count,
138 'recordsFiltered': pull_requests_total_count,
138 'recordsFiltered': pull_requests_total_count,
139 })
139 })
140 return data
140 return data
141
141
142 @LoginRequired()
142 @LoginRequired()
143 @HasRepoPermissionAnyDecorator(
143 @HasRepoPermissionAnyDecorator(
144 'repository.read', 'repository.write', 'repository.admin')
144 'repository.read', 'repository.write', 'repository.admin')
145 @view_config(
145 @view_config(
146 route_name='pullrequest_show_all', request_method='GET',
146 route_name='pullrequest_show_all', request_method='GET',
147 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
148 def pull_request_list(self):
148 def pull_request_list(self):
149 c = self.load_default_context()
149 c = self.load_default_context()
150
150
151 req_get = self.request.GET
151 req_get = self.request.GET
152 c.source = str2bool(req_get.get('source'))
152 c.source = str2bool(req_get.get('source'))
153 c.closed = str2bool(req_get.get('closed'))
153 c.closed = str2bool(req_get.get('closed'))
154 c.my = str2bool(req_get.get('my'))
154 c.my = str2bool(req_get.get('my'))
155 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
156 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
157
157
158 c.active = 'open'
158 c.active = 'open'
159 if c.my:
159 if c.my:
160 c.active = 'my'
160 c.active = 'my'
161 if c.closed:
161 if c.closed:
162 c.active = 'closed'
162 c.active = 'closed'
163 if c.awaiting_review and not c.source:
163 if c.awaiting_review and not c.source:
164 c.active = 'awaiting'
164 c.active = 'awaiting'
165 if c.source and not c.awaiting_review:
165 if c.source and not c.awaiting_review:
166 c.active = 'source'
166 c.active = 'source'
167 if c.awaiting_my_review:
167 if c.awaiting_my_review:
168 c.active = 'awaiting_my'
168 c.active = 'awaiting_my'
169
169
170 return self._get_template_context(c)
170 return self._get_template_context(c)
171
171
172 @LoginRequired()
172 @LoginRequired()
173 @HasRepoPermissionAnyDecorator(
173 @HasRepoPermissionAnyDecorator(
174 'repository.read', 'repository.write', 'repository.admin')
174 'repository.read', 'repository.write', 'repository.admin')
175 @view_config(
175 @view_config(
176 route_name='pullrequest_show_all_data', request_method='GET',
176 route_name='pullrequest_show_all_data', request_method='GET',
177 renderer='json_ext', xhr=True)
177 renderer='json_ext', xhr=True)
178 def pull_request_list_data(self):
178 def pull_request_list_data(self):
179 self.load_default_context()
179 self.load_default_context()
180
180
181 # additional filters
181 # additional filters
182 req_get = self.request.GET
182 req_get = self.request.GET
183 source = str2bool(req_get.get('source'))
183 source = str2bool(req_get.get('source'))
184 closed = str2bool(req_get.get('closed'))
184 closed = str2bool(req_get.get('closed'))
185 my = str2bool(req_get.get('my'))
185 my = str2bool(req_get.get('my'))
186 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 awaiting_review = str2bool(req_get.get('awaiting_review'))
187 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
188
188
189 filter_type = 'awaiting_review' if awaiting_review \
189 filter_type = 'awaiting_review' if awaiting_review \
190 else 'awaiting_my_review' if awaiting_my_review \
190 else 'awaiting_my_review' if awaiting_my_review \
191 else None
191 else None
192
192
193 opened_by = None
193 opened_by = None
194 if my:
194 if my:
195 opened_by = [self._rhodecode_user.user_id]
195 opened_by = [self._rhodecode_user.user_id]
196
196
197 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
198 if closed:
198 if closed:
199 statuses = [PullRequest.STATUS_CLOSED]
199 statuses = [PullRequest.STATUS_CLOSED]
200
200
201 data = self._get_pull_requests_list(
201 data = self._get_pull_requests_list(
202 repo_name=self.db_repo_name, source=source,
202 repo_name=self.db_repo_name, source=source,
203 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
204
204
205 return data
205 return data
206
206
207 def _is_diff_cache_enabled(self, target_repo):
207 def _is_diff_cache_enabled(self, target_repo):
208 caching_enabled = self._get_general_setting(
208 caching_enabled = self._get_general_setting(
209 target_repo, 'rhodecode_diff_cache')
209 target_repo, 'rhodecode_diff_cache')
210 log.debug('Diff caching enabled: %s', caching_enabled)
210 log.debug('Diff caching enabled: %s', caching_enabled)
211 return caching_enabled
211 return caching_enabled
212
212
213 def _get_diffset(self, source_repo_name, source_repo,
213 def _get_diffset(self, source_repo_name, source_repo,
214 ancestor_commit,
214 ancestor_commit,
215 source_ref_id, target_ref_id,
215 source_ref_id, target_ref_id,
216 target_commit, source_commit, diff_limit, file_limit,
216 target_commit, source_commit, diff_limit, file_limit,
217 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
217 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
218
218
219 if use_ancestor:
219 if use_ancestor:
220 # we might want to not use it for versions
220 # we might want to not use it for versions
221 target_ref_id = ancestor_commit.raw_id
221 target_ref_id = ancestor_commit.raw_id
222
222
223 vcs_diff = PullRequestModel().get_diff(
223 vcs_diff = PullRequestModel().get_diff(
224 source_repo, source_ref_id, target_ref_id,
224 source_repo, source_ref_id, target_ref_id,
225 hide_whitespace_changes, diff_context)
225 hide_whitespace_changes, diff_context)
226
226
227 diff_processor = diffs.DiffProcessor(
227 diff_processor = diffs.DiffProcessor(
228 vcs_diff, format='newdiff', diff_limit=diff_limit,
228 vcs_diff, format='newdiff', diff_limit=diff_limit,
229 file_limit=file_limit, show_full_diff=fulldiff)
229 file_limit=file_limit, show_full_diff=fulldiff)
230
230
231 _parsed = diff_processor.prepare()
231 _parsed = diff_processor.prepare()
232
232
233 diffset = codeblocks.DiffSet(
233 diffset = codeblocks.DiffSet(
234 repo_name=self.db_repo_name,
234 repo_name=self.db_repo_name,
235 source_repo_name=source_repo_name,
235 source_repo_name=source_repo_name,
236 source_node_getter=codeblocks.diffset_node_getter(target_commit),
236 source_node_getter=codeblocks.diffset_node_getter(target_commit),
237 target_node_getter=codeblocks.diffset_node_getter(source_commit),
237 target_node_getter=codeblocks.diffset_node_getter(source_commit),
238 )
238 )
239 diffset = self.path_filter.render_patchset_filtered(
239 diffset = self.path_filter.render_patchset_filtered(
240 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
240 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
241
241
242 return diffset
242 return diffset
243
243
244 def _get_range_diffset(self, source_scm, source_repo,
244 def _get_range_diffset(self, source_scm, source_repo,
245 commit1, commit2, diff_limit, file_limit,
245 commit1, commit2, diff_limit, file_limit,
246 fulldiff, hide_whitespace_changes, diff_context):
246 fulldiff, hide_whitespace_changes, diff_context):
247 vcs_diff = source_scm.get_diff(
247 vcs_diff = source_scm.get_diff(
248 commit1, commit2,
248 commit1, commit2,
249 ignore_whitespace=hide_whitespace_changes,
249 ignore_whitespace=hide_whitespace_changes,
250 context=diff_context)
250 context=diff_context)
251
251
252 diff_processor = diffs.DiffProcessor(
252 diff_processor = diffs.DiffProcessor(
253 vcs_diff, format='newdiff', diff_limit=diff_limit,
253 vcs_diff, format='newdiff', diff_limit=diff_limit,
254 file_limit=file_limit, show_full_diff=fulldiff)
254 file_limit=file_limit, show_full_diff=fulldiff)
255
255
256 _parsed = diff_processor.prepare()
256 _parsed = diff_processor.prepare()
257
257
258 diffset = codeblocks.DiffSet(
258 diffset = codeblocks.DiffSet(
259 repo_name=source_repo.repo_name,
259 repo_name=source_repo.repo_name,
260 source_node_getter=codeblocks.diffset_node_getter(commit1),
260 source_node_getter=codeblocks.diffset_node_getter(commit1),
261 target_node_getter=codeblocks.diffset_node_getter(commit2))
261 target_node_getter=codeblocks.diffset_node_getter(commit2))
262
262
263 diffset = self.path_filter.render_patchset_filtered(
263 diffset = self.path_filter.render_patchset_filtered(
264 diffset, _parsed, commit1.raw_id, commit2.raw_id)
264 diffset, _parsed, commit1.raw_id, commit2.raw_id)
265
265
266 return diffset
266 return diffset
267
267
268 def register_comments_vars(self, c, pull_request, versions):
269 comments_model = CommentsModel()
270
271 # GENERAL COMMENTS with versions #
272 q = comments_model._all_general_comments_of_pull_request(pull_request)
273 q = q.order_by(ChangesetComment.comment_id.asc())
274 general_comments = q
275
276 # pick comments we want to render at current version
277 c.comment_versions = comments_model.aggregate_comments(
278 general_comments, versions, c.at_version_num)
279
280 # INLINE COMMENTS with versions #
281 q = comments_model._all_inline_comments_of_pull_request(pull_request)
282 q = q.order_by(ChangesetComment.comment_id.asc())
283 inline_comments = q
284
285 c.inline_versions = comments_model.aggregate_comments(
286 inline_comments, versions, c.at_version_num, inline=True)
287
288 # Comments inline+general
289 if c.at_version:
290 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
291 c.comments = c.comment_versions[c.at_version_num]['display']
292 else:
293 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
294 c.comments = c.comment_versions[c.at_version_num]['until']
295
296 return general_comments, inline_comments
297
268 @LoginRequired()
298 @LoginRequired()
269 @HasRepoPermissionAnyDecorator(
299 @HasRepoPermissionAnyDecorator(
270 'repository.read', 'repository.write', 'repository.admin')
300 'repository.read', 'repository.write', 'repository.admin')
271 @view_config(
301 @view_config(
272 route_name='pullrequest_show', request_method='GET',
302 route_name='pullrequest_show', request_method='GET',
273 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
303 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
274 def pull_request_show(self):
304 def pull_request_show(self):
275 _ = self.request.translate
305 _ = self.request.translate
276 c = self.load_default_context()
306 c = self.load_default_context()
277
307
278 pull_request = PullRequest.get_or_404(
308 pull_request = PullRequest.get_or_404(
279 self.request.matchdict['pull_request_id'])
309 self.request.matchdict['pull_request_id'])
280 pull_request_id = pull_request.pull_request_id
310 pull_request_id = pull_request.pull_request_id
281
311
282 c.state_progressing = pull_request.is_state_changing()
312 c.state_progressing = pull_request.is_state_changing()
313 c.pr_broadcast_channel = '/repo${}$/pr/{}'.format(
314 pull_request.target_repo.repo_name, pull_request.pull_request_id)
283
315
284 _new_state = {
316 _new_state = {
285 'created': PullRequest.STATE_CREATED,
317 'created': PullRequest.STATE_CREATED,
286 }.get(self.request.GET.get('force_state'))
318 }.get(self.request.GET.get('force_state'))
287
319
288 if c.is_super_admin and _new_state:
320 if c.is_super_admin and _new_state:
289 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
321 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
290 h.flash(
322 h.flash(
291 _('Pull Request state was force changed to `{}`').format(_new_state),
323 _('Pull Request state was force changed to `{}`').format(_new_state),
292 category='success')
324 category='success')
293 Session().commit()
325 Session().commit()
294
326
295 raise HTTPFound(h.route_path(
327 raise HTTPFound(h.route_path(
296 'pullrequest_show', repo_name=self.db_repo_name,
328 'pullrequest_show', repo_name=self.db_repo_name,
297 pull_request_id=pull_request_id))
329 pull_request_id=pull_request_id))
298
330
299 version = self.request.GET.get('version')
331 version = self.request.GET.get('version')
300 from_version = self.request.GET.get('from_version') or version
332 from_version = self.request.GET.get('from_version') or version
301 merge_checks = self.request.GET.get('merge_checks')
333 merge_checks = self.request.GET.get('merge_checks')
302 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
334 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
335 force_refresh = str2bool(self.request.GET.get('force_refresh'))
336 c.range_diff_on = self.request.GET.get('range-diff') == "1"
303
337
304 # fetch global flags of ignore ws or context lines
338 # fetch global flags of ignore ws or context lines
305 diff_context = diffs.get_diff_context(self.request)
339 diff_context = diffs.get_diff_context(self.request)
306 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
340 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
307
341
308 force_refresh = str2bool(self.request.GET.get('force_refresh'))
309
310 (pull_request_latest,
342 (pull_request_latest,
311 pull_request_at_ver,
343 pull_request_at_ver,
312 pull_request_display_obj,
344 pull_request_display_obj,
313 at_version) = PullRequestModel().get_pr_version(
345 at_version) = PullRequestModel().get_pr_version(
314 pull_request_id, version=version)
346 pull_request_id, version=version)
347
315 pr_closed = pull_request_latest.is_closed()
348 pr_closed = pull_request_latest.is_closed()
316
349
317 if pr_closed and (version or from_version):
350 if pr_closed and (version or from_version):
318 # not allow to browse versions
351 # not allow to browse versions for closed PR
319 raise HTTPFound(h.route_path(
352 raise HTTPFound(h.route_path(
320 'pullrequest_show', repo_name=self.db_repo_name,
353 'pullrequest_show', repo_name=self.db_repo_name,
321 pull_request_id=pull_request_id))
354 pull_request_id=pull_request_id))
322
355
323 versions = pull_request_display_obj.versions()
356 versions = pull_request_display_obj.versions()
324 # used to store per-commit range diffs
357 # used to store per-commit range diffs
325 c.changes = collections.OrderedDict()
358 c.changes = collections.OrderedDict()
326 c.range_diff_on = self.request.GET.get('range-diff') == "1"
327
359
328 c.at_version = at_version
360 c.at_version = at_version
329 c.at_version_num = (at_version
361 c.at_version_num = (at_version
330 if at_version and at_version != 'latest'
362 if at_version and at_version != PullRequest.LATEST_VER
331 else None)
363 else None)
332 c.at_version_pos = ChangesetComment.get_index_from_version(
364
365 c.at_version_index = ChangesetComment.get_index_from_version(
333 c.at_version_num, versions)
366 c.at_version_num, versions)
334
367
335 (prev_pull_request_latest,
368 (prev_pull_request_latest,
336 prev_pull_request_at_ver,
369 prev_pull_request_at_ver,
337 prev_pull_request_display_obj,
370 prev_pull_request_display_obj,
338 prev_at_version) = PullRequestModel().get_pr_version(
371 prev_at_version) = PullRequestModel().get_pr_version(
339 pull_request_id, version=from_version)
372 pull_request_id, version=from_version)
340
373
341 c.from_version = prev_at_version
374 c.from_version = prev_at_version
342 c.from_version_num = (prev_at_version
375 c.from_version_num = (prev_at_version
343 if prev_at_version and prev_at_version != 'latest'
376 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
344 else None)
377 else None)
345 c.from_version_pos = ChangesetComment.get_index_from_version(
378 c.from_version_index = ChangesetComment.get_index_from_version(
346 c.from_version_num, versions)
379 c.from_version_num, versions)
347
380
348 # define if we're in COMPARE mode or VIEW at version mode
381 # define if we're in COMPARE mode or VIEW at version mode
349 compare = at_version != prev_at_version
382 compare = at_version != prev_at_version
350
383
351 # pull_requests repo_name we opened it against
384 # pull_requests repo_name we opened it against
352 # ie. target_repo must match
385 # ie. target_repo must match
353 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
386 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
387 log.warning('Mismatch between the current repo: %s, and target %s',
388 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
354 raise HTTPNotFound()
389 raise HTTPNotFound()
355
390
356 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
391 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
357 pull_request_at_ver)
358
392
359 c.pull_request = pull_request_display_obj
393 c.pull_request = pull_request_display_obj
360 c.renderer = pull_request_at_ver.description_renderer or c.renderer
394 c.renderer = pull_request_at_ver.description_renderer or c.renderer
361 c.pull_request_latest = pull_request_latest
395 c.pull_request_latest = pull_request_latest
362
396
363 if compare or (at_version and not at_version == 'latest'):
397 # inject latest version
398 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
399 c.versions = versions + [latest_ver]
400
401 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
364 c.allowed_to_change_status = False
402 c.allowed_to_change_status = False
365 c.allowed_to_update = False
403 c.allowed_to_update = False
366 c.allowed_to_merge = False
404 c.allowed_to_merge = False
367 c.allowed_to_delete = False
405 c.allowed_to_delete = False
368 c.allowed_to_comment = False
406 c.allowed_to_comment = False
369 c.allowed_to_close = False
407 c.allowed_to_close = False
370 else:
408 else:
371 can_change_status = PullRequestModel().check_user_change_status(
409 can_change_status = PullRequestModel().check_user_change_status(
372 pull_request_at_ver, self._rhodecode_user)
410 pull_request_at_ver, self._rhodecode_user)
373 c.allowed_to_change_status = can_change_status and not pr_closed
411 c.allowed_to_change_status = can_change_status and not pr_closed
374
412
375 c.allowed_to_update = PullRequestModel().check_user_update(
413 c.allowed_to_update = PullRequestModel().check_user_update(
376 pull_request_latest, self._rhodecode_user) and not pr_closed
414 pull_request_latest, self._rhodecode_user) and not pr_closed
377 c.allowed_to_merge = PullRequestModel().check_user_merge(
415 c.allowed_to_merge = PullRequestModel().check_user_merge(
378 pull_request_latest, self._rhodecode_user) and not pr_closed
416 pull_request_latest, self._rhodecode_user) and not pr_closed
379 c.allowed_to_delete = PullRequestModel().check_user_delete(
417 c.allowed_to_delete = PullRequestModel().check_user_delete(
380 pull_request_latest, self._rhodecode_user) and not pr_closed
418 pull_request_latest, self._rhodecode_user) and not pr_closed
381 c.allowed_to_comment = not pr_closed
419 c.allowed_to_comment = not pr_closed
382 c.allowed_to_close = c.allowed_to_merge and not pr_closed
420 c.allowed_to_close = c.allowed_to_merge and not pr_closed
383
421
384 c.forbid_adding_reviewers = False
422 c.forbid_adding_reviewers = False
385 c.forbid_author_to_review = False
423 c.forbid_author_to_review = False
386 c.forbid_commit_author_to_review = False
424 c.forbid_commit_author_to_review = False
387
425
388 if pull_request_latest.reviewer_data and \
426 if pull_request_latest.reviewer_data and \
389 'rules' in pull_request_latest.reviewer_data:
427 'rules' in pull_request_latest.reviewer_data:
390 rules = pull_request_latest.reviewer_data['rules'] or {}
428 rules = pull_request_latest.reviewer_data['rules'] or {}
391 try:
429 try:
392 c.forbid_adding_reviewers = rules.get(
430 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
393 'forbid_adding_reviewers')
431 c.forbid_author_to_review = rules.get('forbid_author_to_review')
394 c.forbid_author_to_review = rules.get(
432 c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review')
395 'forbid_author_to_review')
396 c.forbid_commit_author_to_review = rules.get(
397 'forbid_commit_author_to_review')
398 except Exception:
433 except Exception:
399 pass
434 pass
400
435
401 # check merge capabilities
436 # check merge capabilities
402 _merge_check = MergeCheck.validate(
437 _merge_check = MergeCheck.validate(
403 pull_request_latest, auth_user=self._rhodecode_user,
438 pull_request_latest, auth_user=self._rhodecode_user,
404 translator=self.request.translate,
439 translator=self.request.translate,
405 force_shadow_repo_refresh=force_refresh)
440 force_shadow_repo_refresh=force_refresh)
406
441
407 c.pr_merge_errors = _merge_check.error_details
442 c.pr_merge_errors = _merge_check.error_details
408 c.pr_merge_possible = not _merge_check.failed
443 c.pr_merge_possible = not _merge_check.failed
409 c.pr_merge_message = _merge_check.merge_msg
444 c.pr_merge_message = _merge_check.merge_msg
410 c.pr_merge_source_commit = _merge_check.source_commit
445 c.pr_merge_source_commit = _merge_check.source_commit
411 c.pr_merge_target_commit = _merge_check.target_commit
446 c.pr_merge_target_commit = _merge_check.target_commit
412
447
413 c.pr_merge_info = MergeCheck.get_merge_conditions(
448 c.pr_merge_info = MergeCheck.get_merge_conditions(
414 pull_request_latest, translator=self.request.translate)
449 pull_request_latest, translator=self.request.translate)
415
450
416 c.pull_request_review_status = _merge_check.review_status
451 c.pull_request_review_status = _merge_check.review_status
417 if merge_checks:
452 if merge_checks:
418 self.request.override_renderer = \
453 self.request.override_renderer = \
419 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
454 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
420 return self._get_template_context(c)
455 return self._get_template_context(c)
421
456
422 comments_model = CommentsModel()
457 c.allowed_reviewers = [obj.user_id for obj in pull_request.reviewers if obj.user]
423
458
424 # reviewers and statuses
459 # reviewers and statuses
425 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
460 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
426 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
461 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
427
462
428 # GENERAL COMMENTS with versions #
463 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
429 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
464 member_reviewer = h.reviewer_as_json(
430 q = q.order_by(ChangesetComment.comment_id.asc())
465 member, reasons=reasons, mandatory=mandatory,
431 general_comments = q
466 user_group=review_obj.rule_user_group_data()
467 )
432
468
433 # pick comments we want to render at current version
469 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
434 c.comment_versions = comments_model.aggregate_comments(
470 member_reviewer['review_status'] = current_review_status
435 general_comments, versions, c.at_version_num)
471 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
436 c.comments = c.comment_versions[c.at_version_num]['until']
472 member_reviewer['allowed_to_update'] = c.allowed_to_update
473 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
437
474
438 # INLINE COMMENTS with versions #
475 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
439 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
440 q = q.order_by(ChangesetComment.comment_id.asc())
441 inline_comments = q
442
476
443 c.inline_versions = comments_model.aggregate_comments(
477 general_comments, inline_comments = \
444 inline_comments, versions, c.at_version_num, inline=True)
478 self.register_comments_vars(c, pull_request_latest, versions)
445
479
446 # TODOs
480 # TODOs
447 c.unresolved_comments = CommentsModel() \
481 c.unresolved_comments = CommentsModel() \
448 .get_pull_request_unresolved_todos(pull_request)
482 .get_pull_request_unresolved_todos(pull_request_latest)
449 c.resolved_comments = CommentsModel() \
483 c.resolved_comments = CommentsModel() \
450 .get_pull_request_resolved_todos(pull_request)
484 .get_pull_request_resolved_todos(pull_request_latest)
451
452 # inject latest version
453 latest_ver = PullRequest.get_pr_display_object(
454 pull_request_latest, pull_request_latest)
455
456 c.versions = versions + [latest_ver]
457
485
458 # if we use version, then do not show later comments
486 # if we use version, then do not show later comments
459 # than current version
487 # than current version
460 display_inline_comments = collections.defaultdict(
488 display_inline_comments = collections.defaultdict(
461 lambda: collections.defaultdict(list))
489 lambda: collections.defaultdict(list))
462 for co in inline_comments:
490 for co in inline_comments:
463 if c.at_version_num:
491 if c.at_version_num:
464 # pick comments that are at least UPTO given version, so we
492 # pick comments that are at least UPTO given version, so we
465 # don't render comments for higher version
493 # don't render comments for higher version
466 should_render = co.pull_request_version_id and \
494 should_render = co.pull_request_version_id and \
467 co.pull_request_version_id <= c.at_version_num
495 co.pull_request_version_id <= c.at_version_num
468 else:
496 else:
469 # showing all, for 'latest'
497 # showing all, for 'latest'
470 should_render = True
498 should_render = True
471
499
472 if should_render:
500 if should_render:
473 display_inline_comments[co.f_path][co.line_no].append(co)
501 display_inline_comments[co.f_path][co.line_no].append(co)
474
502
475 # load diff data into template context, if we use compare mode then
503 # load diff data into template context, if we use compare mode then
476 # diff is calculated based on changes between versions of PR
504 # diff is calculated based on changes between versions of PR
477
505
478 source_repo = pull_request_at_ver.source_repo
506 source_repo = pull_request_at_ver.source_repo
479 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
507 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
480
508
481 target_repo = pull_request_at_ver.target_repo
509 target_repo = pull_request_at_ver.target_repo
482 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
510 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
483
511
484 if compare:
512 if compare:
485 # in compare switch the diff base to latest commit from prev version
513 # in compare switch the diff base to latest commit from prev version
486 target_ref_id = prev_pull_request_display_obj.revisions[0]
514 target_ref_id = prev_pull_request_display_obj.revisions[0]
487
515
488 # despite opening commits for bookmarks/branches/tags, we always
516 # despite opening commits for bookmarks/branches/tags, we always
489 # convert this to rev to prevent changes after bookmark or branch change
517 # convert this to rev to prevent changes after bookmark or branch change
490 c.source_ref_type = 'rev'
518 c.source_ref_type = 'rev'
491 c.source_ref = source_ref_id
519 c.source_ref = source_ref_id
492
520
493 c.target_ref_type = 'rev'
521 c.target_ref_type = 'rev'
494 c.target_ref = target_ref_id
522 c.target_ref = target_ref_id
495
523
496 c.source_repo = source_repo
524 c.source_repo = source_repo
497 c.target_repo = target_repo
525 c.target_repo = target_repo
498
526
499 c.commit_ranges = []
527 c.commit_ranges = []
500 source_commit = EmptyCommit()
528 source_commit = EmptyCommit()
501 target_commit = EmptyCommit()
529 target_commit = EmptyCommit()
502 c.missing_requirements = False
530 c.missing_requirements = False
503
531
504 source_scm = source_repo.scm_instance()
532 source_scm = source_repo.scm_instance()
505 target_scm = target_repo.scm_instance()
533 target_scm = target_repo.scm_instance()
506
534
507 shadow_scm = None
535 shadow_scm = None
508 try:
536 try:
509 shadow_scm = pull_request_latest.get_shadow_repo()
537 shadow_scm = pull_request_latest.get_shadow_repo()
510 except Exception:
538 except Exception:
511 log.debug('Failed to get shadow repo', exc_info=True)
539 log.debug('Failed to get shadow repo', exc_info=True)
512 # try first the existing source_repo, and then shadow
540 # try first the existing source_repo, and then shadow
513 # repo if we can obtain one
541 # repo if we can obtain one
514 commits_source_repo = source_scm
542 commits_source_repo = source_scm
515 if shadow_scm:
543 if shadow_scm:
516 commits_source_repo = shadow_scm
544 commits_source_repo = shadow_scm
517
545
518 c.commits_source_repo = commits_source_repo
546 c.commits_source_repo = commits_source_repo
519 c.ancestor = None # set it to None, to hide it from PR view
547 c.ancestor = None # set it to None, to hide it from PR view
520
548
521 # empty version means latest, so we keep this to prevent
549 # empty version means latest, so we keep this to prevent
522 # double caching
550 # double caching
523 version_normalized = version or 'latest'
551 version_normalized = version or PullRequest.LATEST_VER
524 from_version_normalized = from_version or 'latest'
552 from_version_normalized = from_version or PullRequest.LATEST_VER
525
553
526 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
554 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
527 cache_file_path = diff_cache_exist(
555 cache_file_path = diff_cache_exist(
528 cache_path, 'pull_request', pull_request_id, version_normalized,
556 cache_path, 'pull_request', pull_request_id, version_normalized,
529 from_version_normalized, source_ref_id, target_ref_id,
557 from_version_normalized, source_ref_id, target_ref_id,
530 hide_whitespace_changes, diff_context, c.fulldiff)
558 hide_whitespace_changes, diff_context, c.fulldiff)
531
559
532 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
560 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
533 force_recache = self.get_recache_flag()
561 force_recache = self.get_recache_flag()
534
562
535 cached_diff = None
563 cached_diff = None
536 if caching_enabled:
564 if caching_enabled:
537 cached_diff = load_cached_diff(cache_file_path)
565 cached_diff = load_cached_diff(cache_file_path)
538
566
539 has_proper_commit_cache = (
567 has_proper_commit_cache = (
540 cached_diff and cached_diff.get('commits')
568 cached_diff and cached_diff.get('commits')
541 and len(cached_diff.get('commits', [])) == 5
569 and len(cached_diff.get('commits', [])) == 5
542 and cached_diff.get('commits')[0]
570 and cached_diff.get('commits')[0]
543 and cached_diff.get('commits')[3])
571 and cached_diff.get('commits')[3])
544
572
545 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
573 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
546 diff_commit_cache = \
574 diff_commit_cache = \
547 (ancestor_commit, commit_cache, missing_requirements,
575 (ancestor_commit, commit_cache, missing_requirements,
548 source_commit, target_commit) = cached_diff['commits']
576 source_commit, target_commit) = cached_diff['commits']
549 else:
577 else:
550 # NOTE(marcink): we reach potentially unreachable errors when a PR has
578 # NOTE(marcink): we reach potentially unreachable errors when a PR has
551 # merge errors resulting in potentially hidden commits in the shadow repo.
579 # merge errors resulting in potentially hidden commits in the shadow repo.
552 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
580 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
553 and _merge_check.merge_response
581 and _merge_check.merge_response
554 maybe_unreachable = maybe_unreachable \
582 maybe_unreachable = maybe_unreachable \
555 and _merge_check.merge_response.metadata.get('unresolved_files')
583 and _merge_check.merge_response.metadata.get('unresolved_files')
556 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
584 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
557 diff_commit_cache = \
585 diff_commit_cache = \
558 (ancestor_commit, commit_cache, missing_requirements,
586 (ancestor_commit, commit_cache, missing_requirements,
559 source_commit, target_commit) = self.get_commits(
587 source_commit, target_commit) = self.get_commits(
560 commits_source_repo,
588 commits_source_repo,
561 pull_request_at_ver,
589 pull_request_at_ver,
562 source_commit,
590 source_commit,
563 source_ref_id,
591 source_ref_id,
564 source_scm,
592 source_scm,
565 target_commit,
593 target_commit,
566 target_ref_id,
594 target_ref_id,
567 target_scm,
595 target_scm,
568 maybe_unreachable=maybe_unreachable)
596 maybe_unreachable=maybe_unreachable)
569
597
570 # register our commit range
598 # register our commit range
571 for comm in commit_cache.values():
599 for comm in commit_cache.values():
572 c.commit_ranges.append(comm)
600 c.commit_ranges.append(comm)
573
601
574 c.missing_requirements = missing_requirements
602 c.missing_requirements = missing_requirements
575 c.ancestor_commit = ancestor_commit
603 c.ancestor_commit = ancestor_commit
576 c.statuses = source_repo.statuses(
604 c.statuses = source_repo.statuses(
577 [x.raw_id for x in c.commit_ranges])
605 [x.raw_id for x in c.commit_ranges])
578
606
579 # auto collapse if we have more than limit
607 # auto collapse if we have more than limit
580 collapse_limit = diffs.DiffProcessor._collapse_commits_over
608 collapse_limit = diffs.DiffProcessor._collapse_commits_over
581 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
609 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
582 c.compare_mode = compare
610 c.compare_mode = compare
583
611
584 # diff_limit is the old behavior, will cut off the whole diff
612 # diff_limit is the old behavior, will cut off the whole diff
585 # if the limit is applied otherwise will just hide the
613 # if the limit is applied otherwise will just hide the
586 # big files from the front-end
614 # big files from the front-end
587 diff_limit = c.visual.cut_off_limit_diff
615 diff_limit = c.visual.cut_off_limit_diff
588 file_limit = c.visual.cut_off_limit_file
616 file_limit = c.visual.cut_off_limit_file
589
617
590 c.missing_commits = False
618 c.missing_commits = False
591 if (c.missing_requirements
619 if (c.missing_requirements
592 or isinstance(source_commit, EmptyCommit)
620 or isinstance(source_commit, EmptyCommit)
593 or source_commit == target_commit):
621 or source_commit == target_commit):
594
622
595 c.missing_commits = True
623 c.missing_commits = True
596 else:
624 else:
597 c.inline_comments = display_inline_comments
625 c.inline_comments = display_inline_comments
598
626
599 use_ancestor = True
627 use_ancestor = True
600 if from_version_normalized != version_normalized:
628 if from_version_normalized != version_normalized:
601 use_ancestor = False
629 use_ancestor = False
602
630
603 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
631 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
604 if not force_recache and has_proper_diff_cache:
632 if not force_recache and has_proper_diff_cache:
605 c.diffset = cached_diff['diff']
633 c.diffset = cached_diff['diff']
606 else:
634 else:
607 try:
635 try:
608 c.diffset = self._get_diffset(
636 c.diffset = self._get_diffset(
609 c.source_repo.repo_name, commits_source_repo,
637 c.source_repo.repo_name, commits_source_repo,
610 c.ancestor_commit,
638 c.ancestor_commit,
611 source_ref_id, target_ref_id,
639 source_ref_id, target_ref_id,
612 target_commit, source_commit,
640 target_commit, source_commit,
613 diff_limit, file_limit, c.fulldiff,
641 diff_limit, file_limit, c.fulldiff,
614 hide_whitespace_changes, diff_context,
642 hide_whitespace_changes, diff_context,
615 use_ancestor=use_ancestor
643 use_ancestor=use_ancestor
616 )
644 )
617
645
618 # save cached diff
646 # save cached diff
619 if caching_enabled:
647 if caching_enabled:
620 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
648 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
621 except CommitDoesNotExistError:
649 except CommitDoesNotExistError:
622 log.exception('Failed to generate diffset')
650 log.exception('Failed to generate diffset')
623 c.missing_commits = True
651 c.missing_commits = True
624
652
625 if not c.missing_commits:
653 if not c.missing_commits:
626
654
627 c.limited_diff = c.diffset.limited_diff
655 c.limited_diff = c.diffset.limited_diff
628
656
629 # calculate removed files that are bound to comments
657 # calculate removed files that are bound to comments
630 comment_deleted_files = [
658 comment_deleted_files = [
631 fname for fname in display_inline_comments
659 fname for fname in display_inline_comments
632 if fname not in c.diffset.file_stats]
660 if fname not in c.diffset.file_stats]
633
661
634 c.deleted_files_comments = collections.defaultdict(dict)
662 c.deleted_files_comments = collections.defaultdict(dict)
635 for fname, per_line_comments in display_inline_comments.items():
663 for fname, per_line_comments in display_inline_comments.items():
636 if fname in comment_deleted_files:
664 if fname in comment_deleted_files:
637 c.deleted_files_comments[fname]['stats'] = 0
665 c.deleted_files_comments[fname]['stats'] = 0
638 c.deleted_files_comments[fname]['comments'] = list()
666 c.deleted_files_comments[fname]['comments'] = list()
639 for lno, comments in per_line_comments.items():
667 for lno, comments in per_line_comments.items():
640 c.deleted_files_comments[fname]['comments'].extend(comments)
668 c.deleted_files_comments[fname]['comments'].extend(comments)
641
669
642 # maybe calculate the range diff
670 # maybe calculate the range diff
643 if c.range_diff_on:
671 if c.range_diff_on:
644 # TODO(marcink): set whitespace/context
672 # TODO(marcink): set whitespace/context
645 context_lcl = 3
673 context_lcl = 3
646 ign_whitespace_lcl = False
674 ign_whitespace_lcl = False
647
675
648 for commit in c.commit_ranges:
676 for commit in c.commit_ranges:
649 commit2 = commit
677 commit2 = commit
650 commit1 = commit.first_parent
678 commit1 = commit.first_parent
651
679
652 range_diff_cache_file_path = diff_cache_exist(
680 range_diff_cache_file_path = diff_cache_exist(
653 cache_path, 'diff', commit.raw_id,
681 cache_path, 'diff', commit.raw_id,
654 ign_whitespace_lcl, context_lcl, c.fulldiff)
682 ign_whitespace_lcl, context_lcl, c.fulldiff)
655
683
656 cached_diff = None
684 cached_diff = None
657 if caching_enabled:
685 if caching_enabled:
658 cached_diff = load_cached_diff(range_diff_cache_file_path)
686 cached_diff = load_cached_diff(range_diff_cache_file_path)
659
687
660 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
688 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
661 if not force_recache and has_proper_diff_cache:
689 if not force_recache and has_proper_diff_cache:
662 diffset = cached_diff['diff']
690 diffset = cached_diff['diff']
663 else:
691 else:
664 diffset = self._get_range_diffset(
692 diffset = self._get_range_diffset(
665 commits_source_repo, source_repo,
693 commits_source_repo, source_repo,
666 commit1, commit2, diff_limit, file_limit,
694 commit1, commit2, diff_limit, file_limit,
667 c.fulldiff, ign_whitespace_lcl, context_lcl
695 c.fulldiff, ign_whitespace_lcl, context_lcl
668 )
696 )
669
697
670 # save cached diff
698 # save cached diff
671 if caching_enabled:
699 if caching_enabled:
672 cache_diff(range_diff_cache_file_path, diffset, None)
700 cache_diff(range_diff_cache_file_path, diffset, None)
673
701
674 c.changes[commit.raw_id] = diffset
702 c.changes[commit.raw_id] = diffset
675
703
676 # this is a hack to properly display links, when creating PR, the
704 # this is a hack to properly display links, when creating PR, the
677 # compare view and others uses different notation, and
705 # compare view and others uses different notation, and
678 # compare_commits.mako renders links based on the target_repo.
706 # compare_commits.mako renders links based on the target_repo.
679 # We need to swap that here to generate it properly on the html side
707 # We need to swap that here to generate it properly on the html side
680 c.target_repo = c.source_repo
708 c.target_repo = c.source_repo
681
709
682 c.commit_statuses = ChangesetStatus.STATUSES
710 c.commit_statuses = ChangesetStatus.STATUSES
683
711
684 c.show_version_changes = not pr_closed
712 c.show_version_changes = not pr_closed
685 if c.show_version_changes:
713 if c.show_version_changes:
686 cur_obj = pull_request_at_ver
714 cur_obj = pull_request_at_ver
687 prev_obj = prev_pull_request_at_ver
715 prev_obj = prev_pull_request_at_ver
688
716
689 old_commit_ids = prev_obj.revisions
717 old_commit_ids = prev_obj.revisions
690 new_commit_ids = cur_obj.revisions
718 new_commit_ids = cur_obj.revisions
691 commit_changes = PullRequestModel()._calculate_commit_id_changes(
719 commit_changes = PullRequestModel()._calculate_commit_id_changes(
692 old_commit_ids, new_commit_ids)
720 old_commit_ids, new_commit_ids)
693 c.commit_changes_summary = commit_changes
721 c.commit_changes_summary = commit_changes
694
722
695 # calculate the diff for commits between versions
723 # calculate the diff for commits between versions
696 c.commit_changes = []
724 c.commit_changes = []
697
725
698 def mark(cs, fw):
726 def mark(cs, fw):
699 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
727 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
700
728
701 for c_type, raw_id in mark(commit_changes.added, 'a') \
729 for c_type, raw_id in mark(commit_changes.added, 'a') \
702 + mark(commit_changes.removed, 'r') \
730 + mark(commit_changes.removed, 'r') \
703 + mark(commit_changes.common, 'c'):
731 + mark(commit_changes.common, 'c'):
704
732
705 if raw_id in commit_cache:
733 if raw_id in commit_cache:
706 commit = commit_cache[raw_id]
734 commit = commit_cache[raw_id]
707 else:
735 else:
708 try:
736 try:
709 commit = commits_source_repo.get_commit(raw_id)
737 commit = commits_source_repo.get_commit(raw_id)
710 except CommitDoesNotExistError:
738 except CommitDoesNotExistError:
711 # in case we fail extracting still use "dummy" commit
739 # in case we fail extracting still use "dummy" commit
712 # for display in commit diff
740 # for display in commit diff
713 commit = h.AttributeDict(
741 commit = h.AttributeDict(
714 {'raw_id': raw_id,
742 {'raw_id': raw_id,
715 'message': 'EMPTY or MISSING COMMIT'})
743 'message': 'EMPTY or MISSING COMMIT'})
716 c.commit_changes.append([c_type, commit])
744 c.commit_changes.append([c_type, commit])
717
745
718 # current user review statuses for each version
746 # current user review statuses for each version
719 c.review_versions = {}
747 c.review_versions = {}
720 if self._rhodecode_user.user_id in allowed_reviewers:
748 if self._rhodecode_user.user_id in c.allowed_reviewers:
721 for co in general_comments:
749 for co in general_comments:
722 if co.author.user_id == self._rhodecode_user.user_id:
750 if co.author.user_id == self._rhodecode_user.user_id:
723 status = co.status_change
751 status = co.status_change
724 if status:
752 if status:
725 _ver_pr = status[0].comment.pull_request_version_id
753 _ver_pr = status[0].comment.pull_request_version_id
726 c.review_versions[_ver_pr] = status[0]
754 c.review_versions[_ver_pr] = status[0]
727
755
728 return self._get_template_context(c)
756 return self._get_template_context(c)
729
757
730 def get_commits(
758 def get_commits(
731 self, commits_source_repo, pull_request_at_ver, source_commit,
759 self, commits_source_repo, pull_request_at_ver, source_commit,
732 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
760 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
733 maybe_unreachable=False):
761 maybe_unreachable=False):
734
762
735 commit_cache = collections.OrderedDict()
763 commit_cache = collections.OrderedDict()
736 missing_requirements = False
764 missing_requirements = False
737
765
738 try:
766 try:
739 pre_load = ["author", "date", "message", "branch", "parents"]
767 pre_load = ["author", "date", "message", "branch", "parents"]
740
768
741 pull_request_commits = pull_request_at_ver.revisions
769 pull_request_commits = pull_request_at_ver.revisions
742 log.debug('Loading %s commits from %s',
770 log.debug('Loading %s commits from %s',
743 len(pull_request_commits), commits_source_repo)
771 len(pull_request_commits), commits_source_repo)
744
772
745 for rev in pull_request_commits:
773 for rev in pull_request_commits:
746 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
774 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
747 maybe_unreachable=maybe_unreachable)
775 maybe_unreachable=maybe_unreachable)
748 commit_cache[comm.raw_id] = comm
776 commit_cache[comm.raw_id] = comm
749
777
750 # Order here matters, we first need to get target, and then
778 # Order here matters, we first need to get target, and then
751 # the source
779 # the source
752 target_commit = commits_source_repo.get_commit(
780 target_commit = commits_source_repo.get_commit(
753 commit_id=safe_str(target_ref_id))
781 commit_id=safe_str(target_ref_id))
754
782
755 source_commit = commits_source_repo.get_commit(
783 source_commit = commits_source_repo.get_commit(
756 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
784 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
757 except CommitDoesNotExistError:
785 except CommitDoesNotExistError:
758 log.warning('Failed to get commit from `{}` repo'.format(
786 log.warning('Failed to get commit from `{}` repo'.format(
759 commits_source_repo), exc_info=True)
787 commits_source_repo), exc_info=True)
760 except RepositoryRequirementError:
788 except RepositoryRequirementError:
761 log.warning('Failed to get all required data from repo', exc_info=True)
789 log.warning('Failed to get all required data from repo', exc_info=True)
762 missing_requirements = True
790 missing_requirements = True
763
791
764 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
792 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
765
793
766 try:
794 try:
767 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
795 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
768 except Exception:
796 except Exception:
769 ancestor_commit = None
797 ancestor_commit = None
770
798
771 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
799 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
772
800
773 def assure_not_empty_repo(self):
801 def assure_not_empty_repo(self):
774 _ = self.request.translate
802 _ = self.request.translate
775
803
776 try:
804 try:
777 self.db_repo.scm_instance().get_commit()
805 self.db_repo.scm_instance().get_commit()
778 except EmptyRepositoryError:
806 except EmptyRepositoryError:
779 h.flash(h.literal(_('There are no commits yet')),
807 h.flash(h.literal(_('There are no commits yet')),
780 category='warning')
808 category='warning')
781 raise HTTPFound(
809 raise HTTPFound(
782 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
810 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
783
811
784 @LoginRequired()
812 @LoginRequired()
785 @NotAnonymous()
813 @NotAnonymous()
786 @HasRepoPermissionAnyDecorator(
814 @HasRepoPermissionAnyDecorator(
787 'repository.read', 'repository.write', 'repository.admin')
815 'repository.read', 'repository.write', 'repository.admin')
788 @view_config(
816 @view_config(
789 route_name='pullrequest_new', request_method='GET',
817 route_name='pullrequest_new', request_method='GET',
790 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
818 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
791 def pull_request_new(self):
819 def pull_request_new(self):
792 _ = self.request.translate
820 _ = self.request.translate
793 c = self.load_default_context()
821 c = self.load_default_context()
794
822
795 self.assure_not_empty_repo()
823 self.assure_not_empty_repo()
796 source_repo = self.db_repo
824 source_repo = self.db_repo
797
825
798 commit_id = self.request.GET.get('commit')
826 commit_id = self.request.GET.get('commit')
799 branch_ref = self.request.GET.get('branch')
827 branch_ref = self.request.GET.get('branch')
800 bookmark_ref = self.request.GET.get('bookmark')
828 bookmark_ref = self.request.GET.get('bookmark')
801
829
802 try:
830 try:
803 source_repo_data = PullRequestModel().generate_repo_data(
831 source_repo_data = PullRequestModel().generate_repo_data(
804 source_repo, commit_id=commit_id,
832 source_repo, commit_id=commit_id,
805 branch=branch_ref, bookmark=bookmark_ref,
833 branch=branch_ref, bookmark=bookmark_ref,
806 translator=self.request.translate)
834 translator=self.request.translate)
807 except CommitDoesNotExistError as e:
835 except CommitDoesNotExistError as e:
808 log.exception(e)
836 log.exception(e)
809 h.flash(_('Commit does not exist'), 'error')
837 h.flash(_('Commit does not exist'), 'error')
810 raise HTTPFound(
838 raise HTTPFound(
811 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
839 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
812
840
813 default_target_repo = source_repo
841 default_target_repo = source_repo
814
842
815 if source_repo.parent and c.has_origin_repo_read_perm:
843 if source_repo.parent and c.has_origin_repo_read_perm:
816 parent_vcs_obj = source_repo.parent.scm_instance()
844 parent_vcs_obj = source_repo.parent.scm_instance()
817 if parent_vcs_obj and not parent_vcs_obj.is_empty():
845 if parent_vcs_obj and not parent_vcs_obj.is_empty():
818 # change default if we have a parent repo
846 # change default if we have a parent repo
819 default_target_repo = source_repo.parent
847 default_target_repo = source_repo.parent
820
848
821 target_repo_data = PullRequestModel().generate_repo_data(
849 target_repo_data = PullRequestModel().generate_repo_data(
822 default_target_repo, translator=self.request.translate)
850 default_target_repo, translator=self.request.translate)
823
851
824 selected_source_ref = source_repo_data['refs']['selected_ref']
852 selected_source_ref = source_repo_data['refs']['selected_ref']
825 title_source_ref = ''
853 title_source_ref = ''
826 if selected_source_ref:
854 if selected_source_ref:
827 title_source_ref = selected_source_ref.split(':', 2)[1]
855 title_source_ref = selected_source_ref.split(':', 2)[1]
828 c.default_title = PullRequestModel().generate_pullrequest_title(
856 c.default_title = PullRequestModel().generate_pullrequest_title(
829 source=source_repo.repo_name,
857 source=source_repo.repo_name,
830 source_ref=title_source_ref,
858 source_ref=title_source_ref,
831 target=default_target_repo.repo_name
859 target=default_target_repo.repo_name
832 )
860 )
833
861
834 c.default_repo_data = {
862 c.default_repo_data = {
835 'source_repo_name': source_repo.repo_name,
863 'source_repo_name': source_repo.repo_name,
836 'source_refs_json': json.dumps(source_repo_data),
864 'source_refs_json': json.dumps(source_repo_data),
837 'target_repo_name': default_target_repo.repo_name,
865 'target_repo_name': default_target_repo.repo_name,
838 'target_refs_json': json.dumps(target_repo_data),
866 'target_refs_json': json.dumps(target_repo_data),
839 }
867 }
840 c.default_source_ref = selected_source_ref
868 c.default_source_ref = selected_source_ref
841
869
842 return self._get_template_context(c)
870 return self._get_template_context(c)
843
871
844 @LoginRequired()
872 @LoginRequired()
845 @NotAnonymous()
873 @NotAnonymous()
846 @HasRepoPermissionAnyDecorator(
874 @HasRepoPermissionAnyDecorator(
847 'repository.read', 'repository.write', 'repository.admin')
875 'repository.read', 'repository.write', 'repository.admin')
848 @view_config(
876 @view_config(
849 route_name='pullrequest_repo_refs', request_method='GET',
877 route_name='pullrequest_repo_refs', request_method='GET',
850 renderer='json_ext', xhr=True)
878 renderer='json_ext', xhr=True)
851 def pull_request_repo_refs(self):
879 def pull_request_repo_refs(self):
852 self.load_default_context()
880 self.load_default_context()
853 target_repo_name = self.request.matchdict['target_repo_name']
881 target_repo_name = self.request.matchdict['target_repo_name']
854 repo = Repository.get_by_repo_name(target_repo_name)
882 repo = Repository.get_by_repo_name(target_repo_name)
855 if not repo:
883 if not repo:
856 raise HTTPNotFound()
884 raise HTTPNotFound()
857
885
858 target_perm = HasRepoPermissionAny(
886 target_perm = HasRepoPermissionAny(
859 'repository.read', 'repository.write', 'repository.admin')(
887 'repository.read', 'repository.write', 'repository.admin')(
860 target_repo_name)
888 target_repo_name)
861 if not target_perm:
889 if not target_perm:
862 raise HTTPNotFound()
890 raise HTTPNotFound()
863
891
864 return PullRequestModel().generate_repo_data(
892 return PullRequestModel().generate_repo_data(
865 repo, translator=self.request.translate)
893 repo, translator=self.request.translate)
866
894
867 @LoginRequired()
895 @LoginRequired()
868 @NotAnonymous()
896 @NotAnonymous()
869 @HasRepoPermissionAnyDecorator(
897 @HasRepoPermissionAnyDecorator(
870 'repository.read', 'repository.write', 'repository.admin')
898 'repository.read', 'repository.write', 'repository.admin')
871 @view_config(
899 @view_config(
872 route_name='pullrequest_repo_targets', request_method='GET',
900 route_name='pullrequest_repo_targets', request_method='GET',
873 renderer='json_ext', xhr=True)
901 renderer='json_ext', xhr=True)
874 def pullrequest_repo_targets(self):
902 def pullrequest_repo_targets(self):
875 _ = self.request.translate
903 _ = self.request.translate
876 filter_query = self.request.GET.get('query')
904 filter_query = self.request.GET.get('query')
877
905
878 # get the parents
906 # get the parents
879 parent_target_repos = []
907 parent_target_repos = []
880 if self.db_repo.parent:
908 if self.db_repo.parent:
881 parents_query = Repository.query() \
909 parents_query = Repository.query() \
882 .order_by(func.length(Repository.repo_name)) \
910 .order_by(func.length(Repository.repo_name)) \
883 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
911 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
884
912
885 if filter_query:
913 if filter_query:
886 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
914 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
887 parents_query = parents_query.filter(
915 parents_query = parents_query.filter(
888 Repository.repo_name.ilike(ilike_expression))
916 Repository.repo_name.ilike(ilike_expression))
889 parents = parents_query.limit(20).all()
917 parents = parents_query.limit(20).all()
890
918
891 for parent in parents:
919 for parent in parents:
892 parent_vcs_obj = parent.scm_instance()
920 parent_vcs_obj = parent.scm_instance()
893 if parent_vcs_obj and not parent_vcs_obj.is_empty():
921 if parent_vcs_obj and not parent_vcs_obj.is_empty():
894 parent_target_repos.append(parent)
922 parent_target_repos.append(parent)
895
923
896 # get other forks, and repo itself
924 # get other forks, and repo itself
897 query = Repository.query() \
925 query = Repository.query() \
898 .order_by(func.length(Repository.repo_name)) \
926 .order_by(func.length(Repository.repo_name)) \
899 .filter(
927 .filter(
900 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
928 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
901 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
929 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
902 ) \
930 ) \
903 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
931 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
904
932
905 if filter_query:
933 if filter_query:
906 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
934 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
907 query = query.filter(Repository.repo_name.ilike(ilike_expression))
935 query = query.filter(Repository.repo_name.ilike(ilike_expression))
908
936
909 limit = max(20 - len(parent_target_repos), 5) # not less then 5
937 limit = max(20 - len(parent_target_repos), 5) # not less then 5
910 target_repos = query.limit(limit).all()
938 target_repos = query.limit(limit).all()
911
939
912 all_target_repos = target_repos + parent_target_repos
940 all_target_repos = target_repos + parent_target_repos
913
941
914 repos = []
942 repos = []
915 # This checks permissions to the repositories
943 # This checks permissions to the repositories
916 for obj in ScmModel().get_repos(all_target_repos):
944 for obj in ScmModel().get_repos(all_target_repos):
917 repos.append({
945 repos.append({
918 'id': obj['name'],
946 'id': obj['name'],
919 'text': obj['name'],
947 'text': obj['name'],
920 'type': 'repo',
948 'type': 'repo',
921 'repo_id': obj['dbrepo']['repo_id'],
949 'repo_id': obj['dbrepo']['repo_id'],
922 'repo_type': obj['dbrepo']['repo_type'],
950 'repo_type': obj['dbrepo']['repo_type'],
923 'private': obj['dbrepo']['private'],
951 'private': obj['dbrepo']['private'],
924
952
925 })
953 })
926
954
927 data = {
955 data = {
928 'more': False,
956 'more': False,
929 'results': [{
957 'results': [{
930 'text': _('Repositories'),
958 'text': _('Repositories'),
931 'children': repos
959 'children': repos
932 }] if repos else []
960 }] if repos else []
933 }
961 }
934 return data
962 return data
935
963
936 @LoginRequired()
964 @LoginRequired()
937 @NotAnonymous()
965 @NotAnonymous()
938 @HasRepoPermissionAnyDecorator(
966 @HasRepoPermissionAnyDecorator(
939 'repository.read', 'repository.write', 'repository.admin')
967 'repository.read', 'repository.write', 'repository.admin')
968 @view_config(
969 route_name='pullrequest_comments', request_method='POST',
970 renderer='string', xhr=True)
971 def pullrequest_comments(self):
972 self.load_default_context()
973
974 pull_request = PullRequest.get_or_404(
975 self.request.matchdict['pull_request_id'])
976 pull_request_id = pull_request.pull_request_id
977 version = self.request.GET.get('version')
978
979 _render = self.request.get_partial_renderer(
980 'rhodecode:templates/base/sidebar.mako')
981 c = _render.get_call_context()
982
983 (pull_request_latest,
984 pull_request_at_ver,
985 pull_request_display_obj,
986 at_version) = PullRequestModel().get_pr_version(
987 pull_request_id, version=version)
988 versions = pull_request_display_obj.versions()
989 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
990 c.versions = versions + [latest_ver]
991
992 c.at_version = at_version
993 c.at_version_num = (at_version
994 if at_version and at_version != PullRequest.LATEST_VER
995 else None)
996
997 self.register_comments_vars(c, pull_request_latest, versions)
998 all_comments = c.inline_comments_flat + c.comments
999
1000 existing_ids = filter(
1001 lambda e: e, map(safe_int, self.request.POST.getall('comments[]')))
1002 return _render('comments_table', all_comments, len(all_comments),
1003 existing_ids=existing_ids)
1004
1005 @LoginRequired()
1006 @NotAnonymous()
1007 @HasRepoPermissionAnyDecorator(
1008 'repository.read', 'repository.write', 'repository.admin')
1009 @view_config(
1010 route_name='pullrequest_todos', request_method='POST',
1011 renderer='string', xhr=True)
1012 def pullrequest_todos(self):
1013 self.load_default_context()
1014
1015 pull_request = PullRequest.get_or_404(
1016 self.request.matchdict['pull_request_id'])
1017 pull_request_id = pull_request.pull_request_id
1018 version = self.request.GET.get('version')
1019
1020 _render = self.request.get_partial_renderer(
1021 'rhodecode:templates/base/sidebar.mako')
1022 c = _render.get_call_context()
1023 (pull_request_latest,
1024 pull_request_at_ver,
1025 pull_request_display_obj,
1026 at_version) = PullRequestModel().get_pr_version(
1027 pull_request_id, version=version)
1028 versions = pull_request_display_obj.versions()
1029 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1030 c.versions = versions + [latest_ver]
1031
1032 c.at_version = at_version
1033 c.at_version_num = (at_version
1034 if at_version and at_version != PullRequest.LATEST_VER
1035 else None)
1036
1037 c.unresolved_comments = CommentsModel() \
1038 .get_pull_request_unresolved_todos(pull_request)
1039 c.resolved_comments = CommentsModel() \
1040 .get_pull_request_resolved_todos(pull_request)
1041
1042 all_comments = c.unresolved_comments + c.resolved_comments
1043 existing_ids = filter(
1044 lambda e: e, map(safe_int, self.request.POST.getall('comments[]')))
1045 return _render('comments_table', all_comments, len(c.unresolved_comments),
1046 todo_comments=True, existing_ids=existing_ids)
1047
1048 @LoginRequired()
1049 @NotAnonymous()
1050 @HasRepoPermissionAnyDecorator(
1051 'repository.read', 'repository.write', 'repository.admin')
940 @CSRFRequired()
1052 @CSRFRequired()
941 @view_config(
1053 @view_config(
942 route_name='pullrequest_create', request_method='POST',
1054 route_name='pullrequest_create', request_method='POST',
943 renderer=None)
1055 renderer=None)
944 def pull_request_create(self):
1056 def pull_request_create(self):
945 _ = self.request.translate
1057 _ = self.request.translate
946 self.assure_not_empty_repo()
1058 self.assure_not_empty_repo()
947 self.load_default_context()
1059 self.load_default_context()
948
1060
949 controls = peppercorn.parse(self.request.POST.items())
1061 controls = peppercorn.parse(self.request.POST.items())
950
1062
951 try:
1063 try:
952 form = PullRequestForm(
1064 form = PullRequestForm(
953 self.request.translate, self.db_repo.repo_id)()
1065 self.request.translate, self.db_repo.repo_id)()
954 _form = form.to_python(controls)
1066 _form = form.to_python(controls)
955 except formencode.Invalid as errors:
1067 except formencode.Invalid as errors:
956 if errors.error_dict.get('revisions'):
1068 if errors.error_dict.get('revisions'):
957 msg = 'Revisions: %s' % errors.error_dict['revisions']
1069 msg = 'Revisions: %s' % errors.error_dict['revisions']
958 elif errors.error_dict.get('pullrequest_title'):
1070 elif errors.error_dict.get('pullrequest_title'):
959 msg = errors.error_dict.get('pullrequest_title')
1071 msg = errors.error_dict.get('pullrequest_title')
960 else:
1072 else:
961 msg = _('Error creating pull request: {}').format(errors)
1073 msg = _('Error creating pull request: {}').format(errors)
962 log.exception(msg)
1074 log.exception(msg)
963 h.flash(msg, 'error')
1075 h.flash(msg, 'error')
964
1076
965 # would rather just go back to form ...
1077 # would rather just go back to form ...
966 raise HTTPFound(
1078 raise HTTPFound(
967 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1079 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
968
1080
969 source_repo = _form['source_repo']
1081 source_repo = _form['source_repo']
970 source_ref = _form['source_ref']
1082 source_ref = _form['source_ref']
971 target_repo = _form['target_repo']
1083 target_repo = _form['target_repo']
972 target_ref = _form['target_ref']
1084 target_ref = _form['target_ref']
973 commit_ids = _form['revisions'][::-1]
1085 commit_ids = _form['revisions'][::-1]
974 common_ancestor_id = _form['common_ancestor']
1086 common_ancestor_id = _form['common_ancestor']
975
1087
976 # find the ancestor for this pr
1088 # find the ancestor for this pr
977 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1089 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
978 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1090 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
979
1091
980 if not (source_db_repo or target_db_repo):
1092 if not (source_db_repo or target_db_repo):
981 h.flash(_('source_repo or target repo not found'), category='error')
1093 h.flash(_('source_repo or target repo not found'), category='error')
982 raise HTTPFound(
1094 raise HTTPFound(
983 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1095 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
984
1096
985 # re-check permissions again here
1097 # re-check permissions again here
986 # source_repo we must have read permissions
1098 # source_repo we must have read permissions
987
1099
988 source_perm = HasRepoPermissionAny(
1100 source_perm = HasRepoPermissionAny(
989 'repository.read', 'repository.write', 'repository.admin')(
1101 'repository.read', 'repository.write', 'repository.admin')(
990 source_db_repo.repo_name)
1102 source_db_repo.repo_name)
991 if not source_perm:
1103 if not source_perm:
992 msg = _('Not Enough permissions to source repo `{}`.'.format(
1104 msg = _('Not Enough permissions to source repo `{}`.'.format(
993 source_db_repo.repo_name))
1105 source_db_repo.repo_name))
994 h.flash(msg, category='error')
1106 h.flash(msg, category='error')
995 # copy the args back to redirect
1107 # copy the args back to redirect
996 org_query = self.request.GET.mixed()
1108 org_query = self.request.GET.mixed()
997 raise HTTPFound(
1109 raise HTTPFound(
998 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1110 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
999 _query=org_query))
1111 _query=org_query))
1000
1112
1001 # target repo we must have read permissions, and also later on
1113 # target repo we must have read permissions, and also later on
1002 # we want to check branch permissions here
1114 # we want to check branch permissions here
1003 target_perm = HasRepoPermissionAny(
1115 target_perm = HasRepoPermissionAny(
1004 'repository.read', 'repository.write', 'repository.admin')(
1116 'repository.read', 'repository.write', 'repository.admin')(
1005 target_db_repo.repo_name)
1117 target_db_repo.repo_name)
1006 if not target_perm:
1118 if not target_perm:
1007 msg = _('Not Enough permissions to target repo `{}`.'.format(
1119 msg = _('Not Enough permissions to target repo `{}`.'.format(
1008 target_db_repo.repo_name))
1120 target_db_repo.repo_name))
1009 h.flash(msg, category='error')
1121 h.flash(msg, category='error')
1010 # copy the args back to redirect
1122 # copy the args back to redirect
1011 org_query = self.request.GET.mixed()
1123 org_query = self.request.GET.mixed()
1012 raise HTTPFound(
1124 raise HTTPFound(
1013 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1125 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1014 _query=org_query))
1126 _query=org_query))
1015
1127
1016 source_scm = source_db_repo.scm_instance()
1128 source_scm = source_db_repo.scm_instance()
1017 target_scm = target_db_repo.scm_instance()
1129 target_scm = target_db_repo.scm_instance()
1018
1130
1019 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
1131 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
1020 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
1132 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
1021
1133
1022 ancestor = source_scm.get_common_ancestor(
1134 ancestor = source_scm.get_common_ancestor(
1023 source_commit.raw_id, target_commit.raw_id, target_scm)
1135 source_commit.raw_id, target_commit.raw_id, target_scm)
1024
1136
1025 # recalculate target ref based on ancestor
1137 # recalculate target ref based on ancestor
1026 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1138 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1027 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1139 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1028
1140
1029 get_default_reviewers_data, validate_default_reviewers = \
1141 get_default_reviewers_data, validate_default_reviewers = \
1030 PullRequestModel().get_reviewer_functions()
1142 PullRequestModel().get_reviewer_functions()
1031
1143
1032 # recalculate reviewers logic, to make sure we can validate this
1144 # recalculate reviewers logic, to make sure we can validate this
1033 reviewer_rules = get_default_reviewers_data(
1145 reviewer_rules = get_default_reviewers_data(
1034 self._rhodecode_db_user, source_db_repo,
1146 self._rhodecode_db_user, source_db_repo,
1035 source_commit, target_db_repo, target_commit)
1147 source_commit, target_db_repo, target_commit)
1036
1148
1037 given_reviewers = _form['review_members']
1149 given_reviewers = _form['review_members']
1038 reviewers = validate_default_reviewers(
1150 reviewers = validate_default_reviewers(
1039 given_reviewers, reviewer_rules)
1151 given_reviewers, reviewer_rules)
1040
1152
1041 pullrequest_title = _form['pullrequest_title']
1153 pullrequest_title = _form['pullrequest_title']
1042 title_source_ref = source_ref.split(':', 2)[1]
1154 title_source_ref = source_ref.split(':', 2)[1]
1043 if not pullrequest_title:
1155 if not pullrequest_title:
1044 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1156 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1045 source=source_repo,
1157 source=source_repo,
1046 source_ref=title_source_ref,
1158 source_ref=title_source_ref,
1047 target=target_repo
1159 target=target_repo
1048 )
1160 )
1049
1161
1050 description = _form['pullrequest_desc']
1162 description = _form['pullrequest_desc']
1051 description_renderer = _form['description_renderer']
1163 description_renderer = _form['description_renderer']
1052
1164
1053 try:
1165 try:
1054 pull_request = PullRequestModel().create(
1166 pull_request = PullRequestModel().create(
1055 created_by=self._rhodecode_user.user_id,
1167 created_by=self._rhodecode_user.user_id,
1056 source_repo=source_repo,
1168 source_repo=source_repo,
1057 source_ref=source_ref,
1169 source_ref=source_ref,
1058 target_repo=target_repo,
1170 target_repo=target_repo,
1059 target_ref=target_ref,
1171 target_ref=target_ref,
1060 revisions=commit_ids,
1172 revisions=commit_ids,
1061 common_ancestor_id=common_ancestor_id,
1173 common_ancestor_id=common_ancestor_id,
1062 reviewers=reviewers,
1174 reviewers=reviewers,
1063 title=pullrequest_title,
1175 title=pullrequest_title,
1064 description=description,
1176 description=description,
1065 description_renderer=description_renderer,
1177 description_renderer=description_renderer,
1066 reviewer_data=reviewer_rules,
1178 reviewer_data=reviewer_rules,
1067 auth_user=self._rhodecode_user
1179 auth_user=self._rhodecode_user
1068 )
1180 )
1069 Session().commit()
1181 Session().commit()
1070
1182
1071 h.flash(_('Successfully opened new pull request'),
1183 h.flash(_('Successfully opened new pull request'),
1072 category='success')
1184 category='success')
1073 except Exception:
1185 except Exception:
1074 msg = _('Error occurred during creation of this pull request.')
1186 msg = _('Error occurred during creation of this pull request.')
1075 log.exception(msg)
1187 log.exception(msg)
1076 h.flash(msg, category='error')
1188 h.flash(msg, category='error')
1077
1189
1078 # copy the args back to redirect
1190 # copy the args back to redirect
1079 org_query = self.request.GET.mixed()
1191 org_query = self.request.GET.mixed()
1080 raise HTTPFound(
1192 raise HTTPFound(
1081 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1193 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1082 _query=org_query))
1194 _query=org_query))
1083
1195
1084 raise HTTPFound(
1196 raise HTTPFound(
1085 h.route_path('pullrequest_show', repo_name=target_repo,
1197 h.route_path('pullrequest_show', repo_name=target_repo,
1086 pull_request_id=pull_request.pull_request_id))
1198 pull_request_id=pull_request.pull_request_id))
1087
1199
1088 @LoginRequired()
1200 @LoginRequired()
1089 @NotAnonymous()
1201 @NotAnonymous()
1090 @HasRepoPermissionAnyDecorator(
1202 @HasRepoPermissionAnyDecorator(
1091 'repository.read', 'repository.write', 'repository.admin')
1203 'repository.read', 'repository.write', 'repository.admin')
1092 @CSRFRequired()
1204 @CSRFRequired()
1093 @view_config(
1205 @view_config(
1094 route_name='pullrequest_update', request_method='POST',
1206 route_name='pullrequest_update', request_method='POST',
1095 renderer='json_ext')
1207 renderer='json_ext')
1096 def pull_request_update(self):
1208 def pull_request_update(self):
1097 pull_request = PullRequest.get_or_404(
1209 pull_request = PullRequest.get_or_404(
1098 self.request.matchdict['pull_request_id'])
1210 self.request.matchdict['pull_request_id'])
1099 _ = self.request.translate
1211 _ = self.request.translate
1100
1212
1101 self.load_default_context()
1213 c = self.load_default_context()
1102 redirect_url = None
1214 redirect_url = None
1103
1215
1104 if pull_request.is_closed():
1216 if pull_request.is_closed():
1105 log.debug('update: forbidden because pull request is closed')
1217 log.debug('update: forbidden because pull request is closed')
1106 msg = _(u'Cannot update closed pull requests.')
1218 msg = _(u'Cannot update closed pull requests.')
1107 h.flash(msg, category='error')
1219 h.flash(msg, category='error')
1108 return {'response': True,
1220 return {'response': True,
1109 'redirect_url': redirect_url}
1221 'redirect_url': redirect_url}
1110
1222
1111 is_state_changing = pull_request.is_state_changing()
1223 is_state_changing = pull_request.is_state_changing()
1224 c.pr_broadcast_channel = '/repo${}$/pr/{}'.format(
1225 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1112
1226
1113 # only owner or admin can update it
1227 # only owner or admin can update it
1114 allowed_to_update = PullRequestModel().check_user_update(
1228 allowed_to_update = PullRequestModel().check_user_update(
1115 pull_request, self._rhodecode_user)
1229 pull_request, self._rhodecode_user)
1116 if allowed_to_update:
1230 if allowed_to_update:
1117 controls = peppercorn.parse(self.request.POST.items())
1231 controls = peppercorn.parse(self.request.POST.items())
1118 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1232 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1119
1233
1120 if 'review_members' in controls:
1234 if 'review_members' in controls:
1121 self._update_reviewers(
1235 self._update_reviewers(
1122 pull_request, controls['review_members'],
1236 pull_request, controls['review_members'],
1123 pull_request.reviewer_data)
1237 pull_request.reviewer_data)
1124 elif str2bool(self.request.POST.get('update_commits', 'false')):
1238 elif str2bool(self.request.POST.get('update_commits', 'false')):
1125 if is_state_changing:
1239 if is_state_changing:
1126 log.debug('commits update: forbidden because pull request is in state %s',
1240 log.debug('commits update: forbidden because pull request is in state %s',
1127 pull_request.pull_request_state)
1241 pull_request.pull_request_state)
1128 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1242 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1129 u'Current state is: `{}`').format(
1243 u'Current state is: `{}`').format(
1130 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1244 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1131 h.flash(msg, category='error')
1245 h.flash(msg, category='error')
1132 return {'response': True,
1246 return {'response': True,
1133 'redirect_url': redirect_url}
1247 'redirect_url': redirect_url}
1134
1248
1135 self._update_commits(pull_request)
1249 self._update_commits(c, pull_request)
1136 if force_refresh:
1250 if force_refresh:
1137 redirect_url = h.route_path(
1251 redirect_url = h.route_path(
1138 'pullrequest_show', repo_name=self.db_repo_name,
1252 'pullrequest_show', repo_name=self.db_repo_name,
1139 pull_request_id=pull_request.pull_request_id,
1253 pull_request_id=pull_request.pull_request_id,
1140 _query={"force_refresh": 1})
1254 _query={"force_refresh": 1})
1141 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1255 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1142 self._edit_pull_request(pull_request)
1256 self._edit_pull_request(pull_request)
1143 else:
1257 else:
1144 raise HTTPBadRequest()
1258 raise HTTPBadRequest()
1145
1259
1146 return {'response': True,
1260 return {'response': True,
1147 'redirect_url': redirect_url}
1261 'redirect_url': redirect_url}
1148 raise HTTPForbidden()
1262 raise HTTPForbidden()
1149
1263
1150 def _edit_pull_request(self, pull_request):
1264 def _edit_pull_request(self, pull_request):
1151 _ = self.request.translate
1265 _ = self.request.translate
1152
1266
1153 try:
1267 try:
1154 PullRequestModel().edit(
1268 PullRequestModel().edit(
1155 pull_request,
1269 pull_request,
1156 self.request.POST.get('title'),
1270 self.request.POST.get('title'),
1157 self.request.POST.get('description'),
1271 self.request.POST.get('description'),
1158 self.request.POST.get('description_renderer'),
1272 self.request.POST.get('description_renderer'),
1159 self._rhodecode_user)
1273 self._rhodecode_user)
1160 except ValueError:
1274 except ValueError:
1161 msg = _(u'Cannot update closed pull requests.')
1275 msg = _(u'Cannot update closed pull requests.')
1162 h.flash(msg, category='error')
1276 h.flash(msg, category='error')
1163 return
1277 return
1164 else:
1278 else:
1165 Session().commit()
1279 Session().commit()
1166
1280
1167 msg = _(u'Pull request title & description updated.')
1281 msg = _(u'Pull request title & description updated.')
1168 h.flash(msg, category='success')
1282 h.flash(msg, category='success')
1169 return
1283 return
1170
1284
1171 def _update_commits(self, pull_request):
1285 def _update_commits(self, c, pull_request):
1172 _ = self.request.translate
1286 _ = self.request.translate
1173
1287
1174 with pull_request.set_state(PullRequest.STATE_UPDATING):
1288 with pull_request.set_state(PullRequest.STATE_UPDATING):
1175 resp = PullRequestModel().update_commits(
1289 resp = PullRequestModel().update_commits(
1176 pull_request, self._rhodecode_db_user)
1290 pull_request, self._rhodecode_db_user)
1177
1291
1178 if resp.executed:
1292 if resp.executed:
1179
1293
1180 if resp.target_changed and resp.source_changed:
1294 if resp.target_changed and resp.source_changed:
1181 changed = 'target and source repositories'
1295 changed = 'target and source repositories'
1182 elif resp.target_changed and not resp.source_changed:
1296 elif resp.target_changed and not resp.source_changed:
1183 changed = 'target repository'
1297 changed = 'target repository'
1184 elif not resp.target_changed and resp.source_changed:
1298 elif not resp.target_changed and resp.source_changed:
1185 changed = 'source repository'
1299 changed = 'source repository'
1186 else:
1300 else:
1187 changed = 'nothing'
1301 changed = 'nothing'
1188
1302
1189 msg = _(u'Pull request updated to "{source_commit_id}" with '
1303 msg = _(u'Pull request updated to "{source_commit_id}" with '
1190 u'{count_added} added, {count_removed} removed commits. '
1304 u'{count_added} added, {count_removed} removed commits. '
1191 u'Source of changes: {change_source}')
1305 u'Source of changes: {change_source}')
1192 msg = msg.format(
1306 msg = msg.format(
1193 source_commit_id=pull_request.source_ref_parts.commit_id,
1307 source_commit_id=pull_request.source_ref_parts.commit_id,
1194 count_added=len(resp.changes.added),
1308 count_added=len(resp.changes.added),
1195 count_removed=len(resp.changes.removed),
1309 count_removed=len(resp.changes.removed),
1196 change_source=changed)
1310 change_source=changed)
1197 h.flash(msg, category='success')
1311 h.flash(msg, category='success')
1198
1312
1199 channel = '/repo${}$/pr/{}'.format(
1200 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1201 message = msg + (
1313 message = msg + (
1202 ' - <a onclick="window.location.reload()">'
1314 ' - <a onclick="window.location.reload()">'
1203 '<strong>{}</strong></a>'.format(_('Reload page')))
1315 '<strong>{}</strong></a>'.format(_('Reload page')))
1316
1317 message_obj = {
1318 'message': message,
1319 'level': 'success',
1320 'topic': '/notifications'
1321 }
1322
1204 channelstream.post_message(
1323 channelstream.post_message(
1205 channel, message, self._rhodecode_user.username,
1324 c.pr_broadcast_channel, message_obj, self._rhodecode_user.username,
1206 registry=self.request.registry)
1325 registry=self.request.registry)
1207 else:
1326 else:
1208 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1327 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1209 warning_reasons = [
1328 warning_reasons = [
1210 UpdateFailureReason.NO_CHANGE,
1329 UpdateFailureReason.NO_CHANGE,
1211 UpdateFailureReason.WRONG_REF_TYPE,
1330 UpdateFailureReason.WRONG_REF_TYPE,
1212 ]
1331 ]
1213 category = 'warning' if resp.reason in warning_reasons else 'error'
1332 category = 'warning' if resp.reason in warning_reasons else 'error'
1214 h.flash(msg, category=category)
1333 h.flash(msg, category=category)
1215
1334
1216 @LoginRequired()
1335 @LoginRequired()
1217 @NotAnonymous()
1336 @NotAnonymous()
1218 @HasRepoPermissionAnyDecorator(
1337 @HasRepoPermissionAnyDecorator(
1219 'repository.read', 'repository.write', 'repository.admin')
1338 'repository.read', 'repository.write', 'repository.admin')
1220 @CSRFRequired()
1339 @CSRFRequired()
1221 @view_config(
1340 @view_config(
1222 route_name='pullrequest_merge', request_method='POST',
1341 route_name='pullrequest_merge', request_method='POST',
1223 renderer='json_ext')
1342 renderer='json_ext')
1224 def pull_request_merge(self):
1343 def pull_request_merge(self):
1225 """
1344 """
1226 Merge will perform a server-side merge of the specified
1345 Merge will perform a server-side merge of the specified
1227 pull request, if the pull request is approved and mergeable.
1346 pull request, if the pull request is approved and mergeable.
1228 After successful merging, the pull request is automatically
1347 After successful merging, the pull request is automatically
1229 closed, with a relevant comment.
1348 closed, with a relevant comment.
1230 """
1349 """
1231 pull_request = PullRequest.get_or_404(
1350 pull_request = PullRequest.get_or_404(
1232 self.request.matchdict['pull_request_id'])
1351 self.request.matchdict['pull_request_id'])
1233 _ = self.request.translate
1352 _ = self.request.translate
1234
1353
1235 if pull_request.is_state_changing():
1354 if pull_request.is_state_changing():
1236 log.debug('show: forbidden because pull request is in state %s',
1355 log.debug('show: forbidden because pull request is in state %s',
1237 pull_request.pull_request_state)
1356 pull_request.pull_request_state)
1238 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1357 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1239 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1358 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1240 pull_request.pull_request_state)
1359 pull_request.pull_request_state)
1241 h.flash(msg, category='error')
1360 h.flash(msg, category='error')
1242 raise HTTPFound(
1361 raise HTTPFound(
1243 h.route_path('pullrequest_show',
1362 h.route_path('pullrequest_show',
1244 repo_name=pull_request.target_repo.repo_name,
1363 repo_name=pull_request.target_repo.repo_name,
1245 pull_request_id=pull_request.pull_request_id))
1364 pull_request_id=pull_request.pull_request_id))
1246
1365
1247 self.load_default_context()
1366 self.load_default_context()
1248
1367
1249 with pull_request.set_state(PullRequest.STATE_UPDATING):
1368 with pull_request.set_state(PullRequest.STATE_UPDATING):
1250 check = MergeCheck.validate(
1369 check = MergeCheck.validate(
1251 pull_request, auth_user=self._rhodecode_user,
1370 pull_request, auth_user=self._rhodecode_user,
1252 translator=self.request.translate)
1371 translator=self.request.translate)
1253 merge_possible = not check.failed
1372 merge_possible = not check.failed
1254
1373
1255 for err_type, error_msg in check.errors:
1374 for err_type, error_msg in check.errors:
1256 h.flash(error_msg, category=err_type)
1375 h.flash(error_msg, category=err_type)
1257
1376
1258 if merge_possible:
1377 if merge_possible:
1259 log.debug("Pre-conditions checked, trying to merge.")
1378 log.debug("Pre-conditions checked, trying to merge.")
1260 extras = vcs_operation_context(
1379 extras = vcs_operation_context(
1261 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1380 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1262 username=self._rhodecode_db_user.username, action='push',
1381 username=self._rhodecode_db_user.username, action='push',
1263 scm=pull_request.target_repo.repo_type)
1382 scm=pull_request.target_repo.repo_type)
1264 with pull_request.set_state(PullRequest.STATE_UPDATING):
1383 with pull_request.set_state(PullRequest.STATE_UPDATING):
1265 self._merge_pull_request(
1384 self._merge_pull_request(
1266 pull_request, self._rhodecode_db_user, extras)
1385 pull_request, self._rhodecode_db_user, extras)
1267 else:
1386 else:
1268 log.debug("Pre-conditions failed, NOT merging.")
1387 log.debug("Pre-conditions failed, NOT merging.")
1269
1388
1270 raise HTTPFound(
1389 raise HTTPFound(
1271 h.route_path('pullrequest_show',
1390 h.route_path('pullrequest_show',
1272 repo_name=pull_request.target_repo.repo_name,
1391 repo_name=pull_request.target_repo.repo_name,
1273 pull_request_id=pull_request.pull_request_id))
1392 pull_request_id=pull_request.pull_request_id))
1274
1393
1275 def _merge_pull_request(self, pull_request, user, extras):
1394 def _merge_pull_request(self, pull_request, user, extras):
1276 _ = self.request.translate
1395 _ = self.request.translate
1277 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1396 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1278
1397
1279 if merge_resp.executed:
1398 if merge_resp.executed:
1280 log.debug("The merge was successful, closing the pull request.")
1399 log.debug("The merge was successful, closing the pull request.")
1281 PullRequestModel().close_pull_request(
1400 PullRequestModel().close_pull_request(
1282 pull_request.pull_request_id, user)
1401 pull_request.pull_request_id, user)
1283 Session().commit()
1402 Session().commit()
1284 msg = _('Pull request was successfully merged and closed.')
1403 msg = _('Pull request was successfully merged and closed.')
1285 h.flash(msg, category='success')
1404 h.flash(msg, category='success')
1286 else:
1405 else:
1287 log.debug(
1406 log.debug(
1288 "The merge was not successful. Merge response: %s", merge_resp)
1407 "The merge was not successful. Merge response: %s", merge_resp)
1289 msg = merge_resp.merge_status_message
1408 msg = merge_resp.merge_status_message
1290 h.flash(msg, category='error')
1409 h.flash(msg, category='error')
1291
1410
1292 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1411 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1293 _ = self.request.translate
1412 _ = self.request.translate
1294
1413
1295 get_default_reviewers_data, validate_default_reviewers = \
1414 get_default_reviewers_data, validate_default_reviewers = \
1296 PullRequestModel().get_reviewer_functions()
1415 PullRequestModel().get_reviewer_functions()
1297
1416
1298 try:
1417 try:
1299 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1418 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1300 except ValueError as e:
1419 except ValueError as e:
1301 log.error('Reviewers Validation: {}'.format(e))
1420 log.error('Reviewers Validation: {}'.format(e))
1302 h.flash(e, category='error')
1421 h.flash(e, category='error')
1303 return
1422 return
1304
1423
1305 old_calculated_status = pull_request.calculated_review_status()
1424 old_calculated_status = pull_request.calculated_review_status()
1306 PullRequestModel().update_reviewers(
1425 PullRequestModel().update_reviewers(
1307 pull_request, reviewers, self._rhodecode_user)
1426 pull_request, reviewers, self._rhodecode_user)
1308 h.flash(_('Pull request reviewers updated.'), category='success')
1427 h.flash(_('Pull request reviewers updated.'), category='success')
1309 Session().commit()
1428 Session().commit()
1310
1429
1311 # trigger status changed if change in reviewers changes the status
1430 # trigger status changed if change in reviewers changes the status
1312 calculated_status = pull_request.calculated_review_status()
1431 calculated_status = pull_request.calculated_review_status()
1313 if old_calculated_status != calculated_status:
1432 if old_calculated_status != calculated_status:
1314 PullRequestModel().trigger_pull_request_hook(
1433 PullRequestModel().trigger_pull_request_hook(
1315 pull_request, self._rhodecode_user, 'review_status_change',
1434 pull_request, self._rhodecode_user, 'review_status_change',
1316 data={'status': calculated_status})
1435 data={'status': calculated_status})
1317
1436
1318 @LoginRequired()
1437 @LoginRequired()
1319 @NotAnonymous()
1438 @NotAnonymous()
1320 @HasRepoPermissionAnyDecorator(
1439 @HasRepoPermissionAnyDecorator(
1321 'repository.read', 'repository.write', 'repository.admin')
1440 'repository.read', 'repository.write', 'repository.admin')
1322 @CSRFRequired()
1441 @CSRFRequired()
1323 @view_config(
1442 @view_config(
1324 route_name='pullrequest_delete', request_method='POST',
1443 route_name='pullrequest_delete', request_method='POST',
1325 renderer='json_ext')
1444 renderer='json_ext')
1326 def pull_request_delete(self):
1445 def pull_request_delete(self):
1327 _ = self.request.translate
1446 _ = self.request.translate
1328
1447
1329 pull_request = PullRequest.get_or_404(
1448 pull_request = PullRequest.get_or_404(
1330 self.request.matchdict['pull_request_id'])
1449 self.request.matchdict['pull_request_id'])
1331 self.load_default_context()
1450 self.load_default_context()
1332
1451
1333 pr_closed = pull_request.is_closed()
1452 pr_closed = pull_request.is_closed()
1334 allowed_to_delete = PullRequestModel().check_user_delete(
1453 allowed_to_delete = PullRequestModel().check_user_delete(
1335 pull_request, self._rhodecode_user) and not pr_closed
1454 pull_request, self._rhodecode_user) and not pr_closed
1336
1455
1337 # only owner can delete it !
1456 # only owner can delete it !
1338 if allowed_to_delete:
1457 if allowed_to_delete:
1339 PullRequestModel().delete(pull_request, self._rhodecode_user)
1458 PullRequestModel().delete(pull_request, self._rhodecode_user)
1340 Session().commit()
1459 Session().commit()
1341 h.flash(_('Successfully deleted pull request'),
1460 h.flash(_('Successfully deleted pull request'),
1342 category='success')
1461 category='success')
1343 raise HTTPFound(h.route_path('pullrequest_show_all',
1462 raise HTTPFound(h.route_path('pullrequest_show_all',
1344 repo_name=self.db_repo_name))
1463 repo_name=self.db_repo_name))
1345
1464
1346 log.warning('user %s tried to delete pull request without access',
1465 log.warning('user %s tried to delete pull request without access',
1347 self._rhodecode_user)
1466 self._rhodecode_user)
1348 raise HTTPNotFound()
1467 raise HTTPNotFound()
1349
1468
1350 @LoginRequired()
1469 @LoginRequired()
1351 @NotAnonymous()
1470 @NotAnonymous()
1352 @HasRepoPermissionAnyDecorator(
1471 @HasRepoPermissionAnyDecorator(
1353 'repository.read', 'repository.write', 'repository.admin')
1472 'repository.read', 'repository.write', 'repository.admin')
1354 @CSRFRequired()
1473 @CSRFRequired()
1355 @view_config(
1474 @view_config(
1356 route_name='pullrequest_comment_create', request_method='POST',
1475 route_name='pullrequest_comment_create', request_method='POST',
1357 renderer='json_ext')
1476 renderer='json_ext')
1358 def pull_request_comment_create(self):
1477 def pull_request_comment_create(self):
1359 _ = self.request.translate
1478 _ = self.request.translate
1360
1479
1361 pull_request = PullRequest.get_or_404(
1480 pull_request = PullRequest.get_or_404(
1362 self.request.matchdict['pull_request_id'])
1481 self.request.matchdict['pull_request_id'])
1363 pull_request_id = pull_request.pull_request_id
1482 pull_request_id = pull_request.pull_request_id
1364
1483
1365 if pull_request.is_closed():
1484 if pull_request.is_closed():
1366 log.debug('comment: forbidden because pull request is closed')
1485 log.debug('comment: forbidden because pull request is closed')
1367 raise HTTPForbidden()
1486 raise HTTPForbidden()
1368
1487
1369 allowed_to_comment = PullRequestModel().check_user_comment(
1488 allowed_to_comment = PullRequestModel().check_user_comment(
1370 pull_request, self._rhodecode_user)
1489 pull_request, self._rhodecode_user)
1371 if not allowed_to_comment:
1490 if not allowed_to_comment:
1372 log.debug(
1491 log.debug(
1373 'comment: forbidden because pull request is from forbidden repo')
1492 'comment: forbidden because pull request is from forbidden repo')
1374 raise HTTPForbidden()
1493 raise HTTPForbidden()
1375
1494
1376 c = self.load_default_context()
1495 c = self.load_default_context()
1377
1496
1378 status = self.request.POST.get('changeset_status', None)
1497 status = self.request.POST.get('changeset_status', None)
1379 text = self.request.POST.get('text')
1498 text = self.request.POST.get('text')
1380 comment_type = self.request.POST.get('comment_type')
1499 comment_type = self.request.POST.get('comment_type')
1381 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1500 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1382 close_pull_request = self.request.POST.get('close_pull_request')
1501 close_pull_request = self.request.POST.get('close_pull_request')
1383
1502
1384 # the logic here should work like following, if we submit close
1503 # the logic here should work like following, if we submit close
1385 # pr comment, use `close_pull_request_with_comment` function
1504 # pr comment, use `close_pull_request_with_comment` function
1386 # else handle regular comment logic
1505 # else handle regular comment logic
1387
1506
1388 if close_pull_request:
1507 if close_pull_request:
1389 # only owner or admin or person with write permissions
1508 # only owner or admin or person with write permissions
1390 allowed_to_close = PullRequestModel().check_user_update(
1509 allowed_to_close = PullRequestModel().check_user_update(
1391 pull_request, self._rhodecode_user)
1510 pull_request, self._rhodecode_user)
1392 if not allowed_to_close:
1511 if not allowed_to_close:
1393 log.debug('comment: forbidden because not allowed to close '
1512 log.debug('comment: forbidden because not allowed to close '
1394 'pull request %s', pull_request_id)
1513 'pull request %s', pull_request_id)
1395 raise HTTPForbidden()
1514 raise HTTPForbidden()
1396
1515
1397 # This also triggers `review_status_change`
1516 # This also triggers `review_status_change`
1398 comment, status = PullRequestModel().close_pull_request_with_comment(
1517 comment, status = PullRequestModel().close_pull_request_with_comment(
1399 pull_request, self._rhodecode_user, self.db_repo, message=text,
1518 pull_request, self._rhodecode_user, self.db_repo, message=text,
1400 auth_user=self._rhodecode_user)
1519 auth_user=self._rhodecode_user)
1401 Session().flush()
1520 Session().flush()
1402
1521
1403 PullRequestModel().trigger_pull_request_hook(
1522 PullRequestModel().trigger_pull_request_hook(
1404 pull_request, self._rhodecode_user, 'comment',
1523 pull_request, self._rhodecode_user, 'comment',
1405 data={'comment': comment})
1524 data={'comment': comment})
1406
1525
1407 else:
1526 else:
1408 # regular comment case, could be inline, or one with status.
1527 # regular comment case, could be inline, or one with status.
1409 # for that one we check also permissions
1528 # for that one we check also permissions
1410
1529
1411 allowed_to_change_status = PullRequestModel().check_user_change_status(
1530 allowed_to_change_status = PullRequestModel().check_user_change_status(
1412 pull_request, self._rhodecode_user)
1531 pull_request, self._rhodecode_user)
1413
1532
1414 if status and allowed_to_change_status:
1533 if status and allowed_to_change_status:
1415 message = (_('Status change %(transition_icon)s %(status)s')
1534 message = (_('Status change %(transition_icon)s %(status)s')
1416 % {'transition_icon': '>',
1535 % {'transition_icon': '>',
1417 'status': ChangesetStatus.get_status_lbl(status)})
1536 'status': ChangesetStatus.get_status_lbl(status)})
1418 text = text or message
1537 text = text or message
1419
1538
1420 comment = CommentsModel().create(
1539 comment = CommentsModel().create(
1421 text=text,
1540 text=text,
1422 repo=self.db_repo.repo_id,
1541 repo=self.db_repo.repo_id,
1423 user=self._rhodecode_user.user_id,
1542 user=self._rhodecode_user.user_id,
1424 pull_request=pull_request,
1543 pull_request=pull_request,
1425 f_path=self.request.POST.get('f_path'),
1544 f_path=self.request.POST.get('f_path'),
1426 line_no=self.request.POST.get('line'),
1545 line_no=self.request.POST.get('line'),
1427 status_change=(ChangesetStatus.get_status_lbl(status)
1546 status_change=(ChangesetStatus.get_status_lbl(status)
1428 if status and allowed_to_change_status else None),
1547 if status and allowed_to_change_status else None),
1429 status_change_type=(status
1548 status_change_type=(status
1430 if status and allowed_to_change_status else None),
1549 if status and allowed_to_change_status else None),
1431 comment_type=comment_type,
1550 comment_type=comment_type,
1432 resolves_comment_id=resolves_comment_id,
1551 resolves_comment_id=resolves_comment_id,
1433 auth_user=self._rhodecode_user
1552 auth_user=self._rhodecode_user
1434 )
1553 )
1435
1554
1436 if allowed_to_change_status:
1555 if allowed_to_change_status:
1437 # calculate old status before we change it
1556 # calculate old status before we change it
1438 old_calculated_status = pull_request.calculated_review_status()
1557 old_calculated_status = pull_request.calculated_review_status()
1439
1558
1440 # get status if set !
1559 # get status if set !
1441 if status:
1560 if status:
1442 ChangesetStatusModel().set_status(
1561 ChangesetStatusModel().set_status(
1443 self.db_repo.repo_id,
1562 self.db_repo.repo_id,
1444 status,
1563 status,
1445 self._rhodecode_user.user_id,
1564 self._rhodecode_user.user_id,
1446 comment,
1565 comment,
1447 pull_request=pull_request
1566 pull_request=pull_request
1448 )
1567 )
1449
1568
1450 Session().flush()
1569 Session().flush()
1451 # this is somehow required to get access to some relationship
1570 # this is somehow required to get access to some relationship
1452 # loaded on comment
1571 # loaded on comment
1453 Session().refresh(comment)
1572 Session().refresh(comment)
1454
1573
1455 PullRequestModel().trigger_pull_request_hook(
1574 PullRequestModel().trigger_pull_request_hook(
1456 pull_request, self._rhodecode_user, 'comment',
1575 pull_request, self._rhodecode_user, 'comment',
1457 data={'comment': comment})
1576 data={'comment': comment})
1458
1577
1459 # we now calculate the status of pull request, and based on that
1578 # we now calculate the status of pull request, and based on that
1460 # calculation we set the commits status
1579 # calculation we set the commits status
1461 calculated_status = pull_request.calculated_review_status()
1580 calculated_status = pull_request.calculated_review_status()
1462 if old_calculated_status != calculated_status:
1581 if old_calculated_status != calculated_status:
1463 PullRequestModel().trigger_pull_request_hook(
1582 PullRequestModel().trigger_pull_request_hook(
1464 pull_request, self._rhodecode_user, 'review_status_change',
1583 pull_request, self._rhodecode_user, 'review_status_change',
1465 data={'status': calculated_status})
1584 data={'status': calculated_status})
1466
1585
1467 Session().commit()
1586 Session().commit()
1468
1587
1469 data = {
1588 data = {
1470 'target_id': h.safeid(h.safe_unicode(
1589 'target_id': h.safeid(h.safe_unicode(
1471 self.request.POST.get('f_path'))),
1590 self.request.POST.get('f_path'))),
1472 }
1591 }
1473 if comment:
1592 if comment:
1474 c.co = comment
1593 c.co = comment
1594 c.at_version_num = None
1475 rendered_comment = render(
1595 rendered_comment = render(
1476 'rhodecode:templates/changeset/changeset_comment_block.mako',
1596 'rhodecode:templates/changeset/changeset_comment_block.mako',
1477 self._get_template_context(c), self.request)
1597 self._get_template_context(c), self.request)
1478
1598
1479 data.update(comment.get_dict())
1599 data.update(comment.get_dict())
1480 data.update({'rendered_text': rendered_comment})
1600 data.update({'rendered_text': rendered_comment})
1481
1601
1482 return data
1602 return data
1483
1603
1484 @LoginRequired()
1604 @LoginRequired()
1485 @NotAnonymous()
1605 @NotAnonymous()
1486 @HasRepoPermissionAnyDecorator(
1606 @HasRepoPermissionAnyDecorator(
1487 'repository.read', 'repository.write', 'repository.admin')
1607 'repository.read', 'repository.write', 'repository.admin')
1488 @CSRFRequired()
1608 @CSRFRequired()
1489 @view_config(
1609 @view_config(
1490 route_name='pullrequest_comment_delete', request_method='POST',
1610 route_name='pullrequest_comment_delete', request_method='POST',
1491 renderer='json_ext')
1611 renderer='json_ext')
1492 def pull_request_comment_delete(self):
1612 def pull_request_comment_delete(self):
1493 pull_request = PullRequest.get_or_404(
1613 pull_request = PullRequest.get_or_404(
1494 self.request.matchdict['pull_request_id'])
1614 self.request.matchdict['pull_request_id'])
1495
1615
1496 comment = ChangesetComment.get_or_404(
1616 comment = ChangesetComment.get_or_404(
1497 self.request.matchdict['comment_id'])
1617 self.request.matchdict['comment_id'])
1498 comment_id = comment.comment_id
1618 comment_id = comment.comment_id
1499
1619
1500 if comment.immutable:
1620 if comment.immutable:
1501 # don't allow deleting comments that are immutable
1621 # don't allow deleting comments that are immutable
1502 raise HTTPForbidden()
1622 raise HTTPForbidden()
1503
1623
1504 if pull_request.is_closed():
1624 if pull_request.is_closed():
1505 log.debug('comment: forbidden because pull request is closed')
1625 log.debug('comment: forbidden because pull request is closed')
1506 raise HTTPForbidden()
1626 raise HTTPForbidden()
1507
1627
1508 if not comment:
1628 if not comment:
1509 log.debug('Comment with id:%s not found, skipping', comment_id)
1629 log.debug('Comment with id:%s not found, skipping', comment_id)
1510 # comment already deleted in another call probably
1630 # comment already deleted in another call probably
1511 return True
1631 return True
1512
1632
1513 if comment.pull_request.is_closed():
1633 if comment.pull_request.is_closed():
1514 # don't allow deleting comments on closed pull request
1634 # don't allow deleting comments on closed pull request
1515 raise HTTPForbidden()
1635 raise HTTPForbidden()
1516
1636
1517 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1637 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1518 super_admin = h.HasPermissionAny('hg.admin')()
1638 super_admin = h.HasPermissionAny('hg.admin')()
1519 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1639 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1520 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1640 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1521 comment_repo_admin = is_repo_admin and is_repo_comment
1641 comment_repo_admin = is_repo_admin and is_repo_comment
1522
1642
1523 if super_admin or comment_owner or comment_repo_admin:
1643 if super_admin or comment_owner or comment_repo_admin:
1524 old_calculated_status = comment.pull_request.calculated_review_status()
1644 old_calculated_status = comment.pull_request.calculated_review_status()
1525 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1645 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1526 Session().commit()
1646 Session().commit()
1527 calculated_status = comment.pull_request.calculated_review_status()
1647 calculated_status = comment.pull_request.calculated_review_status()
1528 if old_calculated_status != calculated_status:
1648 if old_calculated_status != calculated_status:
1529 PullRequestModel().trigger_pull_request_hook(
1649 PullRequestModel().trigger_pull_request_hook(
1530 comment.pull_request, self._rhodecode_user, 'review_status_change',
1650 comment.pull_request, self._rhodecode_user, 'review_status_change',
1531 data={'status': calculated_status})
1651 data={'status': calculated_status})
1532 return True
1652 return True
1533 else:
1653 else:
1534 log.warning('No permissions for user %s to delete comment_id: %s',
1654 log.warning('No permissions for user %s to delete comment_id: %s',
1535 self._rhodecode_db_user, comment_id)
1655 self._rhodecode_db_user, comment_id)
1536 raise HTTPNotFound()
1656 raise HTTPNotFound()
1537
1657
1538 @LoginRequired()
1658 @LoginRequired()
1539 @NotAnonymous()
1659 @NotAnonymous()
1540 @HasRepoPermissionAnyDecorator(
1660 @HasRepoPermissionAnyDecorator(
1541 'repository.read', 'repository.write', 'repository.admin')
1661 'repository.read', 'repository.write', 'repository.admin')
1542 @CSRFRequired()
1662 @CSRFRequired()
1543 @view_config(
1663 @view_config(
1544 route_name='pullrequest_comment_edit', request_method='POST',
1664 route_name='pullrequest_comment_edit', request_method='POST',
1545 renderer='json_ext')
1665 renderer='json_ext')
1546 def pull_request_comment_edit(self):
1666 def pull_request_comment_edit(self):
1547 self.load_default_context()
1667 self.load_default_context()
1548
1668
1549 pull_request = PullRequest.get_or_404(
1669 pull_request = PullRequest.get_or_404(
1550 self.request.matchdict['pull_request_id']
1670 self.request.matchdict['pull_request_id']
1551 )
1671 )
1552 comment = ChangesetComment.get_or_404(
1672 comment = ChangesetComment.get_or_404(
1553 self.request.matchdict['comment_id']
1673 self.request.matchdict['comment_id']
1554 )
1674 )
1555 comment_id = comment.comment_id
1675 comment_id = comment.comment_id
1556
1676
1557 if comment.immutable:
1677 if comment.immutable:
1558 # don't allow deleting comments that are immutable
1678 # don't allow deleting comments that are immutable
1559 raise HTTPForbidden()
1679 raise HTTPForbidden()
1560
1680
1561 if pull_request.is_closed():
1681 if pull_request.is_closed():
1562 log.debug('comment: forbidden because pull request is closed')
1682 log.debug('comment: forbidden because pull request is closed')
1563 raise HTTPForbidden()
1683 raise HTTPForbidden()
1564
1684
1565 if not comment:
1685 if not comment:
1566 log.debug('Comment with id:%s not found, skipping', comment_id)
1686 log.debug('Comment with id:%s not found, skipping', comment_id)
1567 # comment already deleted in another call probably
1687 # comment already deleted in another call probably
1568 return True
1688 return True
1569
1689
1570 if comment.pull_request.is_closed():
1690 if comment.pull_request.is_closed():
1571 # don't allow deleting comments on closed pull request
1691 # don't allow deleting comments on closed pull request
1572 raise HTTPForbidden()
1692 raise HTTPForbidden()
1573
1693
1574 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1694 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1575 super_admin = h.HasPermissionAny('hg.admin')()
1695 super_admin = h.HasPermissionAny('hg.admin')()
1576 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1696 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1577 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1697 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1578 comment_repo_admin = is_repo_admin and is_repo_comment
1698 comment_repo_admin = is_repo_admin and is_repo_comment
1579
1699
1580 if super_admin or comment_owner or comment_repo_admin:
1700 if super_admin or comment_owner or comment_repo_admin:
1581 text = self.request.POST.get('text')
1701 text = self.request.POST.get('text')
1582 version = self.request.POST.get('version')
1702 version = self.request.POST.get('version')
1583 if text == comment.text:
1703 if text == comment.text:
1584 log.warning(
1704 log.warning(
1585 'Comment(PR): '
1705 'Comment(PR): '
1586 'Trying to create new version '
1706 'Trying to create new version '
1587 'with the same comment body {}'.format(
1707 'with the same comment body {}'.format(
1588 comment_id,
1708 comment_id,
1589 )
1709 )
1590 )
1710 )
1591 raise HTTPNotFound()
1711 raise HTTPNotFound()
1592
1712
1593 if version.isdigit():
1713 if version.isdigit():
1594 version = int(version)
1714 version = int(version)
1595 else:
1715 else:
1596 log.warning(
1716 log.warning(
1597 'Comment(PR): Wrong version type {} {} '
1717 'Comment(PR): Wrong version type {} {} '
1598 'for comment {}'.format(
1718 'for comment {}'.format(
1599 version,
1719 version,
1600 type(version),
1720 type(version),
1601 comment_id,
1721 comment_id,
1602 )
1722 )
1603 )
1723 )
1604 raise HTTPNotFound()
1724 raise HTTPNotFound()
1605
1725
1606 try:
1726 try:
1607 comment_history = CommentsModel().edit(
1727 comment_history = CommentsModel().edit(
1608 comment_id=comment_id,
1728 comment_id=comment_id,
1609 text=text,
1729 text=text,
1610 auth_user=self._rhodecode_user,
1730 auth_user=self._rhodecode_user,
1611 version=version,
1731 version=version,
1612 )
1732 )
1613 except CommentVersionMismatch:
1733 except CommentVersionMismatch:
1614 raise HTTPConflict()
1734 raise HTTPConflict()
1615
1735
1616 if not comment_history:
1736 if not comment_history:
1617 raise HTTPNotFound()
1737 raise HTTPNotFound()
1618
1738
1619 Session().commit()
1739 Session().commit()
1620
1740
1621 PullRequestModel().trigger_pull_request_hook(
1741 PullRequestModel().trigger_pull_request_hook(
1622 pull_request, self._rhodecode_user, 'comment_edit',
1742 pull_request, self._rhodecode_user, 'comment_edit',
1623 data={'comment': comment})
1743 data={'comment': comment})
1624
1744
1625 return {
1745 return {
1626 'comment_history_id': comment_history.comment_history_id,
1746 'comment_history_id': comment_history.comment_history_id,
1627 'comment_id': comment.comment_id,
1747 'comment_id': comment.comment_id,
1628 'comment_version': comment_history.version,
1748 'comment_version': comment_history.version,
1629 'comment_author_username': comment_history.author.username,
1749 'comment_author_username': comment_history.author.username,
1630 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1750 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1631 'comment_created_on': h.age_component(comment_history.created_on,
1751 'comment_created_on': h.age_component(comment_history.created_on,
1632 time_is_local=True),
1752 time_is_local=True),
1633 }
1753 }
1634 else:
1754 else:
1635 log.warning('No permissions for user %s to edit comment_id: %s',
1755 log.warning('No permissions for user %s to edit comment_id: %s',
1636 self._rhodecode_db_user, comment_id)
1756 self._rhodecode_db_user, comment_id)
1637 raise HTTPNotFound()
1757 raise HTTPNotFound()
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now