@@ -1,193 +1,194 @@
1 | Nginx Configuration Example |
|
1 | Nginx Configuration Example | |
2 | --------------------------- |
|
2 | --------------------------- | |
3 |
|
3 | |||
4 | Use the following example to configure Nginx as your web server. |

4 | Use the following example to configure Nginx as your web server. | |
5 |
|
5 | |||
6 |
|
6 | |||
7 | .. code-block:: nginx |
|
7 | .. code-block:: nginx | |
8 |
|
8 | |||
9 | ## Rate limiter for certain pages to prevent brute force attacks |
|
9 | ## Rate limiter for certain pages to prevent brute force attacks | |
10 | limit_req_zone $binary_remote_addr zone=req_limit:10m rate=1r/s; |
|
10 | limit_req_zone $binary_remote_addr zone=req_limit:10m rate=1r/s; | |
11 |
|
11 | |||
12 | ## cache zone |
|
12 | ## cache zone | |
13 | proxy_cache_path /etc/nginx/nginx_cache levels=1:2 use_temp_path=off keys_zone=cache_zone:10m inactive=720h max_size=10g; |
|
13 | proxy_cache_path /etc/nginx/nginx_cache levels=1:2 use_temp_path=off keys_zone=cache_zone:10m inactive=720h max_size=10g; | |
14 |
|
14 | |||
15 | ## Custom log format |
|
15 | ## Custom log format | |
16 | log_format log_custom '$remote_addr - $remote_user [$time_local] ' |
|
16 | log_format log_custom '$remote_addr - $remote_user [$time_local] ' | |
17 | '"$request" $status $body_bytes_sent ' |
|
17 | '"$request" $status $body_bytes_sent ' | |
18 | '"$http_referer" "$http_user_agent" ' |
|
18 | '"$http_referer" "$http_user_agent" ' | |
19 | '$request_time $upstream_response_time $pipe'; |
|
19 | '$request_time $upstream_response_time $pipe'; | |
20 |
|
20 | |||
21 | ## Define one or more upstreams (local RhodeCode instance) to connect to |
|
21 | ## Define one or more upstreams (local RhodeCode instance) to connect to | |
22 | upstream rc { |
|
22 | upstream rc { | |
23 | # URL of the running RhodeCode instance. |

23 | # URL of the running RhodeCode instance. | |
24 | # This is shown as `- URL: <host>` in output from rccontrol status. |
|
24 | # This is shown as `- URL: <host>` in output from rccontrol status. | |
25 | server 127.0.0.1:10002; |
|
25 | server 127.0.0.1:10002; | |
26 |
|
26 | |||
27 | # add more instances for load balancing |
|
27 | # add more instances for load balancing | |
28 | # server 127.0.0.1:10003; |
|
28 | # server 127.0.0.1:10003; | |
29 | # server 127.0.0.1:10004; |
|
29 | # server 127.0.0.1:10004; | |
30 | } |
|
30 | } | |
31 |
|
31 | |||
32 | ## HTTP to HTTPS rewrite |
|
32 | ## HTTP to HTTPS rewrite | |
33 | server { |
|
33 | server { | |
34 | listen 80; |
|
34 | listen 80; | |
35 | server_name rhodecode.myserver.com; |
|
35 | server_name rhodecode.myserver.com; | |
36 |
|
36 | |||
37 | if ($http_host = rhodecode.myserver.com) { |
|
37 | if ($http_host = rhodecode.myserver.com) { | |
38 | rewrite (.*) https://rhodecode.myserver.com$1 permanent; |
|
38 | rewrite (.*) https://rhodecode.myserver.com$1 permanent; | |
39 | } |
|
39 | } | |
40 | } |
|
40 | } | |
41 |
|
41 | |||
42 | ## Optional gist alias server, for serving nicer GIST urls. |
|
42 | ## Optional gist alias server, for serving nicer GIST urls. | |
43 | server { |
|
43 | server { | |
44 | listen 443; |
|
44 | listen 443; | |
45 | server_name gist.myserver.com; |
|
45 | server_name gist.myserver.com; | |
46 | access_log /var/log/nginx/gist.access.log log_custom; |
|
46 | access_log /var/log/nginx/gist.access.log log_custom; | |
47 | error_log /var/log/nginx/gist.error.log; |
|
47 | error_log /var/log/nginx/gist.error.log; | |
48 |
|
48 | |||
49 | ssl on; |
|
49 | ssl on; | |
50 | ssl_certificate gist.rhodecode.myserver.com.crt; |
|
50 | ssl_certificate gist.rhodecode.myserver.com.crt; | |
51 | ssl_certificate_key gist.rhodecode.myserver.com.key; |
|
51 | ssl_certificate_key gist.rhodecode.myserver.com.key; | |
52 |
|
52 | |||
53 | ssl_session_timeout 5m; |
|
53 | ssl_session_timeout 5m; | |
54 |
|
54 | |||
55 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; |
|
55 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; | |
56 | ssl_prefer_server_ciphers on; |
|
56 | ssl_prefer_server_ciphers on; | |
57 | ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA'; |
|
57 | ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA'; | |
58 |
|
58 | |||
59 | ## Strict-Transport-Security prevents https -> http downgrade |

59 | ## Strict-Transport-Security prevents https -> http downgrade | |
60 | add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;"; |
|
60 | add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;"; | |
61 |
|
61 | |||
62 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits |
|
62 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits | |
63 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; |
|
63 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; | |
64 |
|
64 | |||
65 | rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1; |
|
65 | rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1; | |
66 | rewrite (.*) https://rhodecode.myserver.com/_admin/gists; |
|
66 | rewrite (.*) https://rhodecode.myserver.com/_admin/gists; | |
67 | } |
|
67 | } | |
68 |
|
68 | |||
69 |
|
69 | |||
70 | ## MAIN SSL enabled server |
|
70 | ## MAIN SSL enabled server | |
71 | server { |
|
71 | server { | |
72 | listen 443 ssl http2; |
|
72 | listen 443 ssl http2; | |
73 | server_name rhodecode.myserver.com; |
|
73 | server_name rhodecode.myserver.com; | |
74 |
|
74 | |||
75 | access_log /var/log/nginx/rhodecode.access.log log_custom; |
|
75 | access_log /var/log/nginx/rhodecode.access.log log_custom; | |
76 | error_log /var/log/nginx/rhodecode.error.log; |
|
76 | error_log /var/log/nginx/rhodecode.error.log; | |
77 |
|
77 | |||
78 | ssl_certificate rhodecode.myserver.com.crt; |
|
78 | ssl_certificate rhodecode.myserver.com.crt; | |
79 | ssl_certificate_key rhodecode.myserver.com.key; |
|
79 | ssl_certificate_key rhodecode.myserver.com.key; | |
80 |
|
80 | |||
81 | # enable session resumption to improve https performance |
|
81 | # enable session resumption to improve https performance | |
82 | # http://vincent.bernat.im/en/blog/2011-ssl-session-reuse-rfc5077.html |
|
82 | # http://vincent.bernat.im/en/blog/2011-ssl-session-reuse-rfc5077.html | |
83 | ssl_session_cache shared:SSL:50m; |
|
83 | ssl_session_cache shared:SSL:50m; | |
84 | ssl_session_timeout 5m; |
|
84 | ssl_session_timeout 5m; | |
85 |
|
85 | |||
86 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits |
|
86 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits | |
87 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; |
|
87 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; | |
88 |
|
88 | |||
89 | # enables server-side protection from BEAST attacks |
|
89 | # enables server-side protection from BEAST attacks | |
90 | # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html |
|
90 | # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html | |
91 | ssl_prefer_server_ciphers on; |
|
91 | ssl_prefer_server_ciphers on; | |
92 |
|
92 | |||
93 | # disable SSLv3 (enabled by default since nginx 0.8.19) since it's less secure than TLS http://en.wikipedia.org/wiki/Secure_Sockets_Layer#SSL_3.0 |

93 | # disable SSLv3 (enabled by default since nginx 0.8.19) since it's less secure than TLS http://en.wikipedia.org/wiki/Secure_Sockets_Layer#SSL_3.0 | |
94 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; |
|
94 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; | |
95 |
|
95 | |||
96 | # ciphers chosen for forward secrecy and compatibility |
|
96 | # ciphers chosen for forward secrecy and compatibility | |
97 | # http://blog.ivanristic.com/2013/08/configuring-apache-nginx-and-openssl-for-forward-secrecy.html |
|
97 | # http://blog.ivanristic.com/2013/08/configuring-apache-nginx-and-openssl-for-forward-secrecy.html | |
98 | ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4"; |
|
98 | ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4"; | |
99 |
|
99 | |||
100 | client_body_buffer_size 128k; |
|
100 | client_body_buffer_size 128k; | |
101 | # maximum number and size of buffers for large headers to read from client request |
|
101 | # maximum number and size of buffers for large headers to read from client request | |
102 | large_client_header_buffers 16 256k; |
|
102 | large_client_header_buffers 16 256k; | |
103 |
|
103 | |||
104 | ## uncomment to serve static files by Nginx, recommended for performance |
|
104 | ## uncomment to serve static files by Nginx, recommended for performance | |
105 | # location /_static/rhodecode { |
|
105 | # location /_static/rhodecode { | |
106 | # gzip on; |
|
106 | # gzip on; | |
107 | # gzip_min_length 500; |
|
107 | # gzip_min_length 500; | |
108 | # gzip_proxied any; |
|
108 | # gzip_proxied any; | |
109 | # gzip_comp_level 4; |
|
109 | # gzip_comp_level 4; | |
110 | # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml; |
|
110 | # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml; | |
111 | # gzip_vary on; |
|
111 | # gzip_vary on; | |
112 | # gzip_disable "msie6"; |
|
112 | # gzip_disable "msie6"; | |
113 | # expires 60d; |
|
113 | # expires 60d; | |
114 | # alias /path/to/.rccontrol/community-1/static; |
|
114 | # alias /path/to/.rccontrol/community-1/static; | |
115 | # alias /path/to/.rccontrol/enterprise-1/static; |
|
115 | # alias /path/to/.rccontrol/enterprise-1/static; | |
116 | # } |
|
116 | # } | |
117 |
|
117 | |||
118 | ## channelstream location handler; if channelstream live chat and notifications |

118 | ## channelstream location handler; if channelstream live chat and notifications | |
119 | ## are enabled this will proxy the requests to the channelstream websocket server |

119 | ## are enabled this will proxy the requests to the channelstream websocket server | |
120 | location /_channelstream { |
|
120 | location /_channelstream { | |
121 | rewrite /_channelstream/(.*) /$1 break; |
|
121 | rewrite /_channelstream/(.*) /$1 break; | |
122 | gzip off; |
|
122 | gzip off; | |
123 | tcp_nodelay off; |
|
123 | tcp_nodelay off; | |
124 |
|
124 | |||
125 | proxy_connect_timeout 10; |
|
125 | proxy_connect_timeout 10; | |
126 | proxy_send_timeout 10m; |
|
126 | proxy_send_timeout 10m; | |
127 | proxy_read_timeout 10m; |
|
127 | proxy_read_timeout 10m; | |
128 |
|
128 | |||
129 | proxy_set_header Host $host; |
|
129 | proxy_set_header Host $host; | |
130 | proxy_set_header X-Real-IP $remote_addr; |
|
130 | proxy_set_header X-Real-IP $remote_addr; | |
131 | proxy_set_header X-Url-Scheme $scheme; |
|
131 | proxy_set_header X-Url-Scheme $scheme; | |
132 | proxy_set_header X-Forwarded-Proto $scheme; |
|
132 | proxy_set_header X-Forwarded-Proto $scheme; | |
133 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; |
|
133 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; | |
134 |
|
134 | |||
135 | proxy_http_version 1.1; |
|
135 | proxy_http_version 1.1; | |
136 | proxy_set_header Upgrade $http_upgrade; |
|
136 | proxy_set_header Upgrade $http_upgrade; | |
137 | proxy_set_header Connection "upgrade"; |
|
137 | proxy_set_header Connection "upgrade"; | |
138 |
|
138 | |||
139 | proxy_pass http://127.0.0.1:9800; |
|
139 | proxy_pass http://127.0.0.1:9800; | |
140 | } |
|
140 | } | |
141 |
|
141 | |||
142 | ## rate limit this endpoint to prevent login page brute-force attacks |
|
142 | ## rate limit this endpoint to prevent login page brute-force attacks | |
143 | location /_admin/login { |
|
143 | location /_admin/login { | |
144 | limit_req zone=req_limit burst=10 nodelay; |
|
144 | limit_req zone=req_limit burst=10 nodelay; | |
145 | try_files $uri @rhodecode_http; |
|
145 | try_files $uri @rhodecode_http; | |
146 | } |
|
146 | } | |
147 |
|
147 | |||
148 | ## Special cache for the file store; make sure you enable this intentionally, as |

148 | ## Special cache for the file store; make sure you enable this intentionally, as | |
149 | ## it could bypass uploaded file permissions |

149 | ## it could bypass uploaded file permissions | |
150 | # location /_file_store/download { |
|
150 | # location /_file_store/download/gravatars { | |
151 | # |
|
151 | # | |
152 | # proxy_cache cache_zone; |
|
152 | # proxy_cache cache_zone; | |
153 | # # ignore Set-Cookie |
|
153 | # # ignore Set-Cookie | |
154 | # proxy_ignore_headers Set-Cookie; |
|
154 | # proxy_ignore_headers Set-Cookie; | |
155 | # |

155 | # # ignore cache-control |

156 | # proxy_ignore_headers Cache-Control; | |
156 | # |
|
157 | # | |
157 | # proxy_cache_key $host$uri$is_args$args; |
|
158 | # proxy_cache_key $host$uri$is_args$args; | |
158 | # proxy_cache_methods GET; |
|
159 | # proxy_cache_methods GET; | |
159 | # |
|
160 | # | |
160 | # proxy_cache_bypass $http_cache_control; |
|
161 | # proxy_cache_bypass $http_cache_control; | |
161 | # proxy_cache_valid 200 302 720h; |
|
162 | # proxy_cache_valid 200 302 720h; | |
162 | # |
|
163 | # | |
163 | # proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504; |
|
164 | # proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504; | |
164 | # |
|
165 | # | |
165 | # # returns cache status in headers |
|
166 | # # returns cache status in headers | |
166 | # add_header X-Proxy-Cache $upstream_cache_status; |
|
167 | # add_header X-Proxy-Cache $upstream_cache_status; | |
167 | # add_header Cache-Control "public"; |
|
168 | # add_header Cache-Control "public"; | |
168 | # |
|
169 | # | |
169 | # proxy_cache_lock on; |
|
170 | # proxy_cache_lock on; | |
170 | # proxy_cache_lock_age 5m; |
|
171 | # proxy_cache_lock_age 5m; | |
171 | # |
|
172 | # | |
172 | # proxy_pass http://rc; |
|
173 | # proxy_pass http://rc; | |
173 | # |
|
174 | # | |
174 | # } |
|
175 | # } | |
175 |
|
176 | |||
176 | location / { |
|
177 | location / { | |
177 | try_files $uri @rhodecode_http; |
|
178 | try_files $uri @rhodecode_http; | |
178 | } |
|
179 | } | |
179 |
|
180 | |||
180 | location @rhodecode_http { |
|
181 | location @rhodecode_http { | |
181 | # an example of proxy.conf can be found in our docs. |

182 | # an example of proxy.conf can be found in our docs. | |
182 | include /etc/nginx/proxy.conf; |
|
183 | include /etc/nginx/proxy.conf; | |
183 | proxy_pass http://rc; |
|
184 | proxy_pass http://rc; | |
184 | } |
|
185 | } | |
185 |
|
186 | |||
186 | ## Custom 502 error page. |
|
187 | ## Custom 502 error page. | |
187 | ## Will be displayed while RhodeCode server is turned off |
|
188 | ## Will be displayed while RhodeCode server is turned off | |
188 | error_page 502 /502.html; |
|
189 | error_page 502 /502.html; | |
189 | location = /502.html { |
|
190 | location = /502.html { | |
190 | #root /path/to/.rccontrol/community-1/static; |
|
191 | #root /path/to/.rccontrol/community-1/static; | |
191 | root /path/to/.rccontrol/enterprise-1/static; |
|
192 | root /path/to/.rccontrol/enterprise-1/static; | |
192 | } |
|
193 | } | |
193 | } |
\ No newline at end of file

194 | } | |
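
The rate limiter defined above (limit_req_zone plus "limit_req zone=req_limit burst=10 nodelay" on /_admin/login) can be sanity-checked from outside. The sketch below is a minimal probe, assuming the placeholder host name from the example and the requests library; it is not part of the configuration itself.

.. code-block:: python

    import requests

    # Minimal probe of the login rate limit configured above (rate=1r/s, burst=10 nodelay).
    # The host is the placeholder from the example; verify=False only because the
    # example uses locally managed certificates.
    url = 'https://rhodecode.myserver.com/_admin/login'

    codes = [requests.get(url, verify=False).status_code for _ in range(15)]
    print(codes)
    # Expect the first ~11 requests to pass and the rest to be rejected with 503,
    # nginx's default limit_req status (tunable via limit_req_status).
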
@@ -1,52 +1,52 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | import os |
|
20 | import os | |
21 | from rhodecode.apps.file_store import config_keys |
|
21 | from rhodecode.apps.file_store import config_keys | |
22 | from rhodecode.config.middleware import _bool_setting, _string_setting |
|
22 | from rhodecode.config.middleware import _bool_setting, _string_setting | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | def _sanitize_settings_and_apply_defaults(settings): |
|
25 | def _sanitize_settings_and_apply_defaults(settings): | |
26 | """ |
|
26 | """ | |
27 | Set defaults, convert to python types and validate settings. |
|
27 | Set defaults, convert to python types and validate settings. | |
28 | """ |
|
28 | """ | |
29 | _bool_setting(settings, config_keys.enabled, 'true') |
|
29 | _bool_setting(settings, config_keys.enabled, 'true') | |
30 |
|
30 | |||
31 | _string_setting(settings, config_keys.backend, 'local') |
|
31 | _string_setting(settings, config_keys.backend, 'local') | |
32 |
|
32 | |||
33 | default_store = os.path.join(os.path.dirname(settings['__file__']), 'upload_store') |
|
33 | default_store = os.path.join(os.path.dirname(settings['__file__']), 'upload_store') | |
34 | _string_setting(settings, config_keys.store_path, default_store) |
|
34 | _string_setting(settings, config_keys.store_path, default_store) | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | def includeme(config): |
|
37 | def includeme(config): | |
38 | settings = config.registry.settings |
|
38 | settings = config.registry.settings | |
39 | _sanitize_settings_and_apply_defaults(settings) |
|
39 | _sanitize_settings_and_apply_defaults(settings) | |
40 |
|
40 | |||
41 | config.add_route( |
|
41 | config.add_route( | |
42 | name='upload_file', |
|
42 | name='upload_file', | |
43 | pattern='/_file_store/upload') |
|
43 | pattern='/_file_store/upload') | |
44 | config.add_route( |
|
44 | config.add_route( | |
45 | name='download_file', |
|
45 | name='download_file', | |
46 | pattern='/_file_store/download/{fid}') |
|
46 | pattern='/_file_store/download/{fid:.*}') | |
47 | config.add_route( |
|
47 | config.add_route( | |
48 | name='download_file_by_token', |
|
48 | name='download_file_by_token', | |
49 | pattern='/_file_store/token-download/{_auth_token}/{fid}') |
|
49 | pattern='/_file_store/token-download/{_auth_token}/{fid:.*}') | |
50 |
|
50 | |||
51 | # Scan module for configuration decorators. |
|
51 | # Scan module for configuration decorators. | |
52 | config.scan('.views', ignore='.tests') |
|
52 | config.scan('.views', ignore='.tests') |
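
The only functional change above is the route pattern: {fid} becomes {fid:.*}. A bare Pyramid replacement marker matches a single path segment (no slashes), so file ids that carry a directory prefix, such as the gravatars paths cached in the Nginx example, would not resolve. Below is a rough illustration with plain regular expressions, assuming Pyramid's default segment behaviour; it is not Pyramid itself.

.. code-block:: python

    import re

    # Approximation of the two route patterns; the default marker matches one
    # path segment only, while {fid:.*} lets the fid span slashes.
    OLD = re.compile(r'^/_file_store/download/(?P<fid>[^/]+)$')
    NEW = re.compile(r'^/_file_store/download/(?P<fid>.*)$')

    plain = '/_file_store/download/0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'
    prefixed = '/_file_store/download/gravatars/0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'

    assert OLD.match(plain) and NEW.match(plain)
    assert OLD.match(prefixed) is None   # the old pattern cannot serve prefixed ids
    assert NEW.match(prefixed).group('fid').startswith('gravatars/')
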
@@ -1,240 +1,261 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import time |
|
22 | import time | |
|
23 | import errno | |||
23 | import shutil |
|
24 | import shutil | |
24 | import hashlib |
|
25 | import hashlib | |
25 |
|
26 | |||
26 | from rhodecode.lib.ext_json import json |
|
27 | from rhodecode.lib.ext_json import json | |
27 | from rhodecode.apps.file_store import utils |
|
28 | from rhodecode.apps.file_store import utils | |
28 | from rhodecode.apps.file_store.extensions import resolve_extensions |
|
29 | from rhodecode.apps.file_store.extensions import resolve_extensions | |
29 | from rhodecode.apps.file_store.exceptions import ( |
|
30 | from rhodecode.apps.file_store.exceptions import ( | |
30 | FileNotAllowedException, FileOverSizeException) |
|
31 | FileNotAllowedException, FileOverSizeException) | |
31 |
|
32 | |||
32 | METADATA_VER = 'v1' |
|
33 | METADATA_VER = 'v1' | |
33 |
|
34 | |||
34 |
|
35 | |||
|
36 | def safe_make_dirs(dir_path): | |||
|
37 | if not os.path.exists(dir_path): | |||
|
38 | try: | |||
|
39 | os.makedirs(dir_path) | |||
|
40 | except OSError as e: | |||
|
41 | if e.errno != errno.EEXIST: | |||
|
42 | raise | |||
|
43 | return | |||
|
44 | ||||
|
45 | ||||
35 | class LocalFileStorage(object): |
|
46 | class LocalFileStorage(object): | |
36 |
|
47 | |||
37 | @classmethod |
|
48 | @classmethod | |
|
49 | def apply_counter(cls, counter, filename): | |||
|
50 | name_counted = '%d-%s' % (counter, filename) | |||
|
51 | return name_counted | |||
|
52 | ||||
|
53 | @classmethod | |||
38 | def resolve_name(cls, name, directory): |
|
54 | def resolve_name(cls, name, directory): | |
39 | """ |
|
55 | """ | |
40 | Resolves a unique name and the correct path. If a filename |
|
56 | Resolves a unique name and the correct path. If a filename | |
41 | for that path already exists then a numeric prefix with values > 0 will be |
|
57 | for that path already exists then a numeric prefix with values > 0 will be | |
42 | added, for example test.jpg -> 1-test.jpg etc. Initially the file gets a 0 prefix. |

58 | added, for example test.jpg -> 1-test.jpg etc. Initially the file gets a 0 prefix. | |
43 |
|
59 | |||
44 | :param name: base name of file |
|
60 | :param name: base name of file | |
45 | :param directory: absolute directory path |
|
61 | :param directory: absolute directory path | |
46 | """ |
|
62 | """ | |
47 |
|
63 | |||
48 | counter = 0 |
|
64 | counter = 0 | |
49 | while True: |
|
65 | while True: | |
50 | name = |

66 | name_counted = cls.apply_counter(counter, name) |
51 |
|
67 | |||
52 | # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file |
|
68 | # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file | |
53 | sub_store = cls._sub_store_from_filename(name) |
|
69 | sub_store = cls._sub_store_from_filename(name_counted) | |
54 | sub_store_path = os.path.join(directory, sub_store) |
|
70 | sub_store_path = os.path.join(directory, sub_store) | |
55 |

71 | safe_make_dirs(sub_store_path) |
56 | os.makedirs(sub_store_path) |

|||
57 |
|
72 | |||
58 | path = os.path.join(sub_store_path, name) |
|
73 | path = os.path.join(sub_store_path, name_counted) | |
59 | if not os.path.exists(path): |
|
74 | if not os.path.exists(path): | |
60 | return name, path |
|
75 | return name_counted, path | |
61 | counter += 1 |
|
76 | counter += 1 | |
62 |
|
77 | |||
63 | @classmethod |
|
78 | @classmethod | |
64 | def _sub_store_from_filename(cls, filename): |
|
79 | def _sub_store_from_filename(cls, filename): | |
65 | return filename[:2] |
|
80 | return filename[:2] | |
66 |
|
81 | |||
67 | @classmethod |
|
82 | @classmethod | |
68 | def calculate_path_hash(cls, file_path): |
|
83 | def calculate_path_hash(cls, file_path): | |
69 | """ |
|
84 | """ | |
70 | Efficient calculation of file_path sha256 sum |
|
85 | Efficient calculation of file_path sha256 sum | |
71 |
|
86 | |||
72 | :param file_path: |
|
87 | :param file_path: | |
73 | :return: sha256sum |
|
88 | :return: sha256sum | |
74 | """ |
|
89 | """ | |
75 | digest = hashlib.sha256() |
|
90 | digest = hashlib.sha256() | |
76 | with open(file_path, 'rb') as f: |
|
91 | with open(file_path, 'rb') as f: | |
77 | for chunk in iter(lambda: f.read(1024 * 100), b""): |
|
92 | for chunk in iter(lambda: f.read(1024 * 100), b""): | |
78 | digest.update(chunk) |
|
93 | digest.update(chunk) | |
79 |
|
94 | |||
80 | return digest.hexdigest() |
|
95 | return digest.hexdigest() | |
81 |
|
96 | |||
82 | def __init__(self, base_path, extension_groups=None): |
|
97 | def __init__(self, base_path, extension_groups=None): | |
83 |
|
98 | |||
84 | """ |
|
99 | """ | |
85 | Local file storage |
|
100 | Local file storage | |
86 |
|
101 | |||
87 | :param base_path: the absolute base path where uploads are stored |
|
102 | :param base_path: the absolute base path where uploads are stored | |
88 | :param extension_groups: extensions string |
|
103 | :param extension_groups: extensions string | |
89 | """ |
|
104 | """ | |
90 |
|
105 | |||
91 | extension_groups = extension_groups or ['any'] |
|
106 | extension_groups = extension_groups or ['any'] | |
92 | self.base_path = base_path |
|
107 | self.base_path = base_path | |
93 | self.extensions = resolve_extensions([], groups=extension_groups) |
|
108 | self.extensions = resolve_extensions([], groups=extension_groups) | |
94 |
|
109 | |||
95 | def __repr__(self): |
|
110 | def __repr__(self): | |
96 | return '{}@{}'.format(self.__class__, self.base_path) |
|
111 | return '{}@{}'.format(self.__class__, self.base_path) | |
97 |
|
112 | |||
98 | def store_path(self, filename): |
|
113 | def store_path(self, filename): | |
99 | """ |
|
114 | """ | |
100 | Returns absolute file path of the filename, joined to the |
|
115 | Returns absolute file path of the filename, joined to the | |
101 | base_path. |
|
116 | base_path. | |
102 |
|
117 | |||
103 | :param filename: base name of file |
|
118 | :param filename: base name of file | |
104 | """ |
|
119 | """ | |
105 | sub_store = self._sub_store_from_filename(filename) |
|
120 | prefix_dir = '' | |
106 | return os.path.join(self.base_path, sub_store, filename) |
|
121 | if '/' in filename: | |
|
122 | prefix_dir, filename = filename.split('/') | |||
|
123 | sub_store = self._sub_store_from_filename(filename) | |||
|
124 | else: | |||
|
125 | sub_store = self._sub_store_from_filename(filename) | |||
|
126 | return os.path.join(self.base_path, prefix_dir, sub_store, filename) | |||
107 |
|
127 | |||
108 | def delete(self, filename): |
|
128 | def delete(self, filename): | |
109 | """ |
|
129 | """ | |
110 | Deletes the filename. Filename is resolved with the |
|
130 | Deletes the filename. Filename is resolved with the | |
111 | absolute path based on base_path. If file does not exist, |
|
131 | absolute path based on base_path. If file does not exist, | |
112 | returns **False**, otherwise **True** |
|
132 | returns **False**, otherwise **True** | |
113 |
|
133 | |||
114 | :param filename: base name of file |
|
134 | :param filename: base name of file | |
115 | """ |
|
135 | """ | |
116 | if self.exists(filename): |
|
136 | if self.exists(filename): | |
117 | os.remove(self.store_path(filename)) |
|
137 | os.remove(self.store_path(filename)) | |
118 | return True |
|
138 | return True | |
119 | return False |
|
139 | return False | |
120 |
|
140 | |||
121 | def exists(self, filename): |
|
141 | def exists(self, filename): | |
122 | """ |
|
142 | """ | |
123 | Checks if file exists. Resolves filename's absolute |
|
143 | Checks if file exists. Resolves filename's absolute | |
124 | path based on base_path. |
|
144 | path based on base_path. | |
125 |
|
145 | |||
126 | :param filename: base name of file |
|
146 | :param filename: file_uid name of file, e.g 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg | |
127 | """ |
|
147 | """ | |
128 | return os.path.exists(self.store_path(filename)) |
|
148 | return os.path.exists(self.store_path(filename)) | |
129 |
|
149 | |||
130 | def filename_allowed(self, filename, extensions=None): |
|
150 | def filename_allowed(self, filename, extensions=None): | |
131 | """Checks if a filename has an allowed extension |
|
151 | """Checks if a filename has an allowed extension | |
132 |
|
152 | |||
133 | :param filename: base name of file |
|
153 | :param filename: base name of file | |
134 | :param extensions: iterable of extensions (or self.extensions) |
|
154 | :param extensions: iterable of extensions (or self.extensions) | |
135 | """ |
|
155 | """ | |
136 | _, ext = os.path.splitext(filename) |
|
156 | _, ext = os.path.splitext(filename) | |
137 | return self.extension_allowed(ext, extensions) |
|
157 | return self.extension_allowed(ext, extensions) | |
138 |
|
158 | |||
139 | def extension_allowed(self, ext, extensions=None): |
|
159 | def extension_allowed(self, ext, extensions=None): | |
140 | """ |
|
160 | """ | |
141 | Checks if an extension is permitted. Both e.g. ".jpg" and |
|
161 | Checks if an extension is permitted. Both e.g. ".jpg" and | |
142 | "jpg" can be passed in. Extension lookup is case-insensitive. |
|
162 | "jpg" can be passed in. Extension lookup is case-insensitive. | |
143 |
|
163 | |||
144 | :param ext: extension to check |
|
164 | :param ext: extension to check | |
145 | :param extensions: iterable of extensions to validate against (or self.extensions) |
|
165 | :param extensions: iterable of extensions to validate against (or self.extensions) | |
146 | """ |
|
166 | """ | |
147 | def normalize_ext(_ext): |
|
167 | def normalize_ext(_ext): | |
148 | if _ext.startswith('.'): |
|
168 | if _ext.startswith('.'): | |
149 | _ext = _ext[1:] |
|
169 | _ext = _ext[1:] | |
150 | return _ext.lower() |
|
170 | return _ext.lower() | |
151 |
|
171 | |||
152 | extensions = extensions or self.extensions |
|
172 | extensions = extensions or self.extensions | |
153 | if not extensions: |
|
173 | if not extensions: | |
154 | return True |
|
174 | return True | |
155 |
|
175 | |||
156 | ext = normalize_ext(ext) |
|
176 | ext = normalize_ext(ext) | |
157 |
|
177 | |||
158 | return ext in [normalize_ext(x) for x in extensions] |
|
178 | return ext in [normalize_ext(x) for x in extensions] | |
159 |
|
179 | |||
160 | def save_file(self, file_obj, filename, directory=None, extensions=None, |
|
180 | def save_file(self, file_obj, filename, directory=None, extensions=None, | |
161 | extra_metadata=None, max_filesize=None, **kwargs): |
|
181 | extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs): | |
162 | """ |
|
182 | """ | |
163 | Saves a file object to the uploads location. |
|
183 | Saves a file object to the uploads location. | |
164 | Returns the resolved filename, i.e. the directory + |
|
184 | Returns the resolved filename, i.e. the directory + | |
165 | the (randomized/incremented) base name. |
|
185 | the (randomized/incremented) base name. | |
166 |
|
186 | |||
167 | :param file_obj: **cgi.FieldStorage** object (or similar) |
|
187 | :param file_obj: **cgi.FieldStorage** object (or similar) | |
168 | :param filename: original filename |
|
188 | :param filename: original filename | |
169 | :param directory: relative path of sub-directory |
|
189 | :param directory: relative path of sub-directory | |
170 | :param extensions: iterable of allowed extensions, if not default |
|
190 | :param extensions: iterable of allowed extensions, if not default | |
171 | :param max_filesize: maximum size of file that should be allowed |
|
191 | :param max_filesize: maximum size of file that should be allowed | |
|
192 | :param randomized_name: generate a random UID or a stable one derived from the filename | |||
172 | :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix |
|
193 | :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix | |
173 |
|
194 | |||
174 | """ |
|
195 | """ | |
175 |
|
196 | |||
176 | extensions = extensions or self.extensions |
|
197 | extensions = extensions or self.extensions | |
177 |
|
198 | |||
178 | if not self.filename_allowed(filename, extensions): |
|
199 | if not self.filename_allowed(filename, extensions): | |
179 | raise FileNotAllowedException() |
|
200 | raise FileNotAllowedException() | |
180 |
|
201 | |||
181 | if directory: |
|
202 | if directory: | |
182 | dest_directory = os.path.join(self.base_path, directory) |
|
203 | dest_directory = os.path.join(self.base_path, directory) | |
183 | else: |
|
204 | else: | |
184 | dest_directory = self.base_path |
|
205 | dest_directory = self.base_path | |
185 |
|
206 | |||
186 |

207 | safe_make_dirs(dest_directory) |
187 | os.makedirs(dest_directory) |

|||
188 |
|
208 | |||
189 | filename = utils.uid_filename(filename) |
|
209 | uid_filename = utils.uid_filename(filename, randomized=randomized_name) | |
190 |
|
210 | |||
191 | # resolve also produces special sub-dir for file optimized store |
|
211 | # resolve also produces special sub-dir for file optimized store | |
192 | filename, path = self.resolve_name(filename, dest_directory) |
|
212 | filename, path = self.resolve_name(uid_filename, dest_directory) | |
193 | stored_file_dir = os.path.dirname(path) |
|
213 | stored_file_dir = os.path.dirname(path) | |
194 |
|
214 | |||
195 | file_obj.seek(0) |
|
215 | file_obj.seek(0) | |
196 |
|
216 | |||
197 | with open(path, "wb") as dest: |
|
217 | with open(path, "wb") as dest: | |
198 | shutil.copyfileobj(file_obj, dest) |
|
218 | shutil.copyfileobj(file_obj, dest) | |
199 |
|
219 | |||
200 | metadata = {} |
|
220 | metadata = {} | |
201 | if extra_metadata: |
|
221 | if extra_metadata: | |
202 | metadata = extra_metadata |
|
222 | metadata = extra_metadata | |
203 |
|
223 | |||
204 | size = os.stat(path).st_size |
|
224 | size = os.stat(path).st_size | |
205 |
|
225 | |||
206 | if max_filesize and size > max_filesize: |
|
226 | if max_filesize and size > max_filesize: | |
207 | # free up the copied file, and raise exc |
|
227 | # free up the copied file, and raise exc | |
208 | os.remove(path) |
|
228 | os.remove(path) | |
209 | raise FileOverSizeException() |
|
229 | raise FileOverSizeException() | |
210 |
|
230 | |||
211 | file_hash = self.calculate_path_hash(path) |
|
231 | file_hash = self.calculate_path_hash(path) | |
212 |
|
232 | |||
213 | metadata.update( |
|
233 | metadata.update({ | |
214 |

234 | "filename": filename, |
215 | "size": size, |
|
235 | "size": size, | |
216 | "time": time.time(), |
|
236 | "time": time.time(), | |
217 | "sha256": file_hash, |
|
237 | "sha256": file_hash, | |
218 | "meta_ver": METADATA_VER |

238 | "meta_ver": METADATA_VER |

239 | }) |
219 |
|
240 | |||
220 | filename_meta = filename + '.meta' |
|
241 | filename_meta = filename + '.meta' | |
221 | with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta: |
|
242 | with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta: | |
222 | dest_meta.write(json.dumps(metadata)) |
|
243 | dest_meta.write(json.dumps(metadata)) | |
223 |
|
244 | |||
224 | if directory: |
|
245 | if directory: | |
225 | filename = os.path.join(directory, filename) |
|
246 | filename = os.path.join(directory, filename) | |
226 |
|
247 | |||
227 | return filename, metadata |
|
248 | return filename, metadata | |
228 |
|
249 | |||
229 | def get_metadata(self, filename): |
|
250 | def get_metadata(self, filename): | |
230 | """ |
|
251 | """ | |
231 | Reads JSON stored metadata for a file |
|
252 | Reads JSON stored metadata for a file | |
232 |
|
253 | |||
233 | :param filename: |
|
254 | :param filename: | |
234 | :return: |
|
255 | :return: | |
235 | """ |
|
256 | """ | |
236 | filename = self.store_path(filename) |
|
257 | filename = self.store_path(filename) | |
237 | filename_meta = filename + '.meta' |
|
258 | filename_meta = filename + '.meta' | |
238 |
|
259 | |||
239 | with open(filename_meta, "rb") as source_meta: |
|
260 | with open(filename_meta, "rb") as source_meta: | |
240 | return json.loads(source_meta.read()) |
|
261 | return json.loads(source_meta.read()) |
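
The storage changes boil down to three things: directory creation is made idempotent (safe_make_dirs swallows EEXIST), the collision counter is factored into apply_counter so the original name is no longer re-prefixed on every loop iteration, and store_path learns to split an optional "prefix_dir/filename" id. A standalone sketch of the resulting naming scheme, using os.makedirs(exist_ok=True) as a Python 3 stand-in for safe_make_dirs:

.. code-block:: python

    import os
    import tempfile

    def sub_store(filename):
        # the first two characters shard the store, e.g. '0-/0-test.jpg'
        return filename[:2]

    def resolve_name(name, directory):
        # mirrors LocalFileStorage.resolve_name: try 0-<name>, 1-<name>, ...
        counter = 0
        while True:
            name_counted = '%d-%s' % (counter, name)
            store_dir = os.path.join(directory, sub_store(name_counted))
            os.makedirs(store_dir, exist_ok=True)   # stand-in for safe_make_dirs
            path = os.path.join(store_dir, name_counted)
            if not os.path.exists(path):
                return name_counted, path
            counter += 1

    base = tempfile.mkdtemp()
    first, path1 = resolve_name('test.jpg', base)
    open(path1, 'wb').close()                       # occupy the first slot
    second, _ = resolve_name('test.jpg', base)
    print(first, second)                            # 0-test.jpg 1-test.jpg
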
@@ -1,54 +1,58 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import uuid |
|
22 | import uuid | |
23 |
|
23 | import StringIO | ||
24 | import pathlib2 |
|
24 | import pathlib2 | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | def get_file_storage(settings): |
|
27 | def get_file_storage(settings): | |
28 | from rhodecode.apps.file_store.backends.local_store import LocalFileStorage |
|
28 | from rhodecode.apps.file_store.backends.local_store import LocalFileStorage | |
29 | from rhodecode.apps.file_store import config_keys |
|
29 | from rhodecode.apps.file_store import config_keys | |
30 | store_path = settings.get(config_keys.store_path) |
|
30 | store_path = settings.get(config_keys.store_path) | |
31 | return LocalFileStorage(base_path=store_path) |
|
31 | return LocalFileStorage(base_path=store_path) | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | def splitext(filename): |
|
34 | def splitext(filename): | |
35 | ext = ''.join(pathlib2.Path(filename).suffixes) |
|
35 | ext = ''.join(pathlib2.Path(filename).suffixes) | |
36 | return filename, ext |
|
36 | return filename, ext | |
37 |
|
37 | |||
38 |
|
38 | |||
39 | def uid_filename(filename, randomized=True): |
|
39 | def uid_filename(filename, randomized=True): | |
40 | """ |
|
40 | """ | |
41 | Generates a randomized or stable (uuid) filename, |
|
41 | Generates a randomized or stable (uuid) filename, | |
42 | preserving the original extension. |
|
42 | preserving the original extension. | |
43 |
|
43 | |||
44 | :param filename: the original filename |
|
44 | :param filename: the original filename | |
45 | :param randomized: define if filename should be stable (sha1 based) or randomized |
|
45 | :param randomized: define if filename should be stable (sha1 based) or randomized | |
46 | """ |
|
46 | """ | |
47 |
|
47 | |||
48 | _, ext = splitext(filename) |
|
48 | _, ext = splitext(filename) | |
49 | if randomized: |
|
49 | if randomized: | |
50 | uid = uuid.uuid4() |
|
50 | uid = uuid.uuid4() | |
51 | else: |
|
51 | else: | |
52 | hash_key = '{}.{}'.format(filename, 'store') |
|
52 | hash_key = '{}.{}'.format(filename, 'store') | |
53 | uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key) |
|
53 | uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key) | |
54 | return str(uid) + ext.lower() |
|
54 | return str(uid) + ext.lower() | |
|
55 | ||||
|
56 | ||||
|
57 | def bytes_to_file_obj(bytes_data): | |||
|
58 | return StringIO.StringIO(bytes_data) |
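
uid_filename is what turns the user-supplied name into the stored file id; the randomized flag now threaded through save_file decides between a random uuid4 and a stable uuid5 derived from the filename. A simplified sketch of that behaviour (extension handling reduced to a plain rsplit instead of pathlib2 suffixes):

.. code-block:: python

    import uuid

    def uid_filename(filename, randomized=True):
        # simplified mirror of utils.uid_filename: keep the (lowercased)
        # extension, replace the base name with a uuid
        ext = '.' + filename.rsplit('.', 1)[-1] if '.' in filename else ''
        if randomized:
            uid = uuid.uuid4()                              # new id on every call
        else:
            hash_key = '{}.{}'.format(filename, 'store')
            uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key)  # same name -> same id
        return str(uid) + ext.lower()

    stable_a = uid_filename('logo.PNG', randomized=False)
    stable_b = uid_filename('logo.PNG', randomized=False)
    print(stable_a == stable_b)                                   # True: repeatable
    print(uid_filename('logo.PNG') == uid_filename('logo.PNG'))   # False: randomized
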
@@ -1,195 +1,195 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | import logging |
|
20 | import logging | |
21 |
|
21 | |||
22 | from pyramid.view import view_config |
|
22 | from pyramid.view import view_config | |
23 | from pyramid.response import FileResponse |
|
23 | from pyramid.response import FileResponse | |
24 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound |
|
24 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound | |
25 |
|
25 | |||
26 | from rhodecode.apps._base import BaseAppView |
|
26 | from rhodecode.apps._base import BaseAppView | |
27 | from rhodecode.apps.file_store import utils |
|
27 | from rhodecode.apps.file_store import utils | |
28 | from rhodecode.apps.file_store.exceptions import ( |
|
28 | from rhodecode.apps.file_store.exceptions import ( | |
29 | FileNotAllowedException, FileOverSizeException) |
|
29 | FileNotAllowedException, FileOverSizeException) | |
30 |
|
30 | |||
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib import audit_logger |
|
32 | from rhodecode.lib import audit_logger | |
33 | from rhodecode.lib.auth import ( |
|
33 | from rhodecode.lib.auth import ( | |
34 | CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
34 | CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny, | |
35 | LoginRequired) |
|
35 | LoginRequired) | |
36 | from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db |
|
36 | from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db | |
37 | from rhodecode.model.db import Session, FileStore, UserApiKeys |
|
37 | from rhodecode.model.db import Session, FileStore, UserApiKeys | |
38 |
|
38 | |||
39 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | class FileStoreView(BaseAppView): |
|
42 | class FileStoreView(BaseAppView): | |
43 | upload_key = 'store_file' |
|
43 | upload_key = 'store_file' | |
44 |
|
44 | |||
45 | def load_default_context(self): |
|
45 | def load_default_context(self): | |
46 | c = self._get_local_tmpl_context() |
|
46 | c = self._get_local_tmpl_context() | |
47 | self.storage = utils.get_file_storage(self.request.registry.settings) |
|
47 | self.storage = utils.get_file_storage(self.request.registry.settings) | |
48 | return c |
|
48 | return c | |
49 |
|
49 | |||
50 | def _guess_type(self, file_name): |
|
50 | def _guess_type(self, file_name): | |
51 | """ |
|
51 | """ | |
52 | Our own type guesser for mimetypes using the rich DB |
|
52 | Our own type guesser for mimetypes using the rich DB | |
53 | """ |
|
53 | """ | |
54 | if not hasattr(self, 'db'): |
|
54 | if not hasattr(self, 'db'): | |
55 | self.db = get_mimetypes_db() |
|
55 | self.db = get_mimetypes_db() | |
56 | _content_type, _encoding = self.db.guess_type(file_name, strict=False) |
|
56 | _content_type, _encoding = self.db.guess_type(file_name, strict=False) | |
57 | return _content_type, _encoding |
|
57 | return _content_type, _encoding | |
58 |
|
58 | |||
59 | def _serve_file(self, file_uid): |
|
59 | def _serve_file(self, file_uid): | |
60 |
|
60 | |||
61 | if not self.storage.exists(file_uid): |
|
61 | if not self.storage.exists(file_uid): | |
62 | store_path = self.storage.store_path(file_uid) |
|
62 | store_path = self.storage.store_path(file_uid) | |
63 | log.debug('File with FID:%s not found in the store under `%s`', |
|
63 | log.debug('File with FID:%s not found in the store under `%s`', | |
64 | file_uid, store_path) |
|
64 | file_uid, store_path) | |
65 | raise HTTPNotFound() |
|
65 | raise HTTPNotFound() | |
66 |
|
66 | |||
67 | db_obj = FileStore( |

67 | db_obj = FileStore.get_by_store_uid(file_uid, safe=True) |
68 | if not db_obj: |
|
68 | if not db_obj: | |
69 | raise HTTPNotFound() |
|
69 | raise HTTPNotFound() | |
70 |
|
70 | |||
71 | # private upload for user |
|
71 | # private upload for user | |
72 | if db_obj.check_acl and db_obj.scope_user_id: |
|
72 | if db_obj.check_acl and db_obj.scope_user_id: | |
73 | log.debug('Artifact: checking scope access for bound artifact user: `%s`', |
|
73 | log.debug('Artifact: checking scope access for bound artifact user: `%s`', | |
74 | db_obj.scope_user_id) |
|
74 | db_obj.scope_user_id) | |
75 | user = db_obj.user |
|
75 | user = db_obj.user | |
76 | if self._rhodecode_db_user.user_id != user.user_id: |
|
76 | if self._rhodecode_db_user.user_id != user.user_id: | |
77 | log.warning('Access to file store object forbidden') |
|
77 | log.warning('Access to file store object forbidden') | |
78 | raise HTTPNotFound() |
|
78 | raise HTTPNotFound() | |
79 |
|
79 | |||
80 | # scoped to repository permissions |
|
80 | # scoped to repository permissions | |
81 | if db_obj.check_acl and db_obj.scope_repo_id: |
|
81 | if db_obj.check_acl and db_obj.scope_repo_id: | |
82 | log.debug('Artifact: checking scope access for bound artifact repo: `%s`', |
|
82 | log.debug('Artifact: checking scope access for bound artifact repo: `%s`', | |
83 | db_obj.scope_repo_id) |
|
83 | db_obj.scope_repo_id) | |
84 | repo = db_obj.repo |
|
84 | repo = db_obj.repo | |
85 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
85 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] | |
86 | has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check') |
|
86 | has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check') | |
87 | if not has_perm: |
|
87 | if not has_perm: | |
88 | log.warning('Access to file store object `%s` forbidden', file_uid) |
|
88 | log.warning('Access to file store object `%s` forbidden', file_uid) | |
89 | raise HTTPNotFound() |
|
89 | raise HTTPNotFound() | |
90 |
|
90 | |||
91 | # scoped to repository group permissions |
|
91 | # scoped to repository group permissions | |
92 | if db_obj.check_acl and db_obj.scope_repo_group_id: |
|
92 | if db_obj.check_acl and db_obj.scope_repo_group_id: | |
93 | log.debug('Artifact: checking scope access for bound artifact repo group: `%s`', |
|
93 | log.debug('Artifact: checking scope access for bound artifact repo group: `%s`', | |
94 | db_obj.scope_repo_group_id) |
|
94 | db_obj.scope_repo_group_id) | |
95 | repo_group = db_obj.repo_group |
|
95 | repo_group = db_obj.repo_group | |
96 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
96 | perm_set = ['group.read', 'group.write', 'group.admin'] | |
97 | has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check') |
|
97 | has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check') | |
98 | if not has_perm: |
|
98 | if not has_perm: | |
99 | log.warning('Access to file store object `%s` forbidden', file_uid) |
|
99 | log.warning('Access to file store object `%s` forbidden', file_uid) | |
100 | raise HTTPNotFound() |
|
100 | raise HTTPNotFound() | |
101 |
|
101 | |||
102 | FileStore.bump_access_counter(file_uid) |
|
102 | FileStore.bump_access_counter(file_uid) | |
103 |
|
103 | |||
104 | file_path = self.storage.store_path(file_uid) |
|
104 | file_path = self.storage.store_path(file_uid) | |
105 | content_type = 'application/octet-stream' |
|
105 | content_type = 'application/octet-stream' | |
106 | content_encoding = None |
|
106 | content_encoding = None | |
107 |
|
107 | |||
108 | _content_type, _encoding = self._guess_type(file_path) |
|
108 | _content_type, _encoding = self._guess_type(file_path) | |
109 | if _content_type: |
|
109 | if _content_type: | |
110 | content_type = _content_type |
|
110 | content_type = _content_type | |
111 |
|
111 | |||
112 | # For file store we don't submit any session data, this logic tells the |
|
112 | # For file store we don't submit any session data, this logic tells the | |
113 | # Session lib to skip it |
|
113 | # Session lib to skip it | |
114 | setattr(self.request, '_file_response', True) |
|
114 | setattr(self.request, '_file_response', True) | |
115 | return FileResponse(file_path, request=self.request, |
|
115 | return FileResponse(file_path, request=self.request, | |
116 | content_type=content_type, content_encoding=content_encoding) |
|
116 | content_type=content_type, content_encoding=content_encoding) | |
117 |
|
117 | |||
118 | @LoginRequired() |
|
118 | @LoginRequired() | |
119 | @NotAnonymous() |
|
119 | @NotAnonymous() | |
120 | @CSRFRequired() |
|
120 | @CSRFRequired() | |
121 | @view_config(route_name='upload_file', request_method='POST', renderer='json_ext') |
|
121 | @view_config(route_name='upload_file', request_method='POST', renderer='json_ext') | |
122 | def upload_file(self): |
|
122 | def upload_file(self): | |
123 | self.load_default_context() |
|
123 | self.load_default_context() | |
124 | file_obj = self.request.POST.get(self.upload_key) |
|
124 | file_obj = self.request.POST.get(self.upload_key) | |
125 |
|
125 | |||
126 | if file_obj is None: |
|
126 | if file_obj is None: | |
127 | return {'store_fid': None, |
|
127 | return {'store_fid': None, | |
128 | 'access_path': None, |
|
128 | 'access_path': None, | |
129 | 'error': '{} data field is missing'.format(self.upload_key)} |
|
129 | 'error': '{} data field is missing'.format(self.upload_key)} | |
130 |
|
130 | |||
131 | if not hasattr(file_obj, 'filename'): |
|
131 | if not hasattr(file_obj, 'filename'): | |
132 | return {'store_fid': None, |
|
132 | return {'store_fid': None, | |
133 | 'access_path': None, |
|
133 | 'access_path': None, | |
134 | 'error': 'filename cannot be read from the data field'} |
|
134 | 'error': 'filename cannot be read from the data field'} | |
135 |
|
135 | |||
136 | filename = file_obj.filename |
|
136 | filename = file_obj.filename | |
137 |
|
137 | |||
138 | metadata = { |
|
138 | metadata = { | |
139 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
139 | 'user_uploaded': {'username': self._rhodecode_user.username, | |
140 | 'user_id': self._rhodecode_user.user_id, |
|
140 | 'user_id': self._rhodecode_user.user_id, | |
141 | 'ip': self._rhodecode_user.ip_addr}} |
|
141 | 'ip': self._rhodecode_user.ip_addr}} | |
142 | try: |
|
142 | try: | |
143 | store_uid, metadata = self.storage.save_file( |
|
143 | store_uid, metadata = self.storage.save_file( | |
144 | file_obj.file, filename, extra_metadata=metadata) |
|
144 | file_obj.file, filename, extra_metadata=metadata) | |
145 | except FileNotAllowedException: |
|
145 | except FileNotAllowedException: | |
146 | return {'store_fid': None, |
|
146 | return {'store_fid': None, | |
147 | 'access_path': None, |
|
147 | 'access_path': None, | |
148 | 'error': 'File {} is not allowed.'.format(filename)} |
|
148 | 'error': 'File {} is not allowed.'.format(filename)} | |
149 |
|
149 | |||
150 | except FileOverSizeException: |
|
150 | except FileOverSizeException: | |
151 | return {'store_fid': None, |
|
151 | return {'store_fid': None, | |
152 | 'access_path': None, |
|
152 | 'access_path': None, | |
153 | 'error': 'File {} is exceeding allowed limit.'.format(filename)} |
|
153 | 'error': 'File {} is exceeding allowed limit.'.format(filename)} | |
154 |
|
154 | |||
155 | try: |
|
155 | try: | |
156 | entry = FileStore.create( |
|
156 | entry = FileStore.create( | |
157 | file_uid=store_uid, filename=metadata["filename"], |
|
157 | file_uid=store_uid, filename=metadata["filename"], | |
158 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
158 | file_hash=metadata["sha256"], file_size=metadata["size"], | |
159 | file_description=u'upload attachment', |
|
159 | file_description=u'upload attachment', | |
160 | check_acl=False, user_id=self._rhodecode_user.user_id |
|
160 | check_acl=False, user_id=self._rhodecode_user.user_id | |
161 | ) |
|
161 | ) | |
162 | Session().add(entry) |
|
162 | Session().add(entry) | |
163 | Session().commit() |
|
163 | Session().commit() | |
164 | log.debug('Stored upload in DB as %s', entry) |
|
164 | log.debug('Stored upload in DB as %s', entry) | |
165 | except Exception: |
|
165 | except Exception: | |
166 | log.exception('Failed to store file %s', filename) |
|
166 | log.exception('Failed to store file %s', filename) | |
167 | return {'store_fid': None, |
|
167 | return {'store_fid': None, | |
168 | 'access_path': None, |
|
168 | 'access_path': None, | |
169 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
169 | 'error': 'File {} failed to store in DB.'.format(filename)} | |
170 |
|
170 | |||
171 | return {'store_fid': store_uid, |
|
171 | return {'store_fid': store_uid, | |
172 | 'access_path': h.route_path('download_file', fid=store_uid)} |
|
172 | 'access_path': h.route_path('download_file', fid=store_uid)} | |
173 |
|
173 | |||
174 | # ACL is checked by scopes, if no scope the file is accessible to all |
|
174 | # ACL is checked by scopes, if no scope the file is accessible to all | |
175 | @view_config(route_name='download_file') |
|
175 | @view_config(route_name='download_file') | |
176 | def download_file(self): |
|
176 | def download_file(self): | |
177 | self.load_default_context() |
|
177 | self.load_default_context() | |
178 | file_uid = self.request.matchdict['fid'] |
|
178 | file_uid = self.request.matchdict['fid'] | |
179 | log.debug('Requesting FID:%s from store %s', file_uid, self.storage) |
|
179 | log.debug('Requesting FID:%s from store %s', file_uid, self.storage) | |
180 | return self._serve_file(file_uid) |
|
180 | return self._serve_file(file_uid) | |
181 |
|
181 | |||
182 | # in addition to @LoginRequired ACL is checked by scopes |
|
182 | # in addition to @LoginRequired ACL is checked by scopes | |
183 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD]) |
|
183 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD]) | |
184 | @NotAnonymous() |
|
184 | @NotAnonymous() | |
185 | @view_config(route_name='download_file_by_token') |
|
185 | @view_config(route_name='download_file_by_token') | |
186 | def download_file_by_token(self): |
|
186 | def download_file_by_token(self): | |
187 | """ |
|
187 | """ | |
188 | Special view that allows accessing the download file via a special URL |

188 | Special view that allows accessing the download file via a special URL | |
189 | that embeds the auth token. |

189 | that embeds the auth token. | |
190 |
|
190 | |||
191 | http://example.com/_file_store/token-download/TOKEN/FILE_UID |
|
191 | http://example.com/_file_store/token-download/TOKEN/FILE_UID | |
192 | """ |
|
192 | """ | |
193 | self.load_default_context() |
|
193 | self.load_default_context() | |
194 | file_uid = self.request.matchdict['fid'] |
|
194 | file_uid = self.request.matchdict['fid'] | |
195 | return self._serve_file(file_uid) |
|
195 | return self._serve_file(file_uid) |
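
For completeness, the upload side of this view can be exercised with any HTTP client: the POST goes to /_file_store/upload, the file travels in the 'store_file' multipart field (the view's upload_key), CSRF protection applies, and the JSON answer carries either an error or the store_fid plus its access_path. A hypothetical client sketch follows; the host, session cookie and CSRF field name are assumptions, only the route, the field name and the response keys come from the code above.

.. code-block:: python

    import requests

    BASE = 'https://rhodecode.myserver.com'          # placeholder host (assumption)

    session = requests.Session()
    # assumption: an already authenticated session cookie and a CSRF token
    # scraped from a rendered page; the exact names may differ per deployment
    session.cookies.set('rhodecode_session', '<session cookie>')
    csrf_token = '<csrf token>'

    with open('report.pdf', 'rb') as f:
        resp = session.post(
            BASE + '/_file_store/upload',
            data={'csrf_token': csrf_token},
            files={'store_file': f},                 # upload_key used by FileStoreView
        )

    payload = resp.json()
    if payload.get('error'):
        print('upload rejected:', payload['error'])
    else:
        # access_path resolves to /_file_store/download/<store_fid>
        print('download from:', BASE + payload['access_path'])
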
1 | NO CONTENT: modified file (the requested commit or file is too big and the content was truncated) |