@@ -1,193 +1,194 b'' | |||
|
1 | 1 | Nginx Configuration Example |
|
2 | 2 | --------------------------- |
|
3 | 3 | |
|
4 | 4 | Use the following example to configure Nginx as your web server. |
|
5 | 5 | |
|
6 | 6 | |
|
7 | 7 | .. code-block:: nginx |
|
8 | 8 | |
|
9 | 9 | ## Rate limiter for certain pages to prevent brute force attacks |
|
10 | 10 | limit_req_zone $binary_remote_addr zone=req_limit:10m rate=1r/s; |
|
11 | 11 | |
|
12 | 12 | ## cache zone |
|
13 | 13 | proxy_cache_path /etc/nginx/nginx_cache levels=1:2 use_temp_path=off keys_zone=cache_zone:10m inactive=720h max_size=10g; |
|
14 | 14 | |
|
15 | 15 | ## Custom log format |
|
16 | 16 | log_format log_custom '$remote_addr - $remote_user [$time_local] ' |
|
17 | 17 | '"$request" $status $body_bytes_sent ' |
|
18 | 18 | '"$http_referer" "$http_user_agent" ' |
|
19 | 19 | '$request_time $upstream_response_time $pipe'; |
|
20 | 20 | |
|
21 | 21 | ## Define one or more upstreams (local RhodeCode instance) to connect to |
|
22 | 22 | upstream rc { |
|
23 | 23 | # Url to running RhodeCode instance. |
|
24 | 24 | # This is shown as `- URL: <host>` in output from rccontrol status. |
|
25 | 25 | server 127.0.0.1:10002; |
|
26 | 26 | |
|
27 | 27 | # add more instances for load balancing |
|
28 | 28 | # server 127.0.0.1:10003; |
|
29 | 29 | # server 127.0.0.1:10004; |
|
30 | 30 | } |
|
31 | 31 | |
|
32 | 32 | ## HTTP to HTTPS rewrite |
|
33 | 33 | server { |
|
34 | 34 | listen 80; |
|
35 | 35 | server_name rhodecode.myserver.com; |
|
36 | 36 | |
|
37 | 37 | if ($http_host = rhodecode.myserver.com) { |
|
38 | 38 | rewrite (.*) https://rhodecode.myserver.com$1 permanent; |
|
39 | 39 | } |
|
40 | 40 | } |
|
41 | 41 | |
|
42 | 42 | ## Optional gist alias server, for serving nicer GIST urls. |
|
43 | 43 | server { |
|
44 | 44 | listen 443; |
|
45 | 45 | server_name gist.myserver.com; |
|
46 | 46 | access_log /var/log/nginx/gist.access.log log_custom; |
|
47 | 47 | error_log /var/log/nginx/gist.error.log; |
|
48 | 48 | |
|
49 | 49 | ssl on; |
|
50 | 50 | ssl_certificate gist.rhodecode.myserver.com.crt; |
|
51 | 51 | ssl_certificate_key gist.rhodecode.myserver.com.key; |
|
52 | 52 | |
|
53 | 53 | ssl_session_timeout 5m; |
|
54 | 54 | |
|
55 | 55 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; |
|
56 | 56 | ssl_prefer_server_ciphers on; |
|
57 | 57 | ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA'; |
|
58 | 58 | |
|
59 | 59 | ## Strict transport security prevents https -> http downgrade |
|
60 | 60 | add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;"; |
|
61 | 61 | |
|
62 | 62 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits |
|
63 | 63 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; |
|
64 | 64 | |
|
65 | 65 | rewrite ^/(.+)$ https://rhodecode.myserver.com/_admin/gists/$1; |
|
66 | 66 | rewrite (.*) https://rhodecode.myserver.com/_admin/gists; |
|
67 | 67 | } |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | ## MAIN SSL enabled server |
|
71 | 71 | server { |
|
72 | 72 | listen 443 ssl http2; |
|
73 | 73 | server_name rhodecode.myserver.com; |
|
74 | 74 | |
|
75 | 75 | access_log /var/log/nginx/rhodecode.access.log log_custom; |
|
76 | 76 | error_log /var/log/nginx/rhodecode.error.log; |
|
77 | 77 | |
|
78 | 78 | ssl_certificate rhodecode.myserver.com.crt; |
|
79 | 79 | ssl_certificate_key rhodecode.myserver.com.key; |
|
80 | 80 | |
|
81 | 81 | # enable session resumption to improve https performance |
|
82 | 82 | # http://vincent.bernat.im/en/blog/2011-ssl-session-reuse-rfc5077.html |
|
83 | 83 | ssl_session_cache shared:SSL:50m; |
|
84 | 84 | ssl_session_timeout 5m; |
|
85 | 85 | |
|
86 | 86 | ## Diffie-Hellman parameter for DHE ciphersuites, recommended 2048 bits |
|
87 | 87 | #ssl_dhparam /etc/nginx/ssl/dhparam.pem; |
|
88 | 88 | |
|
89 | 89 | # enables server-side protection from BEAST attacks |
|
90 | 90 | # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html |
|
91 | 91 | ssl_prefer_server_ciphers on; |
|
92 | 92 | |
|
93 | 93 | # disable SSLv3 (enabled by default since nginx 0.8.19) since it's less secure than TLS http://en.wikipedia.org/wiki/Secure_Sockets_Layer#SSL_3.0 |
|
94 | 94 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; |
|
95 | 95 | |
|
96 | 96 | # ciphers chosen for forward secrecy and compatibility |
|
97 | 97 | # http://blog.ivanristic.com/2013/08/configuring-apache-nginx-and-openssl-for-forward-secrecy.html |
|
98 | 98 | ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4"; |
|
99 | 99 | |
|
100 | 100 | client_body_buffer_size 128k; |
|
101 | 101 | # maximum number and size of buffers for large headers to read from client request |
|
102 | 102 | large_client_header_buffers 16 256k; |
|
103 | 103 | |
|
104 | 104 | ## uncomment to serve static files by Nginx, recommended for performance |
|
105 | 105 | # location /_static/rhodecode { |
|
106 | 106 | # gzip on; |
|
107 | 107 | # gzip_min_length 500; |
|
108 | 108 | # gzip_proxied any; |
|
109 | 109 | # gzip_comp_level 4; |
|
110 | 110 | # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml; |
|
111 | 111 | # gzip_vary on; |
|
112 | 112 | # gzip_disable "msie6"; |
|
113 | 113 | # expires 60d; |
|
114 | 114 | # alias /path/to/.rccontrol/community-1/static; |
|
115 | 115 | # alias /path/to/.rccontrol/enterprise-1/static; |
|
116 | 116 | # } |
|
117 | 117 | |
|
118 | 118 | ## channelstream location handler, if channelstream live chat and notifications |
|
119 | 119 | ## are enabled, this will proxy the requests to the channelstream websocket server |
|
120 | 120 | location /_channelstream { |
|
121 | 121 | rewrite /_channelstream/(.*) /$1 break; |
|
122 | 122 | gzip off; |
|
123 | 123 | tcp_nodelay off; |
|
124 | 124 | |
|
125 | 125 | proxy_connect_timeout 10; |
|
126 | 126 | proxy_send_timeout 10m; |
|
127 | 127 | proxy_read_timeout 10m; |
|
128 | 128 | |
|
129 | 129 | proxy_set_header Host $host; |
|
130 | 130 | proxy_set_header X-Real-IP $remote_addr; |
|
131 | 131 | proxy_set_header X-Url-Scheme $scheme; |
|
132 | 132 | proxy_set_header X-Forwarded-Proto $scheme; |
|
133 | 133 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; |
|
134 | 134 | |
|
135 | 135 | proxy_http_version 1.1; |
|
136 | 136 | proxy_set_header Upgrade $http_upgrade; |
|
137 | 137 | proxy_set_header Connection "upgrade"; |
|
138 | 138 | |
|
139 | 139 | proxy_pass http://127.0.0.1:9800; |
|
140 | 140 | } |
|
141 | 141 | |
|
142 | 142 | ## rate limit this endpoint to prevent login page brute-force attacks |
|
143 | 143 | location /_admin/login { |
|
144 | 144 | limit_req zone=req_limit burst=10 nodelay; |
|
145 | 145 | try_files $uri @rhodecode_http; |
|
146 | 146 | } |
|
147 | 147 | |
|
148 | 148 | ## Special cache for the file store, make sure you enable this intentionally as |
|
149 | 149 | ## it could bypass uploaded file permissions |
|
150 | # location /_file_store/download { | |
|
150 | # location /_file_store/download/gravatars { | |
|
151 | 151 | # |
|
152 | 152 | # proxy_cache cache_zone; |
|
153 | 153 | # # ignore Set-Cookie |
|
154 | 154 | # proxy_ignore_headers Set-Cookie; |
|
155 | # | |
|
155 | # # ignore cache-control | |
|
156 | # proxy_ignore_headers Cache-Control; | |
|
156 | 157 | # |
|
157 | 158 | # proxy_cache_key $host$uri$is_args$args; |
|
158 | 159 | # proxy_cache_methods GET; |
|
159 | 160 | # |
|
160 | 161 | # proxy_cache_bypass $http_cache_control; |
|
161 | 162 | # proxy_cache_valid 200 302 720h; |
|
162 | 163 | # |
|
163 | 164 | # proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504; |
|
164 | 165 | # |
|
165 | 166 | # # returns cache status in headers |
|
166 | 167 | # add_header X-Proxy-Cache $upstream_cache_status; |
|
167 | 168 | # add_header Cache-Control "public"; |
|
168 | 169 | # |
|
169 | 170 | # proxy_cache_lock on; |
|
170 | 171 | # proxy_cache_lock_age 5m; |
|
171 | 172 | # |
|
172 | 173 | # proxy_pass http://rc; |
|
173 | 174 | # |
|
174 | 175 | # } |
|
175 | 176 | |
|
176 | 177 | location / { |
|
177 | 178 | try_files $uri @rhodecode_http; |
|
178 | 179 | } |
|
179 | 180 | |
|
180 | 181 | location @rhodecode_http { |
|
181 | 182 | # example of proxy.conf can be found in our docs. |
|
182 | 183 | include /etc/nginx/proxy.conf; |
|
183 | 184 | proxy_pass http://rc; |
|
184 | 185 | } |
|
185 | 186 | |
|
186 | 187 | ## Custom 502 error page. |
|
187 | 188 | ## Will be displayed while RhodeCode server is turned off |
|
188 | 189 | error_page 502 /502.html; |
|
189 | 190 | location = /502.html { |
|
190 | 191 | #root /path/to/.rccontrol/community-1/static; |
|
191 | 192 | root /path/to/.rccontrol/enterprise-1/static; |
|
192 | 193 | } |
|
193 | 194 | } No newline at end of file |
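The nginx hunk above narrows the optional file-store cache to the gravatars prefix (location /_file_store/download/gravatars) and additionally ignores the upstream Cache-Control header. One way to sanity-check the cache once that commented block is enabled is to read the X-Proxy-Cache header it adds; a minimal sketch, assuming the instance is reachable at https://rhodecode.myserver.com and using a hypothetical file id:

    import requests

    # Hypothetical artifact id under the gravatars prefix; substitute a real one.
    fid = 'gravatars/0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'
    url = 'https://rhodecode.myserver.com/_file_store/download/' + fid

    # With the commented cache location enabled, the first request is typically
    # a MISS (the object gets stored) and the second a HIT.
    for attempt in range(2):
        resp = requests.get(url)
        print(attempt, resp.status_code, resp.headers.get('X-Proxy-Cache'))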
@@ -1,52 +1,52 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import os |
|
21 | 21 | from rhodecode.apps.file_store import config_keys |
|
22 | 22 | from rhodecode.config.middleware import _bool_setting, _string_setting |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def _sanitize_settings_and_apply_defaults(settings): |
|
26 | 26 | """ |
|
27 | 27 | Set defaults, convert to python types and validate settings. |
|
28 | 28 | """ |
|
29 | 29 | _bool_setting(settings, config_keys.enabled, 'true') |
|
30 | 30 | |
|
31 | 31 | _string_setting(settings, config_keys.backend, 'local') |
|
32 | 32 | |
|
33 | 33 | default_store = os.path.join(os.path.dirname(settings['__file__']), 'upload_store') |
|
34 | 34 | _string_setting(settings, config_keys.store_path, default_store) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def includeme(config): |
|
38 | 38 | settings = config.registry.settings |
|
39 | 39 | _sanitize_settings_and_apply_defaults(settings) |
|
40 | 40 | |
|
41 | 41 | config.add_route( |
|
42 | 42 | name='upload_file', |
|
43 | 43 | pattern='/_file_store/upload') |
|
44 | 44 | config.add_route( |
|
45 | 45 | name='download_file', |
|
46 | pattern='/_file_store/download/{fid}') | |
|
46 | pattern='/_file_store/download/{fid:.*}') | |
|
47 | 47 | config.add_route( |
|
48 | 48 | name='download_file_by_token', |
|
49 | pattern='/_file_store/token-download/{_auth_token}/{fid}') | |
|
49 | pattern='/_file_store/token-download/{_auth_token}/{fid:.*}') | |
|
50 | 50 | |
|
51 | 51 | # Scan module for configuration decorators. |
|
52 | 52 | config.scan('.views', ignore='.tests') |
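The functional change in this routing hunk is the placeholder pattern: a bare {fid} normally matches a single path segment, while {fid:.*} also accepts slashes, which the new prefixed uids (for example gravatars/<uid>) require. A rough illustration of the difference using plain regular expressions, assuming Pyramid's usual one-segment default for a bare placeholder:

    import re

    # Approximate equivalents: {fid} stops at '/', {fid:.*} spans path segments.
    old_route = re.compile(r'^/_file_store/download/(?P<fid>[^/]+)$')
    new_route = re.compile(r'^/_file_store/download/(?P<fid>.*)$')

    path = '/_file_store/download/gravatars/0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'

    print(bool(old_route.match(path)))  # False - the slash inside fid breaks the match
    print(bool(new_route.match(path)))  # True  - prefix directories are allowed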
@@ -1,240 +1,261 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import time |
|
23 | import errno | |
|
23 | 24 | import shutil |
|
24 | 25 | import hashlib |
|
25 | 26 | |
|
26 | 27 | from rhodecode.lib.ext_json import json |
|
27 | 28 | from rhodecode.apps.file_store import utils |
|
28 | 29 | from rhodecode.apps.file_store.extensions import resolve_extensions |
|
29 | 30 | from rhodecode.apps.file_store.exceptions import ( |
|
30 | 31 | FileNotAllowedException, FileOverSizeException) |
|
31 | 32 | |
|
32 | 33 | METADATA_VER = 'v1' |
|
33 | 34 | |
|
34 | 35 | |
|
36 | def safe_make_dirs(dir_path): | |
|
37 | if not os.path.exists(dir_path): | |
|
38 | try: | |
|
39 | os.makedirs(dir_path) | |
|
40 | except OSError as e: | |
|
41 | if e.errno != errno.EEXIST: | |
|
42 | raise | |
|
43 | return | |
|
44 | ||
|
45 | ||
|
35 | 46 | class LocalFileStorage(object): |
|
36 | 47 | |
|
37 | 48 | @classmethod |
|
49 | def apply_counter(cls, counter, filename): | |
|
50 | name_counted = '%d-%s' % (counter, filename) | |
|
51 | return name_counted | |
|
52 | ||
|
53 | @classmethod | |
|
38 | 54 | def resolve_name(cls, name, directory): |
|
39 | 55 | """ |
|
40 | 56 | Resolves a unique name and the correct path. If a filename |
|
41 | 57 | for that path already exists then a numeric prefix with values > 0 will be |
|
42 | 58 | added, for example test.jpg -> 1-test.jpg etc.; initially the file has a 0 prefix. |
|
43 | 59 | |
|
44 | 60 | :param name: base name of file |
|
45 | 61 | :param directory: absolute directory path |
|
46 | 62 | """ |
|
47 | 63 | |
|
48 | 64 | counter = 0 |
|
49 | 65 | while True: |
|
50 | name = | |
|
66 | name_counted = cls.apply_counter(counter, name) | |
|
51 | 67 | |
|
52 | 68 | # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file |
|
53 | sub_store = cls._sub_store_from_filename(name) | |
|
69 | sub_store = cls._sub_store_from_filename(name_counted) | |
|
54 | 70 | sub_store_path = os.path.join(directory, sub_store) |
|
55 | | |
|
56 | os.makedirs(sub_store_path) | |
|
71 | safe_make_dirs(sub_store_path) | |
|
57 | 72 | |
|
58 | path = os.path.join(sub_store_path, name) | |
|
73 | path = os.path.join(sub_store_path, name_counted) | |
|
59 | 74 | if not os.path.exists(path): |
|
60 | return name, path | |
|
75 | return name_counted, path | |
|
61 | 76 | counter += 1 |
|
62 | 77 | |
|
63 | 78 | @classmethod |
|
64 | 79 | def _sub_store_from_filename(cls, filename): |
|
65 | 80 | return filename[:2] |
|
66 | 81 | |
|
67 | 82 | @classmethod |
|
68 | 83 | def calculate_path_hash(cls, file_path): |
|
69 | 84 | """ |
|
70 | 85 | Efficient calculation of file_path sha256 sum |
|
71 | 86 | |
|
72 | 87 | :param file_path: |
|
73 | 88 | :return: sha256sum |
|
74 | 89 | """ |
|
75 | 90 | digest = hashlib.sha256() |
|
76 | 91 | with open(file_path, 'rb') as f: |
|
77 | 92 | for chunk in iter(lambda: f.read(1024 * 100), b""): |
|
78 | 93 | digest.update(chunk) |
|
79 | 94 | |
|
80 | 95 | return digest.hexdigest() |
|
81 | 96 | |
|
82 | 97 | def __init__(self, base_path, extension_groups=None): |
|
83 | 98 | |
|
84 | 99 | """ |
|
85 | 100 | Local file storage |
|
86 | 101 | |
|
87 | 102 | :param base_path: the absolute base path where uploads are stored |
|
88 | 103 | :param extension_groups: extensions string |
|
89 | 104 | """ |
|
90 | 105 | |
|
91 | 106 | extension_groups = extension_groups or ['any'] |
|
92 | 107 | self.base_path = base_path |
|
93 | 108 | self.extensions = resolve_extensions([], groups=extension_groups) |
|
94 | 109 | |
|
95 | 110 | def __repr__(self): |
|
96 | 111 | return '{}@{}'.format(self.__class__, self.base_path) |
|
97 | 112 | |
|
98 | 113 | def store_path(self, filename): |
|
99 | 114 | """ |
|
100 | 115 | Returns absolute file path of the filename, joined to the |
|
101 | 116 | base_path. |
|
102 | 117 | |
|
103 | 118 | :param filename: base name of file |
|
104 | 119 | """ |
|
105 | sub_store = self._sub_store_from_filename(filename) | |
|
106 | return os.path.join(self.base_path, sub_store, filename) | |
|
120 | prefix_dir = '' | |
|
121 | if '/' in filename: | |
|
122 | prefix_dir, filename = filename.split('/') | |
|
123 | sub_store = self._sub_store_from_filename(filename) | |
|
124 | else: | |
|
125 | sub_store = self._sub_store_from_filename(filename) | |
|
126 | return os.path.join(self.base_path, prefix_dir, sub_store, filename) | |
|
107 | 127 | |
|
108 | 128 | def delete(self, filename): |
|
109 | 129 | """ |
|
110 | 130 | Deletes the filename. Filename is resolved with the |
|
111 | 131 | absolute path based on base_path. If file does not exist, |
|
112 | 132 | returns **False**, otherwise **True** |
|
113 | 133 | |
|
114 | 134 | :param filename: base name of file |
|
115 | 135 | """ |
|
116 | 136 | if self.exists(filename): |
|
117 | 137 | os.remove(self.store_path(filename)) |
|
118 | 138 | return True |
|
119 | 139 | return False |
|
120 | 140 | |
|
121 | 141 | def exists(self, filename): |
|
122 | 142 | """ |
|
123 | 143 | Checks if file exists. Resolves filename's absolute |
|
124 | 144 | path based on base_path. |
|
125 | 145 | |
|
126 | :param filename: base name of file | |
|
146 | :param filename: file_uid name of the file, e.g. 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg | |
|
127 | 147 | """ |
|
128 | 148 | return os.path.exists(self.store_path(filename)) |
|
129 | 149 | |
|
130 | 150 | def filename_allowed(self, filename, extensions=None): |
|
131 | 151 | """Checks if a filename has an allowed extension |
|
132 | 152 | |
|
133 | 153 | :param filename: base name of file |
|
134 | 154 | :param extensions: iterable of extensions (or self.extensions) |
|
135 | 155 | """ |
|
136 | 156 | _, ext = os.path.splitext(filename) |
|
137 | 157 | return self.extension_allowed(ext, extensions) |
|
138 | 158 | |
|
139 | 159 | def extension_allowed(self, ext, extensions=None): |
|
140 | 160 | """ |
|
141 | 161 | Checks if an extension is permitted. Both e.g. ".jpg" and |
|
142 | 162 | "jpg" can be passed in. Extension lookup is case-insensitive. |
|
143 | 163 | |
|
144 | 164 | :param ext: extension to check |
|
145 | 165 | :param extensions: iterable of extensions to validate against (or self.extensions) |
|
146 | 166 | """ |
|
147 | 167 | def normalize_ext(_ext): |
|
148 | 168 | if _ext.startswith('.'): |
|
149 | 169 | _ext = _ext[1:] |
|
150 | 170 | return _ext.lower() |
|
151 | 171 | |
|
152 | 172 | extensions = extensions or self.extensions |
|
153 | 173 | if not extensions: |
|
154 | 174 | return True |
|
155 | 175 | |
|
156 | 176 | ext = normalize_ext(ext) |
|
157 | 177 | |
|
158 | 178 | return ext in [normalize_ext(x) for x in extensions] |
|
159 | 179 | |
|
160 | 180 | def save_file(self, file_obj, filename, directory=None, extensions=None, |
|
161 | extra_metadata=None, max_filesize=None, **kwargs): | |
|
181 | extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs): | |
|
162 | 182 | """ |
|
163 | 183 | Saves a file object to the uploads location. |
|
164 | 184 | Returns the resolved filename, i.e. the directory + |
|
165 | 185 | the (randomized/incremented) base name. |
|
166 | 186 | |
|
167 | 187 | :param file_obj: **cgi.FieldStorage** object (or similar) |
|
168 | 188 | :param filename: original filename |
|
169 | 189 | :param directory: relative path of sub-directory |
|
170 | 190 | :param extensions: iterable of allowed extensions, if not default |
|
171 | 191 | :param max_filesize: maximum size of file that should be allowed |
|
192 | :param randomized_name: generate a random UID, or a fixed one derived from the filename | |
|
172 | 193 | :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix |
|
173 | 194 | |
|
174 | 195 | """ |
|
175 | 196 | |
|
176 | 197 | extensions = extensions or self.extensions |
|
177 | 198 | |
|
178 | 199 | if not self.filename_allowed(filename, extensions): |
|
179 | 200 | raise FileNotAllowedException() |
|
180 | 201 | |
|
181 | 202 | if directory: |
|
182 | 203 | dest_directory = os.path.join(self.base_path, directory) |
|
183 | 204 | else: |
|
184 | 205 | dest_directory = self.base_path |
|
185 | 206 | |
|
186 | | |
|
187 | os.makedirs(dest_directory) | |
|
207 | safe_make_dirs(dest_directory) | |
|
188 | 208 | |
|
189 | filename = utils.uid_filename(filename) | |
|
209 | uid_filename = utils.uid_filename(filename, randomized=randomized_name) | |
|
190 | 210 | |
|
191 | 211 | # resolve also produces special sub-dir for file optimized store |
|
192 | filename, path = self.resolve_name(filename, dest_directory) | |
|
212 | filename, path = self.resolve_name(uid_filename, dest_directory) | |
|
193 | 213 | stored_file_dir = os.path.dirname(path) |
|
194 | 214 | |
|
195 | 215 | file_obj.seek(0) |
|
196 | 216 | |
|
197 | 217 | with open(path, "wb") as dest: |
|
198 | 218 | shutil.copyfileobj(file_obj, dest) |
|
199 | 219 | |
|
200 | 220 | metadata = {} |
|
201 | 221 | if extra_metadata: |
|
202 | 222 | metadata = extra_metadata |
|
203 | 223 | |
|
204 | 224 | size = os.stat(path).st_size |
|
205 | 225 | |
|
206 | 226 | if max_filesize and size > max_filesize: |
|
207 | 227 | # free up the copied file, and raise exc |
|
208 | 228 | os.remove(path) |
|
209 | 229 | raise FileOverSizeException() |
|
210 | 230 | |
|
211 | 231 | file_hash = self.calculate_path_hash(path) |
|
212 | 232 | |
|
213 | metadata.update( | |
|
214 | | |
|
233 | metadata.update({ | |
|
234 | "filename": filename, | |
|
215 | 235 | "size": size, |
|
216 | 236 | "time": time.time(), |
|
217 | 237 | "sha256": file_hash, |
|
218 | "meta_ver": METADATA_VER | |
|
238 | "meta_ver": METADATA_VER | |
|
239 | }) | |
|
219 | 240 | |
|
220 | 241 | filename_meta = filename + '.meta' |
|
221 | 242 | with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta: |
|
222 | 243 | dest_meta.write(json.dumps(metadata)) |
|
223 | 244 | |
|
224 | 245 | if directory: |
|
225 | 246 | filename = os.path.join(directory, filename) |
|
226 | 247 | |
|
227 | 248 | return filename, metadata |
|
228 | 249 | |
|
229 | 250 | def get_metadata(self, filename): |
|
230 | 251 | """ |
|
231 | 252 | Reads JSON stored metadata for a file |
|
232 | 253 | |
|
233 | 254 | :param filename: |
|
234 | 255 | :return: |
|
235 | 256 | """ |
|
236 | 257 | filename = self.store_path(filename) |
|
237 | 258 | filename_meta = filename + '.meta' |
|
238 | 259 | |
|
239 | 260 | with open(filename_meta, "rb") as source_meta: |
|
240 | 261 | return json.loads(source_meta.read()) |
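Two behavioural changes in local_store.py are worth calling out: directory creation now goes through safe_make_dirs, which tolerates a concurrent mkdir by ignoring EEXIST, and store_path now understands an optional prefix directory in front of the uid (everything before the first '/'). A standalone sketch of the resulting on-disk layout, assuming the two-character sub_store rule is unchanged:

    import os


    def sub_store_from_filename(filename):
        # same rule as LocalFileStorage._sub_store_from_filename
        return filename[:2]


    def store_path(base_path, filename):
        # mirrors the new LocalFileStorage.store_path: an optional single
        # prefix dir (e.g. 'gravatars') is kept in front of the sub_store
        prefix_dir = ''
        if '/' in filename:
            prefix_dir, filename = filename.split('/')
        sub_store = sub_store_from_filename(filename)
        return os.path.join(base_path, prefix_dir, sub_store, filename)


    print(store_path('/store', '0-f62b2b2d.svg'))
    # /store/0-/0-f62b2b2d.svg
    print(store_path('/store', 'gravatars/0-f62b2b2d.svg'))
    # /store/gravatars/0-/0-f62b2b2d.svg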
@@ -1,54 +1,58 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import uuid |
|
23 | ||
|
23 | import StringIO | |
|
24 | 24 | import pathlib2 |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def get_file_storage(settings): |
|
28 | 28 | from rhodecode.apps.file_store.backends.local_store import LocalFileStorage |
|
29 | 29 | from rhodecode.apps.file_store import config_keys |
|
30 | 30 | store_path = settings.get(config_keys.store_path) |
|
31 | 31 | return LocalFileStorage(base_path=store_path) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def splitext(filename): |
|
35 | 35 | ext = ''.join(pathlib2.Path(filename).suffixes) |
|
36 | 36 | return filename, ext |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | def uid_filename(filename, randomized=True): |
|
40 | 40 | """ |
|
41 | 41 | Generates a randomized or stable (uuid) filename, |
|
42 | 42 | preserving the original extension. |
|
43 | 43 | |
|
44 | 44 | :param filename: the original filename |
|
45 | 45 | :param randomized: define if filename should be stable (sha1 based) or randomized |
|
46 | 46 | """ |
|
47 | 47 | |
|
48 | 48 | _, ext = splitext(filename) |
|
49 | 49 | if randomized: |
|
50 | 50 | uid = uuid.uuid4() |
|
51 | 51 | else: |
|
52 | 52 | hash_key = '{}.{}'.format(filename, 'store') |
|
53 | 53 | uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key) |
|
54 | 54 | return str(uid) + ext.lower() |
|
55 | ||
|
56 | ||
|
57 | def bytes_to_file_obj(bytes_data): | |
|
58 | return StringIO.StringIO(bytes_data) |
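utils.py gains bytes_to_file_obj, a thin wrapper that turns an in-memory string into a file-like object (StringIO, since this code base is still Python 2), so generated content can be handed to consumers such as save_file exactly like an uploaded file. A rough usage sketch under that assumption, with a plain file copy standing in for the real consumer:

    import shutil
    import tempfile

    try:
        from StringIO import StringIO   # Python 2, as imported in utils.py
    except ImportError:
        from io import StringIO         # lets the sketch run on Python 3 as well


    def bytes_to_file_obj(bytes_data):
        # mirrors the new helper in utils.py
        return StringIO(bytes_data)


    file_obj = bytes_to_file_obj(u'<svg>generated gravatar</svg>')
    file_obj.seek(0)

    with tempfile.NamedTemporaryFile(mode='w', delete=False) as dest:
        shutil.copyfileobj(file_obj, dest)
        print('wrote %s' % dest.name)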
@@ -1,195 +1,195 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import logging |
|
21 | 21 | |
|
22 | 22 | from pyramid.view import view_config |
|
23 | 23 | from pyramid.response import FileResponse |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import BaseAppView |
|
27 | 27 | from rhodecode.apps.file_store import utils |
|
28 | 28 | from rhodecode.apps.file_store.exceptions import ( |
|
29 | 29 | FileNotAllowedException, FileOverSizeException) |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib import audit_logger |
|
33 | 33 | from rhodecode.lib.auth import ( |
|
34 | 34 | CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
35 | 35 | LoginRequired) |
|
36 | 36 | from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db |
|
37 | 37 | from rhodecode.model.db import Session, FileStore, UserApiKeys |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class FileStoreView(BaseAppView): |
|
43 | 43 | upload_key = 'store_file' |
|
44 | 44 | |
|
45 | 45 | def load_default_context(self): |
|
46 | 46 | c = self._get_local_tmpl_context() |
|
47 | 47 | self.storage = utils.get_file_storage(self.request.registry.settings) |
|
48 | 48 | return c |
|
49 | 49 | |
|
50 | 50 | def _guess_type(self, file_name): |
|
51 | 51 | """ |
|
52 | 52 | Our own type guesser for mimetypes using the rich DB |
|
53 | 53 | """ |
|
54 | 54 | if not hasattr(self, 'db'): |
|
55 | 55 | self.db = get_mimetypes_db() |
|
56 | 56 | _content_type, _encoding = self.db.guess_type(file_name, strict=False) |
|
57 | 57 | return _content_type, _encoding |
|
58 | 58 | |
|
59 | 59 | def _serve_file(self, file_uid): |
|
60 | 60 | |
|
61 | 61 | if not self.storage.exists(file_uid): |
|
62 | 62 | store_path = self.storage.store_path(file_uid) |
|
63 | 63 | log.debug('File with FID:%s not found in the store under `%s`', |
|
64 | 64 | file_uid, store_path) |
|
65 | 65 | raise HTTPNotFound() |
|
66 | 66 | |
|
67 | db_obj = FileStore( | |
|
67 | db_obj = FileStore.get_by_store_uid(file_uid, safe=True) | |
|
68 | 68 | if not db_obj: |
|
69 | 69 | raise HTTPNotFound() |
|
70 | 70 | |
|
71 | 71 | # private upload for user |
|
72 | 72 | if db_obj.check_acl and db_obj.scope_user_id: |
|
73 | 73 | log.debug('Artifact: checking scope access for bound artifact user: `%s`', |
|
74 | 74 | db_obj.scope_user_id) |
|
75 | 75 | user = db_obj.user |
|
76 | 76 | if self._rhodecode_db_user.user_id != user.user_id: |
|
77 | 77 | log.warning('Access to file store object forbidden') |
|
78 | 78 | raise HTTPNotFound() |
|
79 | 79 | |
|
80 | 80 | # scoped to repository permissions |
|
81 | 81 | if db_obj.check_acl and db_obj.scope_repo_id: |
|
82 | 82 | log.debug('Artifact: checking scope access for bound artifact repo: `%s`', |
|
83 | 83 | db_obj.scope_repo_id) |
|
84 | 84 | repo = db_obj.repo |
|
85 | 85 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
86 | 86 | has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check') |
|
87 | 87 | if not has_perm: |
|
88 | 88 | log.warning('Access to file store object `%s` forbidden', file_uid) |
|
89 | 89 | raise HTTPNotFound() |
|
90 | 90 | |
|
91 | 91 | # scoped to repository group permissions |
|
92 | 92 | if db_obj.check_acl and db_obj.scope_repo_group_id: |
|
93 | 93 | log.debug('Artifact: checking scope access for bound artifact repo group: `%s`', |
|
94 | 94 | db_obj.scope_repo_group_id) |
|
95 | 95 | repo_group = db_obj.repo_group |
|
96 | 96 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
97 | 97 | has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check') |
|
98 | 98 | if not has_perm: |
|
99 | 99 | log.warning('Access to file store object `%s` forbidden', file_uid) |
|
100 | 100 | raise HTTPNotFound() |
|
101 | 101 | |
|
102 | 102 | FileStore.bump_access_counter(file_uid) |
|
103 | 103 | |
|
104 | 104 | file_path = self.storage.store_path(file_uid) |
|
105 | 105 | content_type = 'application/octet-stream' |
|
106 | 106 | content_encoding = None |
|
107 | 107 | |
|
108 | 108 | _content_type, _encoding = self._guess_type(file_path) |
|
109 | 109 | if _content_type: |
|
110 | 110 | content_type = _content_type |
|
111 | 111 | |
|
112 | 112 | # For file store we don't submit any session data, this logic tells the |
|
113 | 113 | # Session lib to skip it |
|
114 | 114 | setattr(self.request, '_file_response', True) |
|
115 | 115 | return FileResponse(file_path, request=self.request, |
|
116 | 116 | content_type=content_type, content_encoding=content_encoding) |
|
117 | 117 | |
|
118 | 118 | @LoginRequired() |
|
119 | 119 | @NotAnonymous() |
|
120 | 120 | @CSRFRequired() |
|
121 | 121 | @view_config(route_name='upload_file', request_method='POST', renderer='json_ext') |
|
122 | 122 | def upload_file(self): |
|
123 | 123 | self.load_default_context() |
|
124 | 124 | file_obj = self.request.POST.get(self.upload_key) |
|
125 | 125 | |
|
126 | 126 | if file_obj is None: |
|
127 | 127 | return {'store_fid': None, |
|
128 | 128 | 'access_path': None, |
|
129 | 129 | 'error': '{} data field is missing'.format(self.upload_key)} |
|
130 | 130 | |
|
131 | 131 | if not hasattr(file_obj, 'filename'): |
|
132 | 132 | return {'store_fid': None, |
|
133 | 133 | 'access_path': None, |
|
134 | 134 | 'error': 'filename cannot be read from the data field'} |
|
135 | 135 | |
|
136 | 136 | filename = file_obj.filename |
|
137 | 137 | |
|
138 | 138 | metadata = { |
|
139 | 139 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
140 | 140 | 'user_id': self._rhodecode_user.user_id, |
|
141 | 141 | 'ip': self._rhodecode_user.ip_addr}} |
|
142 | 142 | try: |
|
143 | 143 | store_uid, metadata = self.storage.save_file( |
|
144 | 144 | file_obj.file, filename, extra_metadata=metadata) |
|
145 | 145 | except FileNotAllowedException: |
|
146 | 146 | return {'store_fid': None, |
|
147 | 147 | 'access_path': None, |
|
148 | 148 | 'error': 'File {} is not allowed.'.format(filename)} |
|
149 | 149 | |
|
150 | 150 | except FileOverSizeException: |
|
151 | 151 | return {'store_fid': None, |
|
152 | 152 | 'access_path': None, |
|
153 | 153 | 'error': 'File {} is exceeding allowed limit.'.format(filename)} |
|
154 | 154 | |
|
155 | 155 | try: |
|
156 | 156 | entry = FileStore.create( |
|
157 | 157 | file_uid=store_uid, filename=metadata["filename"], |
|
158 | 158 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
159 | 159 | file_description=u'upload attachment', |
|
160 | 160 | check_acl=False, user_id=self._rhodecode_user.user_id |
|
161 | 161 | ) |
|
162 | 162 | Session().add(entry) |
|
163 | 163 | Session().commit() |
|
164 | 164 | log.debug('Stored upload in DB as %s', entry) |
|
165 | 165 | except Exception: |
|
166 | 166 | log.exception('Failed to store file %s', filename) |
|
167 | 167 | return {'store_fid': None, |
|
168 | 168 | 'access_path': None, |
|
169 | 169 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
170 | 170 | |
|
171 | 171 | return {'store_fid': store_uid, |
|
172 | 172 | 'access_path': h.route_path('download_file', fid=store_uid)} |
|
173 | 173 | |
|
174 | 174 | # ACL is checked by scopes, if no scope the file is accessible to all |
|
175 | 175 | @view_config(route_name='download_file') |
|
176 | 176 | def download_file(self): |
|
177 | 177 | self.load_default_context() |
|
178 | 178 | file_uid = self.request.matchdict['fid'] |
|
179 | 179 | log.debug('Requesting FID:%s from store %s', file_uid, self.storage) |
|
180 | 180 | return self._serve_file(file_uid) |
|
181 | 181 | |
|
182 | 182 | # in addition to @LoginRequired ACL is checked by scopes |
|
183 | 183 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD]) |
|
184 | 184 | @NotAnonymous() |
|
185 | 185 | @view_config(route_name='download_file_by_token') |
|
186 | 186 | def download_file_by_token(self): |
|
187 | 187 | """ |
|
188 | 188 | Special view that allows accessing the download file via a special URL that |
|
189 | 189 | carries the auth token inside the URL itself. |
|
190 | 190 | |
|
191 | 191 | http://example.com/_file_store/token-download/TOKEN/FILE_UID |
|
192 | 192 | """ |
|
193 | 193 | self.load_default_context() |
|
194 | 194 | file_uid = self.request.matchdict['fid'] |
|
195 | 195 | return self._serve_file(file_uid) |
|
1 | NO CONTENT: modified file | |