##// END OF EJS Templates
release: Merge default into stable for release preparation
super-admin -
r4729:f8c5eac8 merge stable
parent child Browse files
Show More
@@ -0,0 +1,55 b''
1 |RCE| 4.26.0 |RNS|
2 ------------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2021-08-06
8
9
10 New Features
11 ^^^^^^^^^^^^
12
13
14
15 General
16 ^^^^^^^
17
18 - Caches: introduce invalidation as a safer way to expire keys; deleting them is more problematic.
19 - Caches: improved locking problems with the new distributed lock cache backend.
20 - Pull requests: optimize db transaction logic.
21 This should prevent potential problems with locking of pull-requests that have a lot of reviewers.
22 - Pull requests: updates now use retry logic in case an update is locked or fails due to concurrency issues.
23 - Pull requests: allow forced state change to repo admins too.
24 - SSH: handle subrepos better when using SSH communication.
25
26
27 Security
28 ^^^^^^^^
29
30 - Drafts comments: don't allow to view history for others than owner.
31 - Validators: apply username validator to prevent bad values being searched in DB, and potential XSS payload sent via validators.
32
33
34 Performance
35 ^^^^^^^^^^^
36
37 - SSH: use pre-compiled backends for faster matching of vcs detection.
38 - Routing: don't check channelstream connections for faster handling of this route.
39 - Routing: skip vcs detection for ops views so they are not checked against the vcs operations.
40
41
42 Fixes
43 ^^^^^
44
45 - Permissions: flush all users permissions when creating a new user group.
46 - Repos: recover properly from bad extraction of repo_id from URL and DB calls.
47 - Comments history: fixed fetching of history for comments.
48 - Pull requests: fix potential crash on providing a wrong order-by type column.
49 - Caches: report damaged DB on key iterations too, not only on the GET call.
50 - API: added proper full permission flush on API calls when creating repos and repo groups.
51
52 Upgrade notes
53 ^^^^^^^^^^^^^
54
55 - Scheduled release 4.26.0.
@@ -1,6 +1,5 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.25.2
2 current_version = 4.26.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:rhodecode/VERSION]
5 [bumpversion:file:rhodecode/VERSION]
6
@@ -1,33 +1,28 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:rc_tools_pinned]
7 [task:rc_tools_pinned]
8 done = true
9
8
10 [task:fixes_on_stable]
9 [task:fixes_on_stable]
11 done = true
12
10
13 [task:pip2nix_generated]
11 [task:pip2nix_generated]
14 done = true
15
12
16 [task:changelog_updated]
13 [task:changelog_updated]
17 done = true
18
14
19 [task:generate_api_docs]
15 [task:generate_api_docs]
20 done = true
16
17 [task:updated_translation]
21
18
22 [release]
19 [release]
23 state = prepared
20 state = in_progress
24 version = 4.25.2
21 version = 4.26.0
25
26 [task:updated_translation]
27
22
28 [task:generate_js_routes]
23 [task:generate_js_routes]
29
24
30 [task:updated_trial_license]
25 [task:updated_trial_license]
31
26
32 [task:generate_oss_licenses]
27 [task:generate_oss_licenses]
33
28
@@ -1,796 +1,800 b''
1 ## -*- coding: utf-8 -*-
1 ## -*- coding: utf-8 -*-
2
2
3 ; #########################################
3 ; #########################################
4 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; RHODECODE COMMUNITY EDITION CONFIGURATION
5 ; #########################################
5 ; #########################################
6
6
7 [DEFAULT]
7 [DEFAULT]
8 ; Debug flag sets all loggers to debug, and enables request tracking
8 ; Debug flag sets all loggers to debug, and enables request tracking
9 debug = false
9 debug = false
10
10
11 ; ########################################################################
11 ; ########################################################################
12 ; EMAIL CONFIGURATION
12 ; EMAIL CONFIGURATION
13 ; These settings will be used by the RhodeCode mailing system
13 ; These settings will be used by the RhodeCode mailing system
14 ; ########################################################################
14 ; ########################################################################
15
15
16 ; prefix all emails subjects with given prefix, helps filtering out emails
16 ; prefix all emails subjects with given prefix, helps filtering out emails
17 #email_prefix = [RhodeCode]
17 #email_prefix = [RhodeCode]
18
18
19 ; email FROM address all mails will be sent
19 ; email FROM address all mails will be sent
20 #app_email_from = rhodecode-noreply@localhost
20 #app_email_from = rhodecode-noreply@localhost
21
21
22 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
23 #smtp_username =
23 #smtp_username =
24 #smtp_password =
24 #smtp_password =
25 #smtp_port =
25 #smtp_port =
26 #smtp_use_tls = false
26 #smtp_use_tls = false
27 #smtp_use_ssl = true
27 #smtp_use_ssl = true
28
28
29 [server:main]
29 [server:main]
30 ; COMMON HOST/IP CONFIG
30 ; COMMON HOST/IP CONFIG
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 5000
32 port = 5000
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
39 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Sets the number of process workers. More workers means more concurrent connections
44 ; Sets the number of process workers. More workers means more concurrent connections
45 ; RhodeCode can handle at the same time. Each additional worker also it increases
45 ; RhodeCode can handle at the same time. Each additional worker also it increases
46 ; memory usage as each has it's own set of caches.
46 ; memory usage as each has it's own set of caches.
47 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more
47 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more
48 ; than 8-10 unless for really big deployments .e.g 700-1000 users.
48 ; than 8-10 unless for really big deployments .e.g 700-1000 users.
49 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
49 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
50 ; when using more than 1 worker.
50 ; when using more than 1 worker.
51 workers = 2
51 workers = 2
52
52
53 ; Gunicorn access log level
53 ; Gunicorn access log level
54 loglevel = info
54 loglevel = info
55
55
56 ; Process name visible in process list
56 ; Process name visible in process list
57 proc_name = rhodecode
57 proc_name = rhodecode
58
58
59 ; Type of worker class, one of `sync`, `gevent`
59 ; Type of worker class, one of `sync`, `gevent`
60 ; Recommended type is `gevent`
60 ; Recommended type is `gevent`
61 worker_class = gevent
61 worker_class = gevent
62
62
63 ; The maximum number of simultaneous clients per worker. Valid only for gevent
63 ; The maximum number of simultaneous clients per worker. Valid only for gevent
64 worker_connections = 10
64 worker_connections = 10
65
65
66 ; Max number of requests that worker will handle before being gracefully restarted.
66 ; Max number of requests that worker will handle before being gracefully restarted.
67 ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once.
67 ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once.
68 max_requests = 1000
68 max_requests = 1000
69 max_requests_jitter = 30
69 max_requests_jitter = 30
70
70
71 ; Amount of time a worker can spend with handling a request before it
71 ; Amount of time a worker can spend with handling a request before it
72 ; gets killed and restarted. By default set to 21600 (6hrs)
72 ; gets killed and restarted. By default set to 21600 (6hrs)
73 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
73 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
74 timeout = 21600
74 timeout = 21600
75
75
76 ; The maximum size of HTTP request line in bytes.
76 ; The maximum size of HTTP request line in bytes.
77 ; 0 for unlimited
77 ; 0 for unlimited
78 limit_request_line = 0
78 limit_request_line = 0
79
79
80 ; Limit the number of HTTP headers fields in a request.
80 ; Limit the number of HTTP headers fields in a request.
81 ; By default this value is 100 and can't be larger than 32768.
81 ; By default this value is 100 and can't be larger than 32768.
82 limit_request_fields = 32768
82 limit_request_fields = 32768
83
83
84 ; Limit the allowed size of an HTTP request header field.
84 ; Limit the allowed size of an HTTP request header field.
85 ; Value is a positive number or 0.
85 ; Value is a positive number or 0.
86 ; Setting it to 0 will allow unlimited header field sizes.
86 ; Setting it to 0 will allow unlimited header field sizes.
87 limit_request_field_size = 0
87 limit_request_field_size = 0
88
88
89 ; Timeout for graceful workers restart.
89 ; Timeout for graceful workers restart.
90 ; After receiving a restart signal, workers have this much time to finish
90 ; After receiving a restart signal, workers have this much time to finish
91 ; serving requests. Workers still alive after the timeout (starting from the
91 ; serving requests. Workers still alive after the timeout (starting from the
92 ; receipt of the restart signal) are force killed.
92 ; receipt of the restart signal) are force killed.
93 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
93 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
94 graceful_timeout = 3600
94 graceful_timeout = 3600
95
95
96 # The number of seconds to wait for requests on a Keep-Alive connection.
96 # The number of seconds to wait for requests on a Keep-Alive connection.
97 # Generally set in the 1-5 seconds range.
97 # Generally set in the 1-5 seconds range.
98 keepalive = 2
98 keepalive = 2
99
99
100 ; Maximum memory usage that each worker can use before it will receive a
100 ; Maximum memory usage that each worker can use before it will receive a
101 ; graceful restart signal 0 = memory monitoring is disabled
101 ; graceful restart signal 0 = memory monitoring is disabled
102 ; Examples: 268435456 (256MB), 536870912 (512MB)
102 ; Examples: 268435456 (256MB), 536870912 (512MB)
103 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
103 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
104 memory_max_usage = 0
104 memory_max_usage = 0
105
105
106 ; How often in seconds to check for memory usage for each gunicorn worker
106 ; How often in seconds to check for memory usage for each gunicorn worker
107 memory_usage_check_interval = 60
107 memory_usage_check_interval = 60
108
108
109 ; Threshold value for which we don't recycle worker if GarbageCollection
109 ; Threshold value for which we don't recycle worker if GarbageCollection
110 ; frees up enough resources. Before each restart we try to run GC on worker
110 ; frees up enough resources. Before each restart we try to run GC on worker
111 ; in case we get enough free memory after that, restart will not happen.
111 ; in case we get enough free memory after that, restart will not happen.
112 memory_usage_recovery_threshold = 0.8
112 memory_usage_recovery_threshold = 0.8
113
113
114
114
115 ; Prefix middleware for RhodeCode.
115 ; Prefix middleware for RhodeCode.
116 ; recommended when using proxy setup.
116 ; recommended when using proxy setup.
117 ; allows to set RhodeCode under a prefix in server.
117 ; allows to set RhodeCode under a prefix in server.
118 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
118 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
119 ; And set your prefix like: `prefix = /custom_prefix`
119 ; And set your prefix like: `prefix = /custom_prefix`
120 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
120 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
121 ; to make your cookies only work on prefix url
121 ; to make your cookies only work on prefix url
122 [filter:proxy-prefix]
122 [filter:proxy-prefix]
123 use = egg:PasteDeploy#prefix
123 use = egg:PasteDeploy#prefix
124 prefix = /
124 prefix = /
125
125
126 [app:main]
126 [app:main]
127 ; The %(here)s variable will be replaced with the absolute path of parent directory
127 ; The %(here)s variable will be replaced with the absolute path of parent directory
128 ; of this file
128 ; of this file
129 ; In addition ENVIRONMENT variables usage is possible, e.g
129 ; In addition ENVIRONMENT variables usage is possible, e.g
130 ; sqlalchemy.db1.url = {ENV_RC_DB_URL}
130 ; sqlalchemy.db1.url = {ENV_RC_DB_URL}
131
131
132 use = egg:rhodecode-enterprise-ce
132 use = egg:rhodecode-enterprise-ce
133
133
134 ; enable proxy prefix middleware, defined above
134 ; enable proxy prefix middleware, defined above
135 #filter-with = proxy-prefix
135 #filter-with = proxy-prefix
136
136
137 ; encryption key used to encrypt social plugin tokens,
137 ; encryption key used to encrypt social plugin tokens,
138 ; remote_urls with credentials etc, if not set it defaults to
138 ; remote_urls with credentials etc, if not set it defaults to
139 ; `beaker.session.secret`
139 ; `beaker.session.secret`
140 #rhodecode.encrypted_values.secret =
140 #rhodecode.encrypted_values.secret =
141
141
142 ; decryption strict mode (enabled by default). It controls if decryption raises
142 ; decryption strict mode (enabled by default). It controls if decryption raises
143 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
144 #rhodecode.encrypted_values.strict = false
144 #rhodecode.encrypted_values.strict = false
145
145
146 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
146 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
147 ; fernet is safer, and we strongly recommend switching to it.
147 ; fernet is safer, and we strongly recommend switching to it.
148 ; Due to backward compatibility aes is used as default.
148 ; Due to backward compatibility aes is used as default.
149 #rhodecode.encrypted_values.algorithm = fernet
149 #rhodecode.encrypted_values.algorithm = fernet
150
150
151 ; Return gzipped responses from RhodeCode (static files/application)
151 ; Return gzipped responses from RhodeCode (static files/application)
152 gzip_responses = false
152 gzip_responses = false
153
153
154 ; Auto-generate javascript routes file on startup
154 ; Auto-generate javascript routes file on startup
155 generate_js_files = false
155 generate_js_files = false
156
156
157 ; System global default language.
157 ; System global default language.
158 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
158 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
159 lang = en
159 lang = en
160
160
161 ; Perform a full repository scan and import on each server start.
161 ; Perform a full repository scan and import on each server start.
162 ; Settings this to true could lead to very long startup time.
162 ; Settings this to true could lead to very long startup time.
163 startup.import_repos = false
163 startup.import_repos = false
164
164
165 ; Uncomment and set this path to use archive download cache.
165 ; Uncomment and set this path to use archive download cache.
166 ; Once enabled, generated archives will be cached at this location
166 ; Once enabled, generated archives will be cached at this location
167 ; and served from the cache during subsequent requests for the same archive of
167 ; and served from the cache during subsequent requests for the same archive of
168 ; the repository.
168 ; the repository.
169 #archive_cache_dir = /tmp/tarballcache
169 #archive_cache_dir = /tmp/tarballcache
170
170
171 ; URL at which the application is running. This is used for Bootstrapping
171 ; URL at which the application is running. This is used for Bootstrapping
172 ; requests in context when no web request is available. Used in ishell, or
172 ; requests in context when no web request is available. Used in ishell, or
173 ; SSH calls. Set this for events to receive proper url for SSH calls.
173 ; SSH calls. Set this for events to receive proper url for SSH calls.
174 app.base_url = http://rhodecode.local
174 app.base_url = http://rhodecode.local
175
175
176 ; Unique application ID. Should be a random unique string for security.
176 ; Unique application ID. Should be a random unique string for security.
177 app_instance_uuid = rc-production
177 app_instance_uuid = rc-production
178
178
179 ; Cut off limit for large diffs (size in bytes). If overall diff size on
179 ; Cut off limit for large diffs (size in bytes). If overall diff size on
180 ; commit, or pull request exceeds this limit this diff will be displayed
180 ; commit, or pull request exceeds this limit this diff will be displayed
181 ; partially. E.g 512000 == 512Kb
181 ; partially. E.g 512000 == 512Kb
182 cut_off_limit_diff = 512000
182 cut_off_limit_diff = 512000
183
183
184 ; Cut off limit for large files inside diffs (size in bytes). Each individual
184 ; Cut off limit for large files inside diffs (size in bytes). Each individual
185 ; file inside diff which exceeds this limit will be displayed partially.
185 ; file inside diff which exceeds this limit will be displayed partially.
186 ; E.g 128000 == 128Kb
186 ; E.g 128000 == 128Kb
187 cut_off_limit_file = 128000
187 cut_off_limit_file = 128000
188
188
189 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
189 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
190 vcs_full_cache = true
190 vcs_full_cache = true
191
191
192 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
192 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
193 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
193 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
194 force_https = false
194 force_https = false
195
195
196 ; use Strict-Transport-Security headers
196 ; use Strict-Transport-Security headers
197 use_htsts = false
197 use_htsts = false
198
198
199 ; Set to true if your repos are exposed using the dumb protocol
199 ; Set to true if your repos are exposed using the dumb protocol
200 git_update_server_info = false
200 git_update_server_info = false
201
201
202 ; RSS/ATOM feed options
202 ; RSS/ATOM feed options
203 rss_cut_off_limit = 256000
203 rss_cut_off_limit = 256000
204 rss_items_per_page = 10
204 rss_items_per_page = 10
205 rss_include_diff = false
205 rss_include_diff = false
206
206
207 ; gist URL alias, used to create nicer urls for gist. This should be an
207 ; gist URL alias, used to create nicer urls for gist. This should be an
208 ; url that does rewrites to _admin/gists/{gistid}.
208 ; url that does rewrites to _admin/gists/{gistid}.
209 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
209 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
210 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
210 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
211 gist_alias_url =
211 gist_alias_url =
212
212
213 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
213 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
214 ; used for access.
214 ; used for access.
215 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
215 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
216 ; came from the the logged in user who own this authentication token.
216 ; came from the the logged in user who own this authentication token.
217 ; Additionally @TOKEN syntax can be used to bound the view to specific
217 ; Additionally @TOKEN syntax can be used to bound the view to specific
218 ; authentication token. Such view would be only accessible when used together
218 ; authentication token. Such view would be only accessible when used together
219 ; with this authentication token
219 ; with this authentication token
220 ; list of all views can be found under `/_admin/permissions/auth_token_access`
220 ; list of all views can be found under `/_admin/permissions/auth_token_access`
221 ; The list should be "," separated and on a single line.
221 ; The list should be "," separated and on a single line.
222 ; Most common views to enable:
222 ; Most common views to enable:
223
223
224 # RepoCommitsView:repo_commit_download
224 # RepoCommitsView:repo_commit_download
225 # RepoCommitsView:repo_commit_patch
225 # RepoCommitsView:repo_commit_patch
226 # RepoCommitsView:repo_commit_raw
226 # RepoCommitsView:repo_commit_raw
227 # RepoCommitsView:repo_commit_raw@TOKEN
227 # RepoCommitsView:repo_commit_raw@TOKEN
228 # RepoFilesView:repo_files_diff
228 # RepoFilesView:repo_files_diff
229 # RepoFilesView:repo_archivefile
229 # RepoFilesView:repo_archivefile
230 # RepoFilesView:repo_file_raw
230 # RepoFilesView:repo_file_raw
231 # GistView:*
231 # GistView:*
232 api_access_controllers_whitelist =
232 api_access_controllers_whitelist =
233
233
234 ; Default encoding used to convert from and to unicode
234 ; Default encoding used to convert from and to unicode
235 ; can be also a comma separated list of encoding in case of mixed encodings
235 ; can be also a comma separated list of encoding in case of mixed encodings
236 default_encoding = UTF-8
236 default_encoding = UTF-8
237
237
238 ; instance-id prefix
238 ; instance-id prefix
239 ; a prefix key for this instance used for cache invalidation when running
239 ; a prefix key for this instance used for cache invalidation when running
240 ; multiple instances of RhodeCode, make sure it's globally unique for
240 ; multiple instances of RhodeCode, make sure it's globally unique for
241 ; all running RhodeCode instances. Leave empty if you don't use it
241 ; all running RhodeCode instances. Leave empty if you don't use it
242 instance_id =
242 instance_id =
243
243
244 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
244 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
245 ; of an authentication plugin also if it is disabled by it's settings.
245 ; of an authentication plugin also if it is disabled by it's settings.
246 ; This could be useful if you are unable to log in to the system due to broken
246 ; This could be useful if you are unable to log in to the system due to broken
247 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
247 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
248 ; module to log in again and fix the settings.
248 ; module to log in again and fix the settings.
249 ; Available builtin plugin IDs (hash is part of the ID):
249 ; Available builtin plugin IDs (hash is part of the ID):
250 ; egg:rhodecode-enterprise-ce#rhodecode
250 ; egg:rhodecode-enterprise-ce#rhodecode
251 ; egg:rhodecode-enterprise-ce#pam
251 ; egg:rhodecode-enterprise-ce#pam
252 ; egg:rhodecode-enterprise-ce#ldap
252 ; egg:rhodecode-enterprise-ce#ldap
253 ; egg:rhodecode-enterprise-ce#jasig_cas
253 ; egg:rhodecode-enterprise-ce#jasig_cas
254 ; egg:rhodecode-enterprise-ce#headers
254 ; egg:rhodecode-enterprise-ce#headers
255 ; egg:rhodecode-enterprise-ce#crowd
255 ; egg:rhodecode-enterprise-ce#crowd
256
256
257 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
258
258
259 ; Flag to control loading of legacy plugins in py:/path format
259 ; Flag to control loading of legacy plugins in py:/path format
260 auth_plugin.import_legacy_plugins = true
260 auth_plugin.import_legacy_plugins = true
261
261
262 ; alternative return HTTP header for failed authentication. Default HTTP
262 ; alternative return HTTP header for failed authentication. Default HTTP
263 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
263 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
264 ; handling that causing a series of failed authentication calls.
264 ; handling that causing a series of failed authentication calls.
265 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
265 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
266 ; This will be served instead of default 401 on bad authentication
266 ; This will be served instead of default 401 on bad authentication
267 auth_ret_code =
267 auth_ret_code =
268
268
269 ; use special detection method when serving auth_ret_code, instead of serving
269 ; use special detection method when serving auth_ret_code, instead of serving
270 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
270 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
271 ; and then serve auth_ret_code to clients
271 ; and then serve auth_ret_code to clients
272 auth_ret_code_detection = false
272 auth_ret_code_detection = false
273
273
274 ; locking return code. When repository is locked return this HTTP code. 2XX
274 ; locking return code. When repository is locked return this HTTP code. 2XX
275 ; codes don't break the transactions while 4XX codes do
275 ; codes don't break the transactions while 4XX codes do
276 lock_ret_code = 423
276 lock_ret_code = 423
277
277
278 ; allows to change the repository location in settings page
278 ; allows to change the repository location in settings page
279 allow_repo_location_change = true
279 allow_repo_location_change = true
280
280
281 ; allows to setup custom hooks in settings page
281 ; allows to setup custom hooks in settings page
282 allow_custom_hooks_settings = true
282 allow_custom_hooks_settings = true
283
283
284 ; Generated license token required for EE edition license.
284 ; Generated license token required for EE edition license.
285 ; New generated token value can be found in Admin > settings > license page.
285 ; New generated token value can be found in Admin > settings > license page.
286 license_token =
286 license_token =
287
287
288 ; This flag hides sensitive information on the license page such as token, and license data
288 ; This flag hides sensitive information on the license page such as token, and license data
289 license.hide_license_info = false
289 license.hide_license_info = false
290
290
291 ; supervisor connection uri, for managing supervisor and logs.
291 ; supervisor connection uri, for managing supervisor and logs.
292 supervisor.uri =
292 supervisor.uri =
293
293
294 ; supervisord group name/id we only want this RC instance to handle
294 ; supervisord group name/id we only want this RC instance to handle
295 supervisor.group_id = prod
295 supervisor.group_id = prod
296
296
297 ; Display extended labs settings
297 ; Display extended labs settings
298 labs_settings_active = true
298 labs_settings_active = true
299
299
300 ; Custom exception store path, defaults to TMPDIR
300 ; Custom exception store path, defaults to TMPDIR
301 ; This is used to store exception from RhodeCode in shared directory
301 ; This is used to store exception from RhodeCode in shared directory
302 #exception_tracker.store_path =
302 #exception_tracker.store_path =
303
303
304 ; Send email with exception details when it happens
304 ; Send email with exception details when it happens
305 #exception_tracker.send_email = false
305 #exception_tracker.send_email = false
306
306
307 ; Comma separated list of recipients for exception emails,
307 ; Comma separated list of recipients for exception emails,
308 ; e.g admin@rhodecode.com,devops@rhodecode.com
308 ; e.g admin@rhodecode.com,devops@rhodecode.com
309 ; Can be left empty, then emails will be sent to ALL super-admins
309 ; Can be left empty, then emails will be sent to ALL super-admins
310 #exception_tracker.send_email_recipients =
310 #exception_tracker.send_email_recipients =
311
311
312 ; optional prefix to Add to email Subject
312 ; optional prefix to Add to email Subject
313 #exception_tracker.email_prefix = [RHODECODE ERROR]
313 #exception_tracker.email_prefix = [RHODECODE ERROR]
314
314
315 ; File store configuration. This is used to store and serve uploaded files
315 ; File store configuration. This is used to store and serve uploaded files
316 file_store.enabled = true
316 file_store.enabled = true
317
317
318 ; Storage backend, available options are: local
318 ; Storage backend, available options are: local
319 file_store.backend = local
319 file_store.backend = local
320
320
321 ; path to store the uploaded binaries
321 ; path to store the uploaded binaries
322 file_store.storage_path = %(here)s/data/file_store
322 file_store.storage_path = %(here)s/data/file_store
323
323
324
324
325 ; #############
325 ; #############
326 ; CELERY CONFIG
326 ; CELERY CONFIG
327 ; #############
327 ; #############
328
328
329 ; manually run celery: /path/to/celery worker -E --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
329 ; manually run celery: /path/to/celery worker -E --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
330
330
331 use_celery = false
331 use_celery = false
332
332
333 ; connection url to the message broker (default redis)
333 ; connection url to the message broker (default redis)
334 celery.broker_url = redis://localhost:6379/8
334 celery.broker_url = redis://localhost:6379/8
335
335
336 ; rabbitmq example
336 ; rabbitmq example
337 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
337 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
338
338
339 ; maximum tasks to execute before worker restart
339 ; maximum tasks to execute before worker restart
340 celery.max_tasks_per_child = 100
340 celery.max_tasks_per_child = 100
341
341
342 ; tasks will never be sent to the queue, but executed locally instead.
342 ; tasks will never be sent to the queue, but executed locally instead.
343 celery.task_always_eager = false
343 celery.task_always_eager = false
344
344
345 ; #############
345 ; #############
346 ; DOGPILE CACHE
346 ; DOGPILE CACHE
347 ; #############
347 ; #############
348
348
349 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
349 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
350 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
350 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
351 cache_dir = %(here)s/data
351 cache_dir = %(here)s/data
352
352
353 ; *********************************************
353 ; *********************************************
354 ; `sql_cache_short` cache for heavy SQL queries
354 ; `sql_cache_short` cache for heavy SQL queries
355 ; Only supported backend is `memory_lru`
355 ; Only supported backend is `memory_lru`
356 ; *********************************************
356 ; *********************************************
357 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
357 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
358 rc_cache.sql_cache_short.expiration_time = 30
358 rc_cache.sql_cache_short.expiration_time = 30
359
359
360
360
361 ; *****************************************************
361 ; *****************************************************
362 ; `cache_repo_longterm` cache for repo object instances
362 ; `cache_repo_longterm` cache for repo object instances
363 ; Only supported backend is `memory_lru`
363 ; Only supported backend is `memory_lru`
364 ; *****************************************************
364 ; *****************************************************
365 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
365 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
366 ; by default we use 30 Days, cache is still invalidated on push
366 ; by default we use 30 Days, cache is still invalidated on push
367 rc_cache.cache_repo_longterm.expiration_time = 2592000
367 rc_cache.cache_repo_longterm.expiration_time = 2592000
368 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
368 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
369 rc_cache.cache_repo_longterm.max_size = 10000
369 rc_cache.cache_repo_longterm.max_size = 10000
370
370
371
371
372 ; *************************************************
372 ; *************************************************
373 ; `cache_perms` cache for permission tree, auth TTL
373 ; `cache_perms` cache for permission tree, auth TTL
374 ; *************************************************
374 ; *************************************************
375 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
375 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
376 rc_cache.cache_perms.expiration_time = 300
376 rc_cache.cache_perms.expiration_time = 300
377 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
377 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
378 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms.db
378 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms.db
379
379
380 ; alternative `cache_perms` redis backend with distributed lock
380 ; alternative `cache_perms` redis backend with distributed lock
381 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
381 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
382 #rc_cache.cache_perms.expiration_time = 300
382 #rc_cache.cache_perms.expiration_time = 300
383
383
384 ; redis_expiration_time needs to be greater than expiration_time
384 ; redis_expiration_time needs to be greater than expiration_time
385 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
385 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
386
386
387 #rc_cache.cache_perms.arguments.host = localhost
387 #rc_cache.cache_perms.arguments.host = localhost
388 #rc_cache.cache_perms.arguments.port = 6379
388 #rc_cache.cache_perms.arguments.port = 6379
389 #rc_cache.cache_perms.arguments.db = 0
389 #rc_cache.cache_perms.arguments.db = 0
390 #rc_cache.cache_perms.arguments.socket_timeout = 30
390 #rc_cache.cache_perms.arguments.socket_timeout = 30
391 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
391 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
392 #rc_cache.cache_perms.arguments.distributed_lock = true
392 #rc_cache.cache_perms.arguments.distributed_lock = true
393
393
394 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
395 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
394
396
395 ; ***************************************************
397 ; ***************************************************
396 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
398 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
397 ; ***************************************************
399 ; ***************************************************
398 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
400 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
399 rc_cache.cache_repo.expiration_time = 2592000
401 rc_cache.cache_repo.expiration_time = 2592000
400 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
402 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
401 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo.db
403 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo.db
402
404
403 ; alternative `cache_repo` redis backend with distributed lock
405 ; alternative `cache_repo` redis backend with distributed lock
404 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
406 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
405 #rc_cache.cache_repo.expiration_time = 2592000
407 #rc_cache.cache_repo.expiration_time = 2592000
406
408
407 ; redis_expiration_time needs to be greater than expiration_time
409 ; redis_expiration_time needs to be greater than expiration_time
408 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
410 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
409
411
410 #rc_cache.cache_repo.arguments.host = localhost
412 #rc_cache.cache_repo.arguments.host = localhost
411 #rc_cache.cache_repo.arguments.port = 6379
413 #rc_cache.cache_repo.arguments.port = 6379
412 #rc_cache.cache_repo.arguments.db = 1
414 #rc_cache.cache_repo.arguments.db = 1
413 #rc_cache.cache_repo.arguments.socket_timeout = 30
415 #rc_cache.cache_repo.arguments.socket_timeout = 30
414 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
416 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
415 #rc_cache.cache_repo.arguments.distributed_lock = true
417 #rc_cache.cache_repo.arguments.distributed_lock = true
416
418
419 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
420 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
417
421
418 ; ##############
422 ; ##############
419 ; BEAKER SESSION
423 ; BEAKER SESSION
420 ; ##############
424 ; ##############
421
425
422 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
426 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
423 ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
427 ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
424 ; Fastest ones are Redis and ext:database
428 ; Fastest ones are Redis and ext:database
425 beaker.session.type = file
429 beaker.session.type = file
426 beaker.session.data_dir = %(here)s/data/sessions
430 beaker.session.data_dir = %(here)s/data/sessions
427
431
428 ; Redis based sessions
432 ; Redis based sessions
429 #beaker.session.type = ext:redis
433 #beaker.session.type = ext:redis
430 #beaker.session.url = redis://127.0.0.1:6379/2
434 #beaker.session.url = redis://127.0.0.1:6379/2
431
435
432 ; DB based session, fast, and allows easy management over logged in users
436 ; DB based session, fast, and allows easy management over logged in users
433 #beaker.session.type = ext:database
437 #beaker.session.type = ext:database
434 #beaker.session.table_name = db_session
438 #beaker.session.table_name = db_session
435 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
439 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
436 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
440 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
437 #beaker.session.sa.pool_recycle = 3600
441 #beaker.session.sa.pool_recycle = 3600
438 #beaker.session.sa.echo = false
442 #beaker.session.sa.echo = false
439
443
440 beaker.session.key = rhodecode
444 beaker.session.key = rhodecode
441 beaker.session.secret = production-rc-uytcxaz
445 beaker.session.secret = production-rc-uytcxaz
442 beaker.session.lock_dir = %(here)s/data/sessions/lock
446 beaker.session.lock_dir = %(here)s/data/sessions/lock
443
447
444 ; Secure encrypted cookie. Requires AES and AES python libraries
448 ; Secure encrypted cookie. Requires AES and AES python libraries
445 ; you must disable beaker.session.secret to use this
449 ; you must disable beaker.session.secret to use this
446 #beaker.session.encrypt_key = key_for_encryption
450 #beaker.session.encrypt_key = key_for_encryption
447 #beaker.session.validate_key = validation_key
451 #beaker.session.validate_key = validation_key
448
452
449 ; Sets session as invalid (also logging out user) if it has not been
453 ; Sets session as invalid (also logging out user) if it has not been
450 ; accessed for given amount of time in seconds
454 ; accessed for given amount of time in seconds
451 beaker.session.timeout = 2592000
455 beaker.session.timeout = 2592000
452 beaker.session.httponly = true
456 beaker.session.httponly = true
453
457
454 ; Path to use for the cookie. Set to prefix if you use prefix middleware
458 ; Path to use for the cookie. Set to prefix if you use prefix middleware
455 #beaker.session.cookie_path = /custom_prefix
459 #beaker.session.cookie_path = /custom_prefix
456
460
457 ; Set https secure cookie
461 ; Set https secure cookie
458 beaker.session.secure = false
462 beaker.session.secure = false
459
463
460 ; default cookie expiration time in seconds, set to `true` to set expire
464 ; default cookie expiration time in seconds, set to `true` to set expire
461 ; at browser close
465 ; at browser close
462 #beaker.session.cookie_expires = 3600
466 #beaker.session.cookie_expires = 3600
463
467
464 ; #############################
468 ; #############################
465 ; SEARCH INDEXING CONFIGURATION
469 ; SEARCH INDEXING CONFIGURATION
466 ; #############################
470 ; #############################
467
471
468 ; Full text search indexer is available in rhodecode-tools under
472 ; Full text search indexer is available in rhodecode-tools under
469 ; `rhodecode-tools index` command
473 ; `rhodecode-tools index` command
470
474
471 ; WHOOSH Backend, doesn't require additional services to run
475 ; WHOOSH Backend, doesn't require additional services to run
472 ; it works well with a few dozen repos
476 ; it works well with a few dozen repos
473 search.module = rhodecode.lib.index.whoosh
477 search.module = rhodecode.lib.index.whoosh
474 search.location = %(here)s/data/index
478 search.location = %(here)s/data/index
475
479
476 ; ####################
480 ; ####################
477 ; CHANNELSTREAM CONFIG
481 ; CHANNELSTREAM CONFIG
478 ; ####################
482 ; ####################
479
483
480 ; channelstream enables persistent connections and live notification
484 ; channelstream enables persistent connections and live notification
481 ; in the system. It's also used by the chat system
485 ; in the system. It's also used by the chat system
482
486
483 channelstream.enabled = false
487 channelstream.enabled = false
484
488
485 ; server address for channelstream server on the backend
489 ; server address for channelstream server on the backend
486 channelstream.server = 127.0.0.1:9800
490 channelstream.server = 127.0.0.1:9800
487
491
488 ; location of the channelstream server from outside world
492 ; location of the channelstream server from outside world
489 ; use ws:// for http or wss:// for https. This address needs to be handled
493 ; use ws:// for http or wss:// for https. This address needs to be handled
490 ; by external HTTP server such as Nginx or Apache
494 ; by external HTTP server such as Nginx or Apache
491 ; see Nginx/Apache configuration examples in our docs
495 ; see Nginx/Apache configuration examples in our docs
492 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
496 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
493 channelstream.secret = secret
497 channelstream.secret = secret
494 channelstream.history.location = %(here)s/channelstream_history
498 channelstream.history.location = %(here)s/channelstream_history
495
499
496 ; Internal application path that Javascript uses to connect into.
500 ; Internal application path that Javascript uses to connect into.
497 ; If you use proxy-prefix the prefix should be added before /_channelstream
501 ; If you use proxy-prefix the prefix should be added before /_channelstream
498 channelstream.proxy_path = /_channelstream
502 channelstream.proxy_path = /_channelstream
499
503
500
504
501 ; ##############################
505 ; ##############################
502 ; MAIN RHODECODE DATABASE CONFIG
506 ; MAIN RHODECODE DATABASE CONFIG
503 ; ##############################
507 ; ##############################
504
508
505 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
509 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
506 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
510 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
507 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
511 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
508 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
512 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
509 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
510
514
511 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
515 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512
516
513 ; see sqlalchemy docs for other advanced settings
517 ; see sqlalchemy docs for other advanced settings
514 ; print the sql statements to output
518 ; print the sql statements to output
515 sqlalchemy.db1.echo = false
519 sqlalchemy.db1.echo = false
516
520
517 ; recycle the connections after this amount of seconds
521 ; recycle the connections after this amount of seconds
518 sqlalchemy.db1.pool_recycle = 3600
522 sqlalchemy.db1.pool_recycle = 3600
519 sqlalchemy.db1.convert_unicode = true
523 sqlalchemy.db1.convert_unicode = true
520
524
521 ; the number of connections to keep open inside the connection pool.
525 ; the number of connections to keep open inside the connection pool.
522 ; 0 indicates no limit
526 ; 0 indicates no limit
523 #sqlalchemy.db1.pool_size = 5
527 #sqlalchemy.db1.pool_size = 5
524
528
525 ; The number of connections to allow in connection pool "overflow", that is
529 ; The number of connections to allow in connection pool "overflow", that is
526 ; connections that can be opened above and beyond the pool_size setting,
530 ; connections that can be opened above and beyond the pool_size setting,
527 ; which defaults to five.
531 ; which defaults to five.
528 #sqlalchemy.db1.max_overflow = 10
532 #sqlalchemy.db1.max_overflow = 10
529
533
530 ; Connection check ping, used to detect broken database connections
534 ; Connection check ping, used to detect broken database connections
531 ; could be enabled to better handle cases if MySQL has gone away errors
535 ; could be enabled to better handle cases if MySQL has gone away errors
532 #sqlalchemy.db1.ping_connection = true
536 #sqlalchemy.db1.ping_connection = true
533
537
534 ; ##########
538 ; ##########
535 ; VCS CONFIG
539 ; VCS CONFIG
536 ; ##########
540 ; ##########
537 vcs.server.enable = true
541 vcs.server.enable = true
538 vcs.server = localhost:9900
542 vcs.server = localhost:9900
539
543
540 ; Web server connectivity protocol, responsible for web based VCS operations
544 ; Web server connectivity protocol, responsible for web based VCS operations
541 ; Available protocols are:
545 ; Available protocols are:
542 ; `http` - use http-rpc backend (default)
546 ; `http` - use http-rpc backend (default)
543 vcs.server.protocol = http
547 vcs.server.protocol = http
544
548
545 ; Push/Pull operations protocol, available options are:
549 ; Push/Pull operations protocol, available options are:
546 ; `http` - use http-rpc backend (default)
550 ; `http` - use http-rpc backend (default)
547 vcs.scm_app_implementation = http
551 vcs.scm_app_implementation = http
548
552
549 ; Push/Pull operations hooks protocol, available options are:
553 ; Push/Pull operations hooks protocol, available options are:
550 ; `http` - use http-rpc backend (default)
554 ; `http` - use http-rpc backend (default)
551 vcs.hooks.protocol = http
555 vcs.hooks.protocol = http
552
556
553 ; Host on which this instance is listening for hooks. If vcsserver is in other location
557 ; Host on which this instance is listening for hooks. If vcsserver is in other location
554 ; this should be adjusted.
558 ; this should be adjusted.
555 vcs.hooks.host = 127.0.0.1
559 vcs.hooks.host = 127.0.0.1
556
560
557 ; Start VCSServer with this instance as a subprocess, useful for development
561 ; Start VCSServer with this instance as a subprocess, useful for development
558 vcs.start_server = false
562 vcs.start_server = false
559
563
560 ; List of enabled VCS backends, available options are:
564 ; List of enabled VCS backends, available options are:
561 ; `hg` - mercurial
565 ; `hg` - mercurial
562 ; `git` - git
566 ; `git` - git
563 ; `svn` - subversion
567 ; `svn` - subversion
564 vcs.backends = hg, git, svn
568 vcs.backends = hg, git, svn
565
569
566 ; Wait this number of seconds before killing connection to the vcsserver
570 ; Wait this number of seconds before killing connection to the vcsserver
567 vcs.connection_timeout = 3600
571 vcs.connection_timeout = 3600
568
572
569 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
573 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
570 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
574 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
571 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
575 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
572 #vcs.svn.compatible_version = 1.8
576 #vcs.svn.compatible_version = 1.8
573
577
574
578
575 ; ####################################################
579 ; ####################################################
576 ; Subversion proxy support (mod_dav_svn)
580 ; Subversion proxy support (mod_dav_svn)
577 ; Maps RhodeCode repo groups into SVN paths for Apache
581 ; Maps RhodeCode repo groups into SVN paths for Apache
578 ; ####################################################
582 ; ####################################################
579
583
580 ; Enable or disable the config file generation.
584 ; Enable or disable the config file generation.
581 svn.proxy.generate_config = false
585 svn.proxy.generate_config = false
582
586
583 ; Generate config file with `SVNListParentPath` set to `On`.
587 ; Generate config file with `SVNListParentPath` set to `On`.
584 svn.proxy.list_parent_path = true
588 svn.proxy.list_parent_path = true
585
589
586 ; Set location and file name of generated config file.
590 ; Set location and file name of generated config file.
587 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
591 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
588
592
589 ; alternative mod_dav config template. This needs to be a valid mako template
593 ; alternative mod_dav config template. This needs to be a valid mako template
590 ; Example template can be found in the source code:
594 ; Example template can be found in the source code:
591 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
595 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
592 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
596 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
593
597
594 ; Used as a prefix to the `Location` block in the generated config file.
598 ; Used as a prefix to the `Location` block in the generated config file.
595 ; In most cases it should be set to `/`.
599 ; In most cases it should be set to `/`.
596 svn.proxy.location_root = /
600 svn.proxy.location_root = /
597
601
598 ; Command to reload the mod dav svn configuration on change.
602 ; Command to reload the mod dav svn configuration on change.
599 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
603 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
600 ; Make sure user who runs RhodeCode process is allowed to reload Apache
604 ; Make sure user who runs RhodeCode process is allowed to reload Apache
601 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
605 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
602
606
603 ; If the timeout expires before the reload command finishes, the command will
607 ; If the timeout expires before the reload command finishes, the command will
604 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
608 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
605 #svn.proxy.reload_timeout = 10
609 #svn.proxy.reload_timeout = 10
606
610
607 ; ####################
611 ; ####################
608 ; SSH Support Settings
612 ; SSH Support Settings
609 ; ####################
613 ; ####################
610
614
611 ; Defines if a custom authorized_keys file should be created and written on
615 ; Defines if a custom authorized_keys file should be created and written on
612 ; any change of user ssh keys. Setting this to false also disables possibility
616 ; any change of user ssh keys. Setting this to false also disables possibility
613 ; of adding SSH keys by users from web interface. Super admins can still
617 ; of adding SSH keys by users from web interface. Super admins can still
614 ; manage SSH Keys.
618 ; manage SSH Keys.
615 ssh.generate_authorized_keyfile = false
619 ssh.generate_authorized_keyfile = false
616
620
617 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
621 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
618 # ssh.authorized_keys_ssh_opts =
622 # ssh.authorized_keys_ssh_opts =
619
623
620 ; Path to the authorized_keys file where the generate entries are placed.
624 ; Path to the authorized_keys file where the generate entries are placed.
621 ; It is possible to have multiple key files specified in `sshd_config` e.g.
625 ; It is possible to have multiple key files specified in `sshd_config` e.g.
622 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
626 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
623 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
624
628
625 ; Command to execute the SSH wrapper. The binary is available in the
629 ; Command to execute the SSH wrapper. The binary is available in the
626 ; RhodeCode installation directory.
630 ; RhodeCode installation directory.
627 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
631 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
628 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
629
633
630 ; Allow shell when executing the ssh-wrapper command
634 ; Allow shell when executing the ssh-wrapper command
631 ssh.wrapper_cmd_allow_shell = false
635 ssh.wrapper_cmd_allow_shell = false
632
636
633 ; Enables logging, and detailed output sent back to the client during SSH
637 ; Enables logging, and detailed output sent back to the client during SSH
634 ; operations. Useful for debugging, shouldn't be used in production.
638 ; operations. Useful for debugging, shouldn't be used in production.
635 ssh.enable_debug_logging = false
639 ssh.enable_debug_logging = false
636
640
637 ; Paths to binary executable, by default they are the names, but we can
641 ; Paths to binary executable, by default they are the names, but we can
638 ; override them if we want to use a custom one
642 ; override them if we want to use a custom one
639 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
640 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
641 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
642
646
643 ; Enables SSH key generator web interface. Disabling this still allows users
647 ; Enables SSH key generator web interface. Disabling this still allows users
644 ; to add their own keys.
648 ; to add their own keys.
645 ssh.enable_ui_key_generator = true
649 ssh.enable_ui_key_generator = true
646
650
647
651
648 ; #################
652 ; #################
649 ; APPENLIGHT CONFIG
653 ; APPENLIGHT CONFIG
650 ; #################
654 ; #################
651
655
652 ; Appenlight is tailored to work with RhodeCode, see
656 ; Appenlight is tailored to work with RhodeCode, see
653 ; http://appenlight.rhodecode.com for details how to obtain an account
657 ; http://appenlight.rhodecode.com for details how to obtain an account
654
658
655 ; Appenlight integration enabled
659 ; Appenlight integration enabled
656 appenlight = false
660 appenlight = false
657
661
658 appenlight.server_url = https://api.appenlight.com
662 appenlight.server_url = https://api.appenlight.com
659 appenlight.api_key = YOUR_API_KEY
663 appenlight.api_key = YOUR_API_KEY
660 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
664 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
661
665
662 ; used for JS client
666 ; used for JS client
663 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
667 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
664
668
665 ; TWEAK AMOUNT OF INFO SENT HERE
669 ; TWEAK AMOUNT OF INFO SENT HERE
666
670
667 ; enables 404 error logging (default False)
671 ; enables 404 error logging (default False)
668 appenlight.report_404 = false
672 appenlight.report_404 = false
669
673
670 ; time in seconds after request is considered being slow (default 1)
674 ; time in seconds after request is considered being slow (default 1)
671 appenlight.slow_request_time = 1
675 appenlight.slow_request_time = 1
672
676
673 ; record slow requests in application
677 ; record slow requests in application
674 ; (needs to be enabled for slow datastore recording and time tracking)
678 ; (needs to be enabled for slow datastore recording and time tracking)
675 appenlight.slow_requests = true
679 appenlight.slow_requests = true
676
680
677 ; enable hooking to application loggers
681 ; enable hooking to application loggers
678 appenlight.logging = true
682 appenlight.logging = true
679
683
680 ; minimum log level for log capture
684 ; minimum log level for log capture
681 appenlight.logging.level = WARNING
685 appenlight.logging.level = WARNING
682
686
683 ; send logs only from erroneous/slow requests
687 ; send logs only from erroneous/slow requests
684 ; (saves API quota for intensive logging)
688 ; (saves API quota for intensive logging)
685 appenlight.logging_on_error = false
689 appenlight.logging_on_error = false
686
690
687 ; list of additional keywords that should be grabbed from environ object
691 ; list of additional keywords that should be grabbed from environ object
688 ; can be string with comma separated list of words in lowercase
692 ; can be string with comma separated list of words in lowercase
689 ; (by default client will always send following info:
693 ; (by default client will always send following info:
690 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
694 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
691 ; start with HTTP* this list can be extended with additional keywords here
695 ; start with HTTP* this list can be extended with additional keywords here
692 appenlight.environ_keys_whitelist =
696 appenlight.environ_keys_whitelist =
693
697
694 ; list of keywords that should be blanked from request object
698 ; list of keywords that should be blanked from request object
695 ; can be string with comma separated list of words in lowercase
699 ; can be string with comma separated list of words in lowercase
696 ; (by default client will always blank keys that contain following words
700 ; (by default client will always blank keys that contain following words
697 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
701 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
698 ; this list can be extended with additional keywords set here
702 ; this list can be extended with additional keywords set here
699 appenlight.request_keys_blacklist =
703 appenlight.request_keys_blacklist =
700
704
701 ; list of namespaces that should be ignored when gathering log entries
705 ; list of namespaces that should be ignored when gathering log entries
702 ; can be string with comma separated list of namespaces
706 ; can be string with comma separated list of namespaces
703 ; (by default the client ignores own entries: appenlight_client.client)
707 ; (by default the client ignores own entries: appenlight_client.client)
704 appenlight.log_namespace_blacklist =
708 appenlight.log_namespace_blacklist =
705
709
706 ; Dummy marker to add new entries after.
710 ; Dummy marker to add new entries after.
707 ; Add any custom entries below. Please don't remove this marker.
711 ; Add any custom entries below. Please don't remove this marker.
708 custom.conf = 1
712 custom.conf = 1
709
713
710
714
711 ; #####################
715 ; #####################
712 ; LOGGING CONFIGURATION
716 ; LOGGING CONFIGURATION
713 ; #####################
717 ; #####################
714 [loggers]
718 [loggers]
715 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
719 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
716
720
717 [handlers]
721 [handlers]
718 keys = console, console_sql
722 keys = console, console_sql
719
723
720 [formatters]
724 [formatters]
721 keys = generic, color_formatter, color_formatter_sql
725 keys = generic, color_formatter, color_formatter_sql
722
726
723 ; #######
727 ; #######
724 ; LOGGERS
728 ; LOGGERS
725 ; #######
729 ; #######
726 [logger_root]
730 [logger_root]
727 level = NOTSET
731 level = NOTSET
728 handlers = console
732 handlers = console
729
733
730 [logger_sqlalchemy]
734 [logger_sqlalchemy]
731 level = INFO
735 level = INFO
732 handlers = console_sql
736 handlers = console_sql
733 qualname = sqlalchemy.engine
737 qualname = sqlalchemy.engine
734 propagate = 0
738 propagate = 0
735
739
736 [logger_beaker]
740 [logger_beaker]
737 level = DEBUG
741 level = DEBUG
738 handlers =
742 handlers =
739 qualname = beaker.container
743 qualname = beaker.container
740 propagate = 1
744 propagate = 1
741
745
742 [logger_rhodecode]
746 [logger_rhodecode]
743 level = DEBUG
747 level = DEBUG
744 handlers =
748 handlers =
745 qualname = rhodecode
749 qualname = rhodecode
746 propagate = 1
750 propagate = 1
747
751
748 [logger_ssh_wrapper]
752 [logger_ssh_wrapper]
749 level = DEBUG
753 level = DEBUG
750 handlers =
754 handlers =
751 qualname = ssh_wrapper
755 qualname = ssh_wrapper
752 propagate = 1
756 propagate = 1
753
757
754 [logger_celery]
758 [logger_celery]
755 level = DEBUG
759 level = DEBUG
756 handlers =
760 handlers =
757 qualname = celery
761 qualname = celery
758
762
759
763
760 ; ########
764 ; ########
761 ; HANDLERS
765 ; HANDLERS
762 ; ########
766 ; ########
763
767
764 [handler_console]
768 [handler_console]
765 class = StreamHandler
769 class = StreamHandler
766 args = (sys.stderr, )
770 args = (sys.stderr, )
767 level = INFO
771 level = INFO
768 formatter = generic
772 formatter = generic
769
773
770 [handler_console_sql]
774 [handler_console_sql]
771 ; "level = DEBUG" logs SQL queries and results.
775 ; "level = DEBUG" logs SQL queries and results.
772 ; "level = INFO" logs SQL queries.
776 ; "level = INFO" logs SQL queries.
773 ; "level = WARN" logs neither. (Recommended for production systems.)
777 ; "level = WARN" logs neither. (Recommended for production systems.)
774 class = StreamHandler
778 class = StreamHandler
775 args = (sys.stderr, )
779 args = (sys.stderr, )
776 level = WARN
780 level = WARN
777 formatter = generic
781 formatter = generic
778
782
779 ; ##########
783 ; ##########
780 ; FORMATTERS
784 ; FORMATTERS
781 ; ##########
785 ; ##########
782
786
783 [formatter_generic]
787 [formatter_generic]
784 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
788 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
785 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
789 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
786 datefmt = %Y-%m-%d %H:%M:%S
790 datefmt = %Y-%m-%d %H:%M:%S
787
791
788 [formatter_color_formatter]
792 [formatter_color_formatter]
789 class = rhodecode.lib.logging_formatter.ColorFormatter
793 class = rhodecode.lib.logging_formatter.ColorFormatter
790 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
794 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
791 datefmt = %Y-%m-%d %H:%M:%S
795 datefmt = %Y-%m-%d %H:%M:%S
792
796
793 [formatter_color_formatter_sql]
797 [formatter_color_formatter_sql]
794 class = rhodecode.lib.logging_formatter.ColorFormatterSql
798 class = rhodecode.lib.logging_formatter.ColorFormatterSql
795 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
799 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
796 datefmt = %Y-%m-%d %H:%M:%S
800 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,156 +1,157 b''
1 .. _rhodecode-release-notes-ref:
1 .. _rhodecode-release-notes-ref:
2
2
3 Release Notes
3 Release Notes
4 =============
4 =============
5
5
6 |RCE| 4.x Versions
6 |RCE| 4.x Versions
7 ------------------
7 ------------------
8
8
9 .. toctree::
9 .. toctree::
10 :maxdepth: 1
10 :maxdepth: 1
11
11
12 release-notes-4.26.0.rst
12 release-notes-4.25.2.rst
13 release-notes-4.25.2.rst
13 release-notes-4.25.1.rst
14 release-notes-4.25.1.rst
14 release-notes-4.25.0.rst
15 release-notes-4.25.0.rst
15 release-notes-4.24.1.rst
16 release-notes-4.24.1.rst
16 release-notes-4.24.0.rst
17 release-notes-4.24.0.rst
17 release-notes-4.23.2.rst
18 release-notes-4.23.2.rst
18 release-notes-4.23.1.rst
19 release-notes-4.23.1.rst
19 release-notes-4.23.0.rst
20 release-notes-4.23.0.rst
20 release-notes-4.22.0.rst
21 release-notes-4.22.0.rst
21 release-notes-4.21.0.rst
22 release-notes-4.21.0.rst
22 release-notes-4.20.1.rst
23 release-notes-4.20.1.rst
23 release-notes-4.20.0.rst
24 release-notes-4.20.0.rst
24 release-notes-4.19.3.rst
25 release-notes-4.19.3.rst
25 release-notes-4.19.2.rst
26 release-notes-4.19.2.rst
26 release-notes-4.19.1.rst
27 release-notes-4.19.1.rst
27 release-notes-4.19.0.rst
28 release-notes-4.19.0.rst
28 release-notes-4.18.3.rst
29 release-notes-4.18.3.rst
29 release-notes-4.18.2.rst
30 release-notes-4.18.2.rst
30 release-notes-4.18.1.rst
31 release-notes-4.18.1.rst
31 release-notes-4.18.0.rst
32 release-notes-4.18.0.rst
32 release-notes-4.17.4.rst
33 release-notes-4.17.4.rst
33 release-notes-4.17.3.rst
34 release-notes-4.17.3.rst
34 release-notes-4.17.2.rst
35 release-notes-4.17.2.rst
35 release-notes-4.17.1.rst
36 release-notes-4.17.1.rst
36 release-notes-4.17.0.rst
37 release-notes-4.17.0.rst
37 release-notes-4.16.2.rst
38 release-notes-4.16.2.rst
38 release-notes-4.16.1.rst
39 release-notes-4.16.1.rst
39 release-notes-4.16.0.rst
40 release-notes-4.16.0.rst
40 release-notes-4.15.2.rst
41 release-notes-4.15.2.rst
41 release-notes-4.15.1.rst
42 release-notes-4.15.1.rst
42 release-notes-4.15.0.rst
43 release-notes-4.15.0.rst
43 release-notes-4.14.1.rst
44 release-notes-4.14.1.rst
44 release-notes-4.14.0.rst
45 release-notes-4.14.0.rst
45 release-notes-4.13.3.rst
46 release-notes-4.13.3.rst
46 release-notes-4.13.2.rst
47 release-notes-4.13.2.rst
47 release-notes-4.13.1.rst
48 release-notes-4.13.1.rst
48 release-notes-4.13.0.rst
49 release-notes-4.13.0.rst
49 release-notes-4.12.4.rst
50 release-notes-4.12.4.rst
50 release-notes-4.12.3.rst
51 release-notes-4.12.3.rst
51 release-notes-4.12.2.rst
52 release-notes-4.12.2.rst
52 release-notes-4.12.1.rst
53 release-notes-4.12.1.rst
53 release-notes-4.12.0.rst
54 release-notes-4.12.0.rst
54 release-notes-4.11.6.rst
55 release-notes-4.11.6.rst
55 release-notes-4.11.5.rst
56 release-notes-4.11.5.rst
56 release-notes-4.11.4.rst
57 release-notes-4.11.4.rst
57 release-notes-4.11.3.rst
58 release-notes-4.11.3.rst
58 release-notes-4.11.2.rst
59 release-notes-4.11.2.rst
59 release-notes-4.11.1.rst
60 release-notes-4.11.1.rst
60 release-notes-4.11.0.rst
61 release-notes-4.11.0.rst
61 release-notes-4.10.6.rst
62 release-notes-4.10.6.rst
62 release-notes-4.10.5.rst
63 release-notes-4.10.5.rst
63 release-notes-4.10.4.rst
64 release-notes-4.10.4.rst
64 release-notes-4.10.3.rst
65 release-notes-4.10.3.rst
65 release-notes-4.10.2.rst
66 release-notes-4.10.2.rst
66 release-notes-4.10.1.rst
67 release-notes-4.10.1.rst
67 release-notes-4.10.0.rst
68 release-notes-4.10.0.rst
68 release-notes-4.9.1.rst
69 release-notes-4.9.1.rst
69 release-notes-4.9.0.rst
70 release-notes-4.9.0.rst
70 release-notes-4.8.0.rst
71 release-notes-4.8.0.rst
71 release-notes-4.7.2.rst
72 release-notes-4.7.2.rst
72 release-notes-4.7.1.rst
73 release-notes-4.7.1.rst
73 release-notes-4.7.0.rst
74 release-notes-4.7.0.rst
74 release-notes-4.6.1.rst
75 release-notes-4.6.1.rst
75 release-notes-4.6.0.rst
76 release-notes-4.6.0.rst
76 release-notes-4.5.2.rst
77 release-notes-4.5.2.rst
77 release-notes-4.5.1.rst
78 release-notes-4.5.1.rst
78 release-notes-4.5.0.rst
79 release-notes-4.5.0.rst
79 release-notes-4.4.2.rst
80 release-notes-4.4.2.rst
80 release-notes-4.4.1.rst
81 release-notes-4.4.1.rst
81 release-notes-4.4.0.rst
82 release-notes-4.4.0.rst
82 release-notes-4.3.1.rst
83 release-notes-4.3.1.rst
83 release-notes-4.3.0.rst
84 release-notes-4.3.0.rst
84 release-notes-4.2.1.rst
85 release-notes-4.2.1.rst
85 release-notes-4.2.0.rst
86 release-notes-4.2.0.rst
86 release-notes-4.1.2.rst
87 release-notes-4.1.2.rst
87 release-notes-4.1.1.rst
88 release-notes-4.1.1.rst
88 release-notes-4.1.0.rst
89 release-notes-4.1.0.rst
89 release-notes-4.0.1.rst
90 release-notes-4.0.1.rst
90 release-notes-4.0.0.rst
91 release-notes-4.0.0.rst
91
92
92 |RCE| 3.x Versions
93 |RCE| 3.x Versions
93 ------------------
94 ------------------
94
95
95 .. toctree::
96 .. toctree::
96 :maxdepth: 1
97 :maxdepth: 1
97
98
98 release-notes-3.8.4.rst
99 release-notes-3.8.4.rst
99 release-notes-3.8.3.rst
100 release-notes-3.8.3.rst
100 release-notes-3.8.2.rst
101 release-notes-3.8.2.rst
101 release-notes-3.8.1.rst
102 release-notes-3.8.1.rst
102 release-notes-3.8.0.rst
103 release-notes-3.8.0.rst
103 release-notes-3.7.1.rst
104 release-notes-3.7.1.rst
104 release-notes-3.7.0.rst
105 release-notes-3.7.0.rst
105 release-notes-3.6.1.rst
106 release-notes-3.6.1.rst
106 release-notes-3.6.0.rst
107 release-notes-3.6.0.rst
107 release-notes-3.5.2.rst
108 release-notes-3.5.2.rst
108 release-notes-3.5.1.rst
109 release-notes-3.5.1.rst
109 release-notes-3.5.0.rst
110 release-notes-3.5.0.rst
110 release-notes-3.4.1.rst
111 release-notes-3.4.1.rst
111 release-notes-3.4.0.rst
112 release-notes-3.4.0.rst
112 release-notes-3.3.4.rst
113 release-notes-3.3.4.rst
113 release-notes-3.3.3.rst
114 release-notes-3.3.3.rst
114 release-notes-3.3.2.rst
115 release-notes-3.3.2.rst
115 release-notes-3.3.1.rst
116 release-notes-3.3.1.rst
116 release-notes-3.3.0.rst
117 release-notes-3.3.0.rst
117 release-notes-3.2.3.rst
118 release-notes-3.2.3.rst
118 release-notes-3.2.2.rst
119 release-notes-3.2.2.rst
119 release-notes-3.2.1.rst
120 release-notes-3.2.1.rst
120 release-notes-3.2.0.rst
121 release-notes-3.2.0.rst
121 release-notes-3.1.1.rst
122 release-notes-3.1.1.rst
122 release-notes-3.1.0.rst
123 release-notes-3.1.0.rst
123 release-notes-3.0.2.rst
124 release-notes-3.0.2.rst
124 release-notes-3.0.1.rst
125 release-notes-3.0.1.rst
125 release-notes-3.0.0.rst
126 release-notes-3.0.0.rst
126
127
127 |RCE| 2.x Versions
128 |RCE| 2.x Versions
128 ------------------
129 ------------------
129
130
130 .. toctree::
131 .. toctree::
131 :maxdepth: 1
132 :maxdepth: 1
132
133
133 release-notes-2.2.8.rst
134 release-notes-2.2.8.rst
134 release-notes-2.2.7.rst
135 release-notes-2.2.7.rst
135 release-notes-2.2.6.rst
136 release-notes-2.2.6.rst
136 release-notes-2.2.5.rst
137 release-notes-2.2.5.rst
137 release-notes-2.2.4.rst
138 release-notes-2.2.4.rst
138 release-notes-2.2.3.rst
139 release-notes-2.2.3.rst
139 release-notes-2.2.2.rst
140 release-notes-2.2.2.rst
140 release-notes-2.2.1.rst
141 release-notes-2.2.1.rst
141 release-notes-2.2.0.rst
142 release-notes-2.2.0.rst
142 release-notes-2.1.0.rst
143 release-notes-2.1.0.rst
143 release-notes-2.0.2.rst
144 release-notes-2.0.2.rst
144 release-notes-2.0.1.rst
145 release-notes-2.0.1.rst
145 release-notes-2.0.0.rst
146 release-notes-2.0.0.rst
146
147
147 |RCE| 1.x Versions
148 |RCE| 1.x Versions
148 ------------------
149 ------------------
149
150
150 .. toctree::
151 .. toctree::
151 :maxdepth: 1
152 :maxdepth: 1
152
153
153 release-notes-1.7.2.rst
154 release-notes-1.7.2.rst
154 release-notes-1.7.1.rst
155 release-notes-1.7.1.rst
155 release-notes-1.7.0.rst
156 release-notes-1.7.0.rst
156 release-notes-1.6.0.rst
157 release-notes-1.6.0.rst
@@ -1,2520 +1,2520 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "alembic" = super.buildPythonPackage {
7 "alembic" = super.buildPythonPackage {
8 name = "alembic-1.4.2";
8 name = "alembic-1.4.2";
9 doCheck = false;
9 doCheck = false;
10 propagatedBuildInputs = [
10 propagatedBuildInputs = [
11 self."sqlalchemy"
11 self."sqlalchemy"
12 self."mako"
12 self."mako"
13 self."python-editor"
13 self."python-editor"
14 self."python-dateutil"
14 self."python-dateutil"
15 ];
15 ];
16 src = fetchurl {
16 src = fetchurl {
17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
19 };
19 };
20 meta = {
20 meta = {
21 license = [ pkgs.lib.licenses.mit ];
21 license = [ pkgs.lib.licenses.mit ];
22 };
22 };
23 };
23 };
24 "amqp" = super.buildPythonPackage {
24 "amqp" = super.buildPythonPackage {
25 name = "amqp-2.5.2";
25 name = "amqp-2.5.2";
26 doCheck = false;
26 doCheck = false;
27 propagatedBuildInputs = [
27 propagatedBuildInputs = [
28 self."vine"
28 self."vine"
29 ];
29 ];
30 src = fetchurl {
30 src = fetchurl {
31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
33 };
33 };
34 meta = {
34 meta = {
35 license = [ pkgs.lib.licenses.bsdOriginal ];
35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 };
36 };
37 };
37 };
38 "apispec" = super.buildPythonPackage {
38 "apispec" = super.buildPythonPackage {
39 name = "apispec-1.0.0";
39 name = "apispec-1.0.0";
40 doCheck = false;
40 doCheck = false;
41 propagatedBuildInputs = [
41 propagatedBuildInputs = [
42 self."PyYAML"
42 self."PyYAML"
43 ];
43 ];
44 src = fetchurl {
44 src = fetchurl {
45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
47 };
47 };
48 meta = {
48 meta = {
49 license = [ pkgs.lib.licenses.mit ];
49 license = [ pkgs.lib.licenses.mit ];
50 };
50 };
51 };
51 };
52 "appenlight-client" = super.buildPythonPackage {
52 "appenlight-client" = super.buildPythonPackage {
53 name = "appenlight-client-0.6.26";
53 name = "appenlight-client-0.6.26";
54 doCheck = false;
54 doCheck = false;
55 propagatedBuildInputs = [
55 propagatedBuildInputs = [
56 self."webob"
56 self."webob"
57 self."requests"
57 self."requests"
58 self."six"
58 self."six"
59 ];
59 ];
60 src = fetchurl {
60 src = fetchurl {
61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
63 };
63 };
64 meta = {
64 meta = {
65 license = [ pkgs.lib.licenses.bsdOriginal ];
65 license = [ pkgs.lib.licenses.bsdOriginal ];
66 };
66 };
67 };
67 };
68 "asn1crypto" = super.buildPythonPackage {
68 "asn1crypto" = super.buildPythonPackage {
69 name = "asn1crypto-0.24.0";
69 name = "asn1crypto-0.24.0";
70 doCheck = false;
70 doCheck = false;
71 src = fetchurl {
71 src = fetchurl {
72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
74 };
74 };
75 meta = {
75 meta = {
76 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
77 };
77 };
78 };
78 };
79 "atomicwrites" = super.buildPythonPackage {
79 "atomicwrites" = super.buildPythonPackage {
80 name = "atomicwrites-1.3.0";
80 name = "atomicwrites-1.3.0";
81 doCheck = false;
81 doCheck = false;
82 src = fetchurl {
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
85 };
85 };
86 meta = {
86 meta = {
87 license = [ pkgs.lib.licenses.mit ];
87 license = [ pkgs.lib.licenses.mit ];
88 };
88 };
89 };
89 };
90 "attrs" = super.buildPythonPackage {
90 "attrs" = super.buildPythonPackage {
91 name = "attrs-19.3.0";
91 name = "attrs-19.3.0";
92 doCheck = false;
92 doCheck = false;
93 src = fetchurl {
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
96 };
96 };
97 meta = {
97 meta = {
98 license = [ pkgs.lib.licenses.mit ];
98 license = [ pkgs.lib.licenses.mit ];
99 };
99 };
100 };
100 };
101 "babel" = super.buildPythonPackage {
101 "babel" = super.buildPythonPackage {
102 name = "babel-1.3";
102 name = "babel-1.3";
103 doCheck = false;
103 doCheck = false;
104 propagatedBuildInputs = [
104 propagatedBuildInputs = [
105 self."pytz"
105 self."pytz"
106 ];
106 ];
107 src = fetchurl {
107 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
110 };
110 };
111 meta = {
111 meta = {
112 license = [ pkgs.lib.licenses.bsdOriginal ];
112 license = [ pkgs.lib.licenses.bsdOriginal ];
113 };
113 };
114 };
114 };
115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
116 name = "backports.shutil-get-terminal-size-1.0.0";
116 name = "backports.shutil-get-terminal-size-1.0.0";
117 doCheck = false;
117 doCheck = false;
118 src = fetchurl {
118 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
121 };
121 };
122 meta = {
122 meta = {
123 license = [ pkgs.lib.licenses.mit ];
123 license = [ pkgs.lib.licenses.mit ];
124 };
124 };
125 };
125 };
126 "beaker" = super.buildPythonPackage {
126 "beaker" = super.buildPythonPackage {
127 name = "beaker-1.9.1";
127 name = "beaker-1.9.1";
128 doCheck = false;
128 doCheck = false;
129 propagatedBuildInputs = [
129 propagatedBuildInputs = [
130 self."funcsigs"
130 self."funcsigs"
131 ];
131 ];
132 src = fetchurl {
132 src = fetchurl {
133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
135 };
135 };
136 meta = {
136 meta = {
137 license = [ pkgs.lib.licenses.bsdOriginal ];
137 license = [ pkgs.lib.licenses.bsdOriginal ];
138 };
138 };
139 };
139 };
140 "beautifulsoup4" = super.buildPythonPackage {
140 "beautifulsoup4" = super.buildPythonPackage {
141 name = "beautifulsoup4-4.6.3";
141 name = "beautifulsoup4-4.6.3";
142 doCheck = false;
142 doCheck = false;
143 src = fetchurl {
143 src = fetchurl {
144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
146 };
146 };
147 meta = {
147 meta = {
148 license = [ pkgs.lib.licenses.mit ];
148 license = [ pkgs.lib.licenses.mit ];
149 };
149 };
150 };
150 };
151 "billiard" = super.buildPythonPackage {
151 "billiard" = super.buildPythonPackage {
152 name = "billiard-3.6.1.0";
152 name = "billiard-3.6.1.0";
153 doCheck = false;
153 doCheck = false;
154 src = fetchurl {
154 src = fetchurl {
155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
157 };
157 };
158 meta = {
158 meta = {
159 license = [ pkgs.lib.licenses.bsdOriginal ];
159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 };
160 };
161 };
161 };
162 "bleach" = super.buildPythonPackage {
162 "bleach" = super.buildPythonPackage {
163 name = "bleach-3.1.3";
163 name = "bleach-3.1.3";
164 doCheck = false;
164 doCheck = false;
165 propagatedBuildInputs = [
165 propagatedBuildInputs = [
166 self."six"
166 self."six"
167 self."webencodings"
167 self."webencodings"
168 ];
168 ];
169 src = fetchurl {
169 src = fetchurl {
170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
172 };
172 };
173 meta = {
173 meta = {
174 license = [ pkgs.lib.licenses.asl20 ];
174 license = [ pkgs.lib.licenses.asl20 ];
175 };
175 };
176 };
176 };
177 "bumpversion" = super.buildPythonPackage {
177 "bumpversion" = super.buildPythonPackage {
178 name = "bumpversion-0.5.3";
178 name = "bumpversion-0.5.3";
179 doCheck = false;
179 doCheck = false;
180 src = fetchurl {
180 src = fetchurl {
181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
183 };
183 };
184 meta = {
184 meta = {
185 license = [ pkgs.lib.licenses.mit ];
185 license = [ pkgs.lib.licenses.mit ];
186 };
186 };
187 };
187 };
188 "cachetools" = super.buildPythonPackage {
188 "cachetools" = super.buildPythonPackage {
189 name = "cachetools-3.1.1";
189 name = "cachetools-3.1.1";
190 doCheck = false;
190 doCheck = false;
191 src = fetchurl {
191 src = fetchurl {
192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
194 };
194 };
195 meta = {
195 meta = {
196 license = [ pkgs.lib.licenses.mit ];
196 license = [ pkgs.lib.licenses.mit ];
197 };
197 };
198 };
198 };
199 "celery" = super.buildPythonPackage {
199 "celery" = super.buildPythonPackage {
200 name = "celery-4.3.0";
200 name = "celery-4.3.0";
201 doCheck = false;
201 doCheck = false;
202 propagatedBuildInputs = [
202 propagatedBuildInputs = [
203 self."pytz"
203 self."pytz"
204 self."billiard"
204 self."billiard"
205 self."kombu"
205 self."kombu"
206 self."vine"
206 self."vine"
207 ];
207 ];
208 src = fetchurl {
208 src = fetchurl {
209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
211 };
211 };
212 meta = {
212 meta = {
213 license = [ pkgs.lib.licenses.bsdOriginal ];
213 license = [ pkgs.lib.licenses.bsdOriginal ];
214 };
214 };
215 };
215 };
216 "certifi" = super.buildPythonPackage {
216 "certifi" = super.buildPythonPackage {
217 name = "certifi-2020.4.5.1";
217 name = "certifi-2020.4.5.1";
218 doCheck = false;
218 doCheck = false;
219 src = fetchurl {
219 src = fetchurl {
220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
222 };
222 };
223 meta = {
223 meta = {
224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
225 };
225 };
226 };
226 };
227 "cffi" = super.buildPythonPackage {
227 "cffi" = super.buildPythonPackage {
228 name = "cffi-1.12.3";
228 name = "cffi-1.12.3";
229 doCheck = false;
229 doCheck = false;
230 propagatedBuildInputs = [
230 propagatedBuildInputs = [
231 self."pycparser"
231 self."pycparser"
232 ];
232 ];
233 src = fetchurl {
233 src = fetchurl {
234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
236 };
236 };
237 meta = {
237 meta = {
238 license = [ pkgs.lib.licenses.mit ];
238 license = [ pkgs.lib.licenses.mit ];
239 };
239 };
240 };
240 };
241 "chameleon" = super.buildPythonPackage {
241 "chameleon" = super.buildPythonPackage {
242 name = "chameleon-2.24";
242 name = "chameleon-2.24";
243 doCheck = false;
243 doCheck = false;
244 src = fetchurl {
244 src = fetchurl {
245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
247 };
247 };
248 meta = {
248 meta = {
249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
250 };
250 };
251 };
251 };
252 "channelstream" = super.buildPythonPackage {
252 "channelstream" = super.buildPythonPackage {
253 name = "channelstream-0.6.14";
253 name = "channelstream-0.6.14";
254 doCheck = false;
254 doCheck = false;
255 propagatedBuildInputs = [
255 propagatedBuildInputs = [
256 self."gevent"
256 self."gevent"
257 self."ws4py"
257 self."ws4py"
258 self."marshmallow"
258 self."marshmallow"
259 self."python-dateutil"
259 self."python-dateutil"
260 self."pyramid"
260 self."pyramid"
261 self."pyramid-jinja2"
261 self."pyramid-jinja2"
262 self."pyramid-apispec"
262 self."pyramid-apispec"
263 self."itsdangerous"
263 self."itsdangerous"
264 self."requests"
264 self."requests"
265 self."six"
265 self."six"
266 ];
266 ];
267 src = fetchurl {
267 src = fetchurl {
268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
270 };
270 };
271 meta = {
271 meta = {
272 license = [ pkgs.lib.licenses.bsdOriginal ];
272 license = [ pkgs.lib.licenses.bsdOriginal ];
273 };
273 };
274 };
274 };
275 "chardet" = super.buildPythonPackage {
275 "chardet" = super.buildPythonPackage {
276 name = "chardet-3.0.4";
276 name = "chardet-3.0.4";
277 doCheck = false;
277 doCheck = false;
278 src = fetchurl {
278 src = fetchurl {
279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
281 };
281 };
282 meta = {
282 meta = {
283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
284 };
284 };
285 };
285 };
286 "click" = super.buildPythonPackage {
286 "click" = super.buildPythonPackage {
287 name = "click-7.0";
287 name = "click-7.0";
288 doCheck = false;
288 doCheck = false;
289 src = fetchurl {
289 src = fetchurl {
290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
292 };
292 };
293 meta = {
293 meta = {
294 license = [ pkgs.lib.licenses.bsdOriginal ];
294 license = [ pkgs.lib.licenses.bsdOriginal ];
295 };
295 };
296 };
296 };
297 "colander" = super.buildPythonPackage {
297 "colander" = super.buildPythonPackage {
298 name = "colander-1.7.0";
298 name = "colander-1.7.0";
299 doCheck = false;
299 doCheck = false;
300 propagatedBuildInputs = [
300 propagatedBuildInputs = [
301 self."translationstring"
301 self."translationstring"
302 self."iso8601"
302 self."iso8601"
303 self."enum34"
303 self."enum34"
304 ];
304 ];
305 src = fetchurl {
305 src = fetchurl {
306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
308 };
308 };
309 meta = {
309 meta = {
310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
311 };
311 };
312 };
312 };
313 "configobj" = super.buildPythonPackage {
313 "configobj" = super.buildPythonPackage {
314 name = "configobj-5.0.6";
314 name = "configobj-5.0.6";
315 doCheck = false;
315 doCheck = false;
316 propagatedBuildInputs = [
316 propagatedBuildInputs = [
317 self."six"
317 self."six"
318 ];
318 ];
319 src = fetchurl {
319 src = fetchurl {
320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
322 };
322 };
323 meta = {
323 meta = {
324 license = [ pkgs.lib.licenses.bsdOriginal ];
324 license = [ pkgs.lib.licenses.bsdOriginal ];
325 };
325 };
326 };
326 };
327 "configparser" = super.buildPythonPackage {
327 "configparser" = super.buildPythonPackage {
328 name = "configparser-4.0.2";
328 name = "configparser-4.0.2";
329 doCheck = false;
329 doCheck = false;
330 src = fetchurl {
330 src = fetchurl {
331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
333 };
333 };
334 meta = {
334 meta = {
335 license = [ pkgs.lib.licenses.mit ];
335 license = [ pkgs.lib.licenses.mit ];
336 };
336 };
337 };
337 };
338 "contextlib2" = super.buildPythonPackage {
338 "contextlib2" = super.buildPythonPackage {
339 name = "contextlib2-0.6.0.post1";
339 name = "contextlib2-0.6.0.post1";
340 doCheck = false;
340 doCheck = false;
341 src = fetchurl {
341 src = fetchurl {
342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
344 };
344 };
345 meta = {
345 meta = {
346 license = [ pkgs.lib.licenses.psfl ];
346 license = [ pkgs.lib.licenses.psfl ];
347 };
347 };
348 };
348 };
349 "cov-core" = super.buildPythonPackage {
349 "cov-core" = super.buildPythonPackage {
350 name = "cov-core-1.15.0";
350 name = "cov-core-1.15.0";
351 doCheck = false;
351 doCheck = false;
352 propagatedBuildInputs = [
352 propagatedBuildInputs = [
353 self."coverage"
353 self."coverage"
354 ];
354 ];
355 src = fetchurl {
355 src = fetchurl {
356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
358 };
358 };
359 meta = {
359 meta = {
360 license = [ pkgs.lib.licenses.mit ];
360 license = [ pkgs.lib.licenses.mit ];
361 };
361 };
362 };
362 };
363 "coverage" = super.buildPythonPackage {
363 "coverage" = super.buildPythonPackage {
364 name = "coverage-4.5.4";
364 name = "coverage-4.5.4";
365 doCheck = false;
365 doCheck = false;
366 src = fetchurl {
366 src = fetchurl {
367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
369 };
369 };
370 meta = {
370 meta = {
371 license = [ pkgs.lib.licenses.asl20 ];
371 license = [ pkgs.lib.licenses.asl20 ];
372 };
372 };
373 };
373 };
374 "cryptography" = super.buildPythonPackage {
374 "cryptography" = super.buildPythonPackage {
375 name = "cryptography-2.6.1";
375 name = "cryptography-2.6.1";
376 doCheck = false;
376 doCheck = false;
377 propagatedBuildInputs = [
377 propagatedBuildInputs = [
378 self."asn1crypto"
378 self."asn1crypto"
379 self."six"
379 self."six"
380 self."cffi"
380 self."cffi"
381 self."enum34"
381 self."enum34"
382 self."ipaddress"
382 self."ipaddress"
383 ];
383 ];
384 src = fetchurl {
384 src = fetchurl {
385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
387 };
387 };
388 meta = {
388 meta = {
389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
390 };
390 };
391 };
391 };
392 "cssselect" = super.buildPythonPackage {
392 "cssselect" = super.buildPythonPackage {
393 name = "cssselect-1.0.3";
393 name = "cssselect-1.0.3";
394 doCheck = false;
394 doCheck = false;
395 src = fetchurl {
395 src = fetchurl {
396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
398 };
398 };
399 meta = {
399 meta = {
400 license = [ pkgs.lib.licenses.bsdOriginal ];
400 license = [ pkgs.lib.licenses.bsdOriginal ];
401 };
401 };
402 };
402 };
403 "cssutils" = super.buildPythonPackage {
403 "cssutils" = super.buildPythonPackage {
404 name = "cssutils-1.0.2";
404 name = "cssutils-1.0.2";
405 doCheck = false;
405 doCheck = false;
406 src = fetchurl {
406 src = fetchurl {
407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
409 };
409 };
410 meta = {
410 meta = {
411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
412 };
412 };
413 };
413 };
414 "decorator" = super.buildPythonPackage {
414 "decorator" = super.buildPythonPackage {
415 name = "decorator-4.1.2";
415 name = "decorator-4.1.2";
416 doCheck = false;
416 doCheck = false;
417 src = fetchurl {
417 src = fetchurl {
418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
420 };
420 };
421 meta = {
421 meta = {
422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
423 };
423 };
424 };
424 };
425 "deform" = super.buildPythonPackage {
425 "deform" = super.buildPythonPackage {
426 name = "deform-2.0.8";
426 name = "deform-2.0.8";
427 doCheck = false;
427 doCheck = false;
428 propagatedBuildInputs = [
428 propagatedBuildInputs = [
429 self."chameleon"
429 self."chameleon"
430 self."colander"
430 self."colander"
431 self."iso8601"
431 self."iso8601"
432 self."peppercorn"
432 self."peppercorn"
433 self."translationstring"
433 self."translationstring"
434 self."zope.deprecation"
434 self."zope.deprecation"
435 ];
435 ];
436 src = fetchurl {
436 src = fetchurl {
437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
439 };
439 };
440 meta = {
440 meta = {
441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
442 };
442 };
443 };
443 };
444 "defusedxml" = super.buildPythonPackage {
444 "defusedxml" = super.buildPythonPackage {
445 name = "defusedxml-0.6.0";
445 name = "defusedxml-0.6.0";
446 doCheck = false;
446 doCheck = false;
447 src = fetchurl {
447 src = fetchurl {
448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
450 };
450 };
451 meta = {
451 meta = {
452 license = [ pkgs.lib.licenses.psfl ];
452 license = [ pkgs.lib.licenses.psfl ];
453 };
453 };
454 };
454 };
455 "dm.xmlsec.binding" = super.buildPythonPackage {
455 "dm.xmlsec.binding" = super.buildPythonPackage {
456 name = "dm.xmlsec.binding-1.3.7";
456 name = "dm.xmlsec.binding-1.3.7";
457 doCheck = false;
457 doCheck = false;
458 propagatedBuildInputs = [
458 propagatedBuildInputs = [
459 self."setuptools"
459 self."setuptools"
460 self."lxml"
460 self."lxml"
461 ];
461 ];
462 src = fetchurl {
462 src = fetchurl {
463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
465 };
465 };
466 meta = {
466 meta = {
467 license = [ pkgs.lib.licenses.bsdOriginal ];
467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 };
468 };
469 };
469 };
470 "docutils" = super.buildPythonPackage {
470 "docutils" = super.buildPythonPackage {
471 name = "docutils-0.16";
471 name = "docutils-0.16";
472 doCheck = false;
472 doCheck = false;
473 src = fetchurl {
473 src = fetchurl {
474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
476 };
476 };
477 meta = {
477 meta = {
478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
479 };
479 };
480 };
480 };
481 "dogpile.cache" = super.buildPythonPackage {
481 "dogpile.cache" = super.buildPythonPackage {
482 name = "dogpile.cache-0.9.0";
482 name = "dogpile.cache-0.9.0";
483 doCheck = false;
483 doCheck = false;
484 propagatedBuildInputs = [
484 propagatedBuildInputs = [
485 self."decorator"
485 self."decorator"
486 ];
486 ];
487 src = fetchurl {
487 src = fetchurl {
488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
490 };
490 };
491 meta = {
491 meta = {
492 license = [ pkgs.lib.licenses.bsdOriginal ];
492 license = [ pkgs.lib.licenses.bsdOriginal ];
493 };
493 };
494 };
494 };
495 "dogpile.core" = super.buildPythonPackage {
495 "dogpile.core" = super.buildPythonPackage {
496 name = "dogpile.core-0.4.1";
496 name = "dogpile.core-0.4.1";
497 doCheck = false;
497 doCheck = false;
498 src = fetchurl {
498 src = fetchurl {
499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
501 };
501 };
502 meta = {
502 meta = {
503 license = [ pkgs.lib.licenses.bsdOriginal ];
503 license = [ pkgs.lib.licenses.bsdOriginal ];
504 };
504 };
505 };
505 };
506 "ecdsa" = super.buildPythonPackage {
506 "ecdsa" = super.buildPythonPackage {
507 name = "ecdsa-0.13.2";
507 name = "ecdsa-0.13.2";
508 doCheck = false;
508 doCheck = false;
509 src = fetchurl {
509 src = fetchurl {
510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
512 };
512 };
513 meta = {
513 meta = {
514 license = [ pkgs.lib.licenses.mit ];
514 license = [ pkgs.lib.licenses.mit ];
515 };
515 };
516 };
516 };
517 "elasticsearch" = super.buildPythonPackage {
517 "elasticsearch" = super.buildPythonPackage {
518 name = "elasticsearch-6.3.1";
518 name = "elasticsearch-6.3.1";
519 doCheck = false;
519 doCheck = false;
520 propagatedBuildInputs = [
520 propagatedBuildInputs = [
521 self."urllib3"
521 self."urllib3"
522 ];
522 ];
523 src = fetchurl {
523 src = fetchurl {
524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
526 };
526 };
527 meta = {
527 meta = {
528 license = [ pkgs.lib.licenses.asl20 ];
528 license = [ pkgs.lib.licenses.asl20 ];
529 };
529 };
530 };
530 };
531 "elasticsearch-dsl" = super.buildPythonPackage {
531 "elasticsearch-dsl" = super.buildPythonPackage {
532 name = "elasticsearch-dsl-6.3.1";
532 name = "elasticsearch-dsl-6.3.1";
533 doCheck = false;
533 doCheck = false;
534 propagatedBuildInputs = [
534 propagatedBuildInputs = [
535 self."six"
535 self."six"
536 self."python-dateutil"
536 self."python-dateutil"
537 self."elasticsearch"
537 self."elasticsearch"
538 self."ipaddress"
538 self."ipaddress"
539 ];
539 ];
540 src = fetchurl {
540 src = fetchurl {
541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
543 };
543 };
544 meta = {
544 meta = {
545 license = [ pkgs.lib.licenses.asl20 ];
545 license = [ pkgs.lib.licenses.asl20 ];
546 };
546 };
547 };
547 };
548 "elasticsearch1" = super.buildPythonPackage {
548 "elasticsearch1" = super.buildPythonPackage {
549 name = "elasticsearch1-1.10.0";
549 name = "elasticsearch1-1.10.0";
550 doCheck = false;
550 doCheck = false;
551 propagatedBuildInputs = [
551 propagatedBuildInputs = [
552 self."urllib3"
552 self."urllib3"
553 ];
553 ];
554 src = fetchurl {
554 src = fetchurl {
555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
557 };
557 };
558 meta = {
558 meta = {
559 license = [ pkgs.lib.licenses.asl20 ];
559 license = [ pkgs.lib.licenses.asl20 ];
560 };
560 };
561 };
561 };
562 "elasticsearch1-dsl" = super.buildPythonPackage {
562 "elasticsearch1-dsl" = super.buildPythonPackage {
563 name = "elasticsearch1-dsl-0.0.12";
563 name = "elasticsearch1-dsl-0.0.12";
564 doCheck = false;
564 doCheck = false;
565 propagatedBuildInputs = [
565 propagatedBuildInputs = [
566 self."six"
566 self."six"
567 self."python-dateutil"
567 self."python-dateutil"
568 self."elasticsearch1"
568 self."elasticsearch1"
569 ];
569 ];
570 src = fetchurl {
570 src = fetchurl {
571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
573 };
573 };
574 meta = {
574 meta = {
575 license = [ pkgs.lib.licenses.asl20 ];
575 license = [ pkgs.lib.licenses.asl20 ];
576 };
576 };
577 };
577 };
578 "elasticsearch2" = super.buildPythonPackage {
578 "elasticsearch2" = super.buildPythonPackage {
579 name = "elasticsearch2-2.5.1";
579 name = "elasticsearch2-2.5.1";
580 doCheck = false;
580 doCheck = false;
581 propagatedBuildInputs = [
581 propagatedBuildInputs = [
582 self."urllib3"
582 self."urllib3"
583 ];
583 ];
584 src = fetchurl {
584 src = fetchurl {
585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
587 };
587 };
588 meta = {
588 meta = {
589 license = [ pkgs.lib.licenses.asl20 ];
589 license = [ pkgs.lib.licenses.asl20 ];
590 };
590 };
591 };
591 };
592 "entrypoints" = super.buildPythonPackage {
592 "entrypoints" = super.buildPythonPackage {
593 name = "entrypoints-0.2.2";
593 name = "entrypoints-0.2.2";
594 doCheck = false;
594 doCheck = false;
595 propagatedBuildInputs = [
595 propagatedBuildInputs = [
596 self."configparser"
596 self."configparser"
597 ];
597 ];
598 src = fetchurl {
598 src = fetchurl {
599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
601 };
601 };
602 meta = {
602 meta = {
603 license = [ pkgs.lib.licenses.mit ];
603 license = [ pkgs.lib.licenses.mit ];
604 };
604 };
605 };
605 };
606 "enum34" = super.buildPythonPackage {
606 "enum34" = super.buildPythonPackage {
607 name = "enum34-1.1.10";
607 name = "enum34-1.1.10";
608 doCheck = false;
608 doCheck = false;
609 src = fetchurl {
609 src = fetchurl {
610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
612 };
612 };
613 meta = {
613 meta = {
614 license = [ pkgs.lib.licenses.bsdOriginal ];
614 license = [ pkgs.lib.licenses.bsdOriginal ];
615 };
615 };
616 };
616 };
617 "formencode" = super.buildPythonPackage {
617 "formencode" = super.buildPythonPackage {
618 name = "formencode-1.2.4";
618 name = "formencode-1.2.4";
619 doCheck = false;
619 doCheck = false;
620 src = fetchurl {
620 src = fetchurl {
621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
623 };
623 };
624 meta = {
624 meta = {
625 license = [ pkgs.lib.licenses.psfl ];
625 license = [ pkgs.lib.licenses.psfl ];
626 };
626 };
627 };
627 };
628 "funcsigs" = super.buildPythonPackage {
628 "funcsigs" = super.buildPythonPackage {
629 name = "funcsigs-1.0.2";
629 name = "funcsigs-1.0.2";
630 doCheck = false;
630 doCheck = false;
631 src = fetchurl {
631 src = fetchurl {
632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
634 };
634 };
635 meta = {
635 meta = {
636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
637 };
637 };
638 };
638 };
639 "functools32" = super.buildPythonPackage {
639 "functools32" = super.buildPythonPackage {
640 name = "functools32-3.2.3.post2";
640 name = "functools32-3.2.3.post2";
641 doCheck = false;
641 doCheck = false;
642 src = fetchurl {
642 src = fetchurl {
643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
645 };
645 };
646 meta = {
646 meta = {
647 license = [ pkgs.lib.licenses.psfl ];
647 license = [ pkgs.lib.licenses.psfl ];
648 };
648 };
649 };
649 };
650 "future" = super.buildPythonPackage {
650 "future" = super.buildPythonPackage {
651 name = "future-0.14.3";
651 name = "future-0.14.3";
652 doCheck = false;
652 doCheck = false;
653 src = fetchurl {
653 src = fetchurl {
654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
656 };
656 };
657 meta = {
657 meta = {
658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
659 };
659 };
660 };
660 };
661 "futures" = super.buildPythonPackage {
661 "futures" = super.buildPythonPackage {
662 name = "futures-3.0.2";
662 name = "futures-3.0.2";
663 doCheck = false;
663 doCheck = false;
664 src = fetchurl {
664 src = fetchurl {
665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
667 };
667 };
668 meta = {
668 meta = {
669 license = [ pkgs.lib.licenses.bsdOriginal ];
669 license = [ pkgs.lib.licenses.bsdOriginal ];
670 };
670 };
671 };
671 };
672 "gevent" = super.buildPythonPackage {
672 "gevent" = super.buildPythonPackage {
673 name = "gevent-1.5.0";
673 name = "gevent-1.5.0";
674 doCheck = false;
674 doCheck = false;
675 propagatedBuildInputs = [
675 propagatedBuildInputs = [
676 self."greenlet"
676 self."greenlet"
677 ];
677 ];
678 src = fetchurl {
678 src = fetchurl {
679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
681 };
681 };
682 meta = {
682 meta = {
683 license = [ pkgs.lib.licenses.mit ];
683 license = [ pkgs.lib.licenses.mit ];
684 };
684 };
685 };
685 };
686 "gnureadline" = super.buildPythonPackage {
686 "gnureadline" = super.buildPythonPackage {
687 name = "gnureadline-6.3.8";
687 name = "gnureadline-6.3.8";
688 doCheck = false;
688 doCheck = false;
689 src = fetchurl {
689 src = fetchurl {
690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
692 };
692 };
693 meta = {
693 meta = {
694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
695 };
695 };
696 };
696 };
697 "gprof2dot" = super.buildPythonPackage {
697 "gprof2dot" = super.buildPythonPackage {
698 name = "gprof2dot-2017.9.19";
698 name = "gprof2dot-2017.9.19";
699 doCheck = false;
699 doCheck = false;
700 src = fetchurl {
700 src = fetchurl {
701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
703 };
703 };
704 meta = {
704 meta = {
705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
706 };
706 };
707 };
707 };
708 "greenlet" = super.buildPythonPackage {
708 "greenlet" = super.buildPythonPackage {
709 name = "greenlet-0.4.15";
709 name = "greenlet-0.4.15";
710 doCheck = false;
710 doCheck = false;
711 src = fetchurl {
711 src = fetchurl {
712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
714 };
714 };
715 meta = {
715 meta = {
716 license = [ pkgs.lib.licenses.mit ];
716 license = [ pkgs.lib.licenses.mit ];
717 };
717 };
718 };
718 };
719 "gunicorn" = super.buildPythonPackage {
719 "gunicorn" = super.buildPythonPackage {
720 name = "gunicorn-19.9.0";
720 name = "gunicorn-19.9.0";
721 doCheck = false;
721 doCheck = false;
722 src = fetchurl {
722 src = fetchurl {
723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
725 };
725 };
726 meta = {
726 meta = {
727 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
728 };
728 };
729 };
729 };
730 "hupper" = super.buildPythonPackage {
730 "hupper" = super.buildPythonPackage {
731 name = "hupper-1.10.2";
731 name = "hupper-1.10.2";
732 doCheck = false;
732 doCheck = false;
733 src = fetchurl {
733 src = fetchurl {
734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
736 };
736 };
737 meta = {
737 meta = {
738 license = [ pkgs.lib.licenses.mit ];
738 license = [ pkgs.lib.licenses.mit ];
739 };
739 };
740 };
740 };
741 "idna" = super.buildPythonPackage {
741 "idna" = super.buildPythonPackage {
742 name = "idna-2.8";
742 name = "idna-2.8";
743 doCheck = false;
743 doCheck = false;
744 src = fetchurl {
744 src = fetchurl {
745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
747 };
747 };
748 meta = {
748 meta = {
749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
750 };
750 };
751 };
751 };
752 "importlib-metadata" = super.buildPythonPackage {
752 "importlib-metadata" = super.buildPythonPackage {
753 name = "importlib-metadata-1.6.0";
753 name = "importlib-metadata-1.6.0";
754 doCheck = false;
754 doCheck = false;
755 propagatedBuildInputs = [
755 propagatedBuildInputs = [
756 self."zipp"
756 self."zipp"
757 self."pathlib2"
757 self."pathlib2"
758 self."contextlib2"
758 self."contextlib2"
759 self."configparser"
759 self."configparser"
760 ];
760 ];
761 src = fetchurl {
761 src = fetchurl {
762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
764 };
764 };
765 meta = {
765 meta = {
766 license = [ pkgs.lib.licenses.asl20 ];
766 license = [ pkgs.lib.licenses.asl20 ];
767 };
767 };
768 };
768 };
769 "infrae.cache" = super.buildPythonPackage {
769 "infrae.cache" = super.buildPythonPackage {
770 name = "infrae.cache-1.0.1";
770 name = "infrae.cache-1.0.1";
771 doCheck = false;
771 doCheck = false;
772 propagatedBuildInputs = [
772 propagatedBuildInputs = [
773 self."beaker"
773 self."beaker"
774 self."repoze.lru"
774 self."repoze.lru"
775 ];
775 ];
776 src = fetchurl {
776 src = fetchurl {
777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
779 };
779 };
780 meta = {
780 meta = {
781 license = [ pkgs.lib.licenses.zpl21 ];
781 license = [ pkgs.lib.licenses.zpl21 ];
782 };
782 };
783 };
783 };
784 "invoke" = super.buildPythonPackage {
784 "invoke" = super.buildPythonPackage {
785 name = "invoke-0.13.0";
785 name = "invoke-0.13.0";
786 doCheck = false;
786 doCheck = false;
787 src = fetchurl {
787 src = fetchurl {
788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
790 };
790 };
791 meta = {
791 meta = {
792 license = [ pkgs.lib.licenses.bsdOriginal ];
792 license = [ pkgs.lib.licenses.bsdOriginal ];
793 };
793 };
794 };
794 };
795 "ipaddress" = super.buildPythonPackage {
795 "ipaddress" = super.buildPythonPackage {
796 name = "ipaddress-1.0.23";
796 name = "ipaddress-1.0.23";
797 doCheck = false;
797 doCheck = false;
798 src = fetchurl {
798 src = fetchurl {
799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
801 };
801 };
802 meta = {
802 meta = {
803 license = [ pkgs.lib.licenses.psfl ];
803 license = [ pkgs.lib.licenses.psfl ];
804 };
804 };
805 };
805 };
806 "ipdb" = super.buildPythonPackage {
806 "ipdb" = super.buildPythonPackage {
807 name = "ipdb-0.13.2";
807 name = "ipdb-0.13.2";
808 doCheck = false;
808 doCheck = false;
809 propagatedBuildInputs = [
809 propagatedBuildInputs = [
810 self."setuptools"
810 self."setuptools"
811 self."ipython"
811 self."ipython"
812 ];
812 ];
813 src = fetchurl {
813 src = fetchurl {
814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
816 };
816 };
817 meta = {
817 meta = {
818 license = [ pkgs.lib.licenses.bsdOriginal ];
818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 };
819 };
820 };
820 };
821 "ipython" = super.buildPythonPackage {
821 "ipython" = super.buildPythonPackage {
822 name = "ipython-5.1.0";
822 name = "ipython-5.1.0";
823 doCheck = false;
823 doCheck = false;
824 propagatedBuildInputs = [
824 propagatedBuildInputs = [
825 self."setuptools"
825 self."setuptools"
826 self."decorator"
826 self."decorator"
827 self."pickleshare"
827 self."pickleshare"
828 self."simplegeneric"
828 self."simplegeneric"
829 self."traitlets"
829 self."traitlets"
830 self."prompt-toolkit"
830 self."prompt-toolkit"
831 self."pygments"
831 self."pygments"
832 self."pexpect"
832 self."pexpect"
833 self."backports.shutil-get-terminal-size"
833 self."backports.shutil-get-terminal-size"
834 self."pathlib2"
834 self."pathlib2"
835 self."pexpect"
835 self."pexpect"
836 ];
836 ];
837 src = fetchurl {
837 src = fetchurl {
838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
840 };
840 };
841 meta = {
841 meta = {
842 license = [ pkgs.lib.licenses.bsdOriginal ];
842 license = [ pkgs.lib.licenses.bsdOriginal ];
843 };
843 };
844 };
844 };
845 "ipython-genutils" = super.buildPythonPackage {
845 "ipython-genutils" = super.buildPythonPackage {
846 name = "ipython-genutils-0.2.0";
846 name = "ipython-genutils-0.2.0";
847 doCheck = false;
847 doCheck = false;
848 src = fetchurl {
848 src = fetchurl {
849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
851 };
851 };
852 meta = {
852 meta = {
853 license = [ pkgs.lib.licenses.bsdOriginal ];
853 license = [ pkgs.lib.licenses.bsdOriginal ];
854 };
854 };
855 };
855 };
856 "iso8601" = super.buildPythonPackage {
856 "iso8601" = super.buildPythonPackage {
857 name = "iso8601-0.1.12";
857 name = "iso8601-0.1.12";
858 doCheck = false;
858 doCheck = false;
859 src = fetchurl {
859 src = fetchurl {
860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
862 };
862 };
863 meta = {
863 meta = {
864 license = [ pkgs.lib.licenses.mit ];
864 license = [ pkgs.lib.licenses.mit ];
865 };
865 };
866 };
866 };
867 "isodate" = super.buildPythonPackage {
867 "isodate" = super.buildPythonPackage {
868 name = "isodate-0.6.0";
868 name = "isodate-0.6.0";
869 doCheck = false;
869 doCheck = false;
870 propagatedBuildInputs = [
870 propagatedBuildInputs = [
871 self."six"
871 self."six"
872 ];
872 ];
873 src = fetchurl {
873 src = fetchurl {
874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
876 };
876 };
877 meta = {
877 meta = {
878 license = [ pkgs.lib.licenses.bsdOriginal ];
878 license = [ pkgs.lib.licenses.bsdOriginal ];
879 };
879 };
880 };
880 };
881 "itsdangerous" = super.buildPythonPackage {
881 "itsdangerous" = super.buildPythonPackage {
882 name = "itsdangerous-1.1.0";
882 name = "itsdangerous-1.1.0";
883 doCheck = false;
883 doCheck = false;
884 src = fetchurl {
884 src = fetchurl {
885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
887 };
887 };
888 meta = {
888 meta = {
889 license = [ pkgs.lib.licenses.bsdOriginal ];
889 license = [ pkgs.lib.licenses.bsdOriginal ];
890 };
890 };
891 };
891 };
892 "jinja2" = super.buildPythonPackage {
892 "jinja2" = super.buildPythonPackage {
893 name = "jinja2-2.9.6";
893 name = "jinja2-2.9.6";
894 doCheck = false;
894 doCheck = false;
895 propagatedBuildInputs = [
895 propagatedBuildInputs = [
896 self."markupsafe"
896 self."markupsafe"
897 ];
897 ];
898 src = fetchurl {
898 src = fetchurl {
899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
901 };
901 };
902 meta = {
902 meta = {
903 license = [ pkgs.lib.licenses.bsdOriginal ];
903 license = [ pkgs.lib.licenses.bsdOriginal ];
904 };
904 };
905 };
905 };
906 "jsonschema" = super.buildPythonPackage {
906 "jsonschema" = super.buildPythonPackage {
907 name = "jsonschema-2.6.0";
907 name = "jsonschema-2.6.0";
908 doCheck = false;
908 doCheck = false;
909 propagatedBuildInputs = [
909 propagatedBuildInputs = [
910 self."functools32"
910 self."functools32"
911 ];
911 ];
912 src = fetchurl {
912 src = fetchurl {
913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
915 };
915 };
916 meta = {
916 meta = {
917 license = [ pkgs.lib.licenses.mit ];
917 license = [ pkgs.lib.licenses.mit ];
918 };
918 };
919 };
919 };
920 "jupyter-client" = super.buildPythonPackage {
920 "jupyter-client" = super.buildPythonPackage {
921 name = "jupyter-client-5.0.0";
921 name = "jupyter-client-5.0.0";
922 doCheck = false;
922 doCheck = false;
923 propagatedBuildInputs = [
923 propagatedBuildInputs = [
924 self."traitlets"
924 self."traitlets"
925 self."jupyter-core"
925 self."jupyter-core"
926 self."pyzmq"
926 self."pyzmq"
927 self."python-dateutil"
927 self."python-dateutil"
928 ];
928 ];
929 src = fetchurl {
929 src = fetchurl {
930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
932 };
932 };
933 meta = {
933 meta = {
934 license = [ pkgs.lib.licenses.bsdOriginal ];
934 license = [ pkgs.lib.licenses.bsdOriginal ];
935 };
935 };
936 };
936 };
937 "jupyter-core" = super.buildPythonPackage {
937 "jupyter-core" = super.buildPythonPackage {
938 name = "jupyter-core-4.5.0";
938 name = "jupyter-core-4.5.0";
939 doCheck = false;
939 doCheck = false;
940 propagatedBuildInputs = [
940 propagatedBuildInputs = [
941 self."traitlets"
941 self."traitlets"
942 ];
942 ];
943 src = fetchurl {
943 src = fetchurl {
944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
946 };
946 };
947 meta = {
947 meta = {
948 license = [ pkgs.lib.licenses.bsdOriginal ];
948 license = [ pkgs.lib.licenses.bsdOriginal ];
949 };
949 };
950 };
950 };
951 "kombu" = super.buildPythonPackage {
951 "kombu" = super.buildPythonPackage {
952 name = "kombu-4.6.6";
952 name = "kombu-4.6.6";
953 doCheck = false;
953 doCheck = false;
954 propagatedBuildInputs = [
954 propagatedBuildInputs = [
955 self."amqp"
955 self."amqp"
956 self."importlib-metadata"
956 self."importlib-metadata"
957 ];
957 ];
958 src = fetchurl {
958 src = fetchurl {
959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
961 };
961 };
962 meta = {
962 meta = {
963 license = [ pkgs.lib.licenses.bsdOriginal ];
963 license = [ pkgs.lib.licenses.bsdOriginal ];
964 };
964 };
965 };
965 };
966 "lxml" = super.buildPythonPackage {
966 "lxml" = super.buildPythonPackage {
967 name = "lxml-4.2.5";
967 name = "lxml-4.2.5";
968 doCheck = false;
968 doCheck = false;
969 src = fetchurl {
969 src = fetchurl {
970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
972 };
972 };
973 meta = {
973 meta = {
974 license = [ pkgs.lib.licenses.bsdOriginal ];
974 license = [ pkgs.lib.licenses.bsdOriginal ];
975 };
975 };
976 };
976 };
977 "mako" = super.buildPythonPackage {
977 "mako" = super.buildPythonPackage {
978 name = "mako-1.1.0";
978 name = "mako-1.1.0";
979 doCheck = false;
979 doCheck = false;
980 propagatedBuildInputs = [
980 propagatedBuildInputs = [
981 self."markupsafe"
981 self."markupsafe"
982 ];
982 ];
983 src = fetchurl {
983 src = fetchurl {
984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
986 };
986 };
987 meta = {
987 meta = {
988 license = [ pkgs.lib.licenses.mit ];
988 license = [ pkgs.lib.licenses.mit ];
989 };
989 };
990 };
990 };
991 "markdown" = super.buildPythonPackage {
991 "markdown" = super.buildPythonPackage {
992 name = "markdown-2.6.11";
992 name = "markdown-2.6.11";
993 doCheck = false;
993 doCheck = false;
994 src = fetchurl {
994 src = fetchurl {
995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
997 };
997 };
998 meta = {
998 meta = {
999 license = [ pkgs.lib.licenses.bsdOriginal ];
999 license = [ pkgs.lib.licenses.bsdOriginal ];
1000 };
1000 };
1001 };
1001 };
1002 "markupsafe" = super.buildPythonPackage {
1002 "markupsafe" = super.buildPythonPackage {
1003 name = "markupsafe-1.1.1";
1003 name = "markupsafe-1.1.1";
1004 doCheck = false;
1004 doCheck = false;
1005 src = fetchurl {
1005 src = fetchurl {
1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1008 };
1008 };
1009 meta = {
1009 meta = {
1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1011 };
1011 };
1012 };
1012 };
1013 "marshmallow" = super.buildPythonPackage {
1013 "marshmallow" = super.buildPythonPackage {
1014 name = "marshmallow-2.18.0";
1014 name = "marshmallow-2.18.0";
1015 doCheck = false;
1015 doCheck = false;
1016 src = fetchurl {
1016 src = fetchurl {
1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1019 };
1019 };
1020 meta = {
1020 meta = {
1021 license = [ pkgs.lib.licenses.mit ];
1021 license = [ pkgs.lib.licenses.mit ];
1022 };
1022 };
1023 };
1023 };
1024 "mistune" = super.buildPythonPackage {
1024 "mistune" = super.buildPythonPackage {
1025 name = "mistune-0.8.4";
1025 name = "mistune-0.8.4";
1026 doCheck = false;
1026 doCheck = false;
1027 src = fetchurl {
1027 src = fetchurl {
1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1030 };
1030 };
1031 meta = {
1031 meta = {
1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1033 };
1033 };
1034 };
1034 };
1035 "mock" = super.buildPythonPackage {
1035 "mock" = super.buildPythonPackage {
1036 name = "mock-3.0.5";
1036 name = "mock-3.0.5";
1037 doCheck = false;
1037 doCheck = false;
1038 propagatedBuildInputs = [
1038 propagatedBuildInputs = [
1039 self."six"
1039 self."six"
1040 self."funcsigs"
1040 self."funcsigs"
1041 ];
1041 ];
1042 src = fetchurl {
1042 src = fetchurl {
1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1045 };
1045 };
1046 meta = {
1046 meta = {
1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1048 };
1048 };
1049 };
1049 };
1050 "more-itertools" = super.buildPythonPackage {
1050 "more-itertools" = super.buildPythonPackage {
1051 name = "more-itertools-5.0.0";
1051 name = "more-itertools-5.0.0";
1052 doCheck = false;
1052 doCheck = false;
1053 propagatedBuildInputs = [
1053 propagatedBuildInputs = [
1054 self."six"
1054 self."six"
1055 ];
1055 ];
1056 src = fetchurl {
1056 src = fetchurl {
1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1059 };
1059 };
1060 meta = {
1060 meta = {
1061 license = [ pkgs.lib.licenses.mit ];
1061 license = [ pkgs.lib.licenses.mit ];
1062 };
1062 };
1063 };
1063 };
1064 "msgpack-python" = super.buildPythonPackage {
1064 "msgpack-python" = super.buildPythonPackage {
1065 name = "msgpack-python-0.5.6";
1065 name = "msgpack-python-0.5.6";
1066 doCheck = false;
1066 doCheck = false;
1067 src = fetchurl {
1067 src = fetchurl {
1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1070 };
1070 };
1071 meta = {
1071 meta = {
1072 license = [ pkgs.lib.licenses.asl20 ];
1072 license = [ pkgs.lib.licenses.asl20 ];
1073 };
1073 };
1074 };
1074 };
1075 "mysql-python" = super.buildPythonPackage {
1075 "mysql-python" = super.buildPythonPackage {
1076 name = "mysql-python-1.2.5";
1076 name = "mysql-python-1.2.5";
1077 doCheck = false;
1077 doCheck = false;
1078 src = fetchurl {
1078 src = fetchurl {
1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1081 };
1081 };
1082 meta = {
1082 meta = {
1083 license = [ pkgs.lib.licenses.gpl1 ];
1083 license = [ pkgs.lib.licenses.gpl1 ];
1084 };
1084 };
1085 };
1085 };
1086 "nbconvert" = super.buildPythonPackage {
1086 "nbconvert" = super.buildPythonPackage {
1087 name = "nbconvert-5.3.1";
1087 name = "nbconvert-5.3.1";
1088 doCheck = false;
1088 doCheck = false;
1089 propagatedBuildInputs = [
1089 propagatedBuildInputs = [
1090 self."mistune"
1090 self."mistune"
1091 self."jinja2"
1091 self."jinja2"
1092 self."pygments"
1092 self."pygments"
1093 self."traitlets"
1093 self."traitlets"
1094 self."jupyter-core"
1094 self."jupyter-core"
1095 self."nbformat"
1095 self."nbformat"
1096 self."entrypoints"
1096 self."entrypoints"
1097 self."bleach"
1097 self."bleach"
1098 self."pandocfilters"
1098 self."pandocfilters"
1099 self."testpath"
1099 self."testpath"
1100 ];
1100 ];
1101 src = fetchurl {
1101 src = fetchurl {
1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1104 };
1104 };
1105 meta = {
1105 meta = {
1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 };
1107 };
1108 };
1108 };
1109 "nbformat" = super.buildPythonPackage {
1109 "nbformat" = super.buildPythonPackage {
1110 name = "nbformat-4.4.0";
1110 name = "nbformat-4.4.0";
1111 doCheck = false;
1111 doCheck = false;
1112 propagatedBuildInputs = [
1112 propagatedBuildInputs = [
1113 self."ipython-genutils"
1113 self."ipython-genutils"
1114 self."traitlets"
1114 self."traitlets"
1115 self."jsonschema"
1115 self."jsonschema"
1116 self."jupyter-core"
1116 self."jupyter-core"
1117 ];
1117 ];
1118 src = fetchurl {
1118 src = fetchurl {
1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1121 };
1121 };
1122 meta = {
1122 meta = {
1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1124 };
1124 };
1125 };
1125 };
1126 "packaging" = super.buildPythonPackage {
1126 "packaging" = super.buildPythonPackage {
1127 name = "packaging-20.3";
1127 name = "packaging-20.3";
1128 doCheck = false;
1128 doCheck = false;
1129 propagatedBuildInputs = [
1129 propagatedBuildInputs = [
1130 self."pyparsing"
1130 self."pyparsing"
1131 self."six"
1131 self."six"
1132 ];
1132 ];
1133 src = fetchurl {
1133 src = fetchurl {
1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1136 };
1136 };
1137 meta = {
1137 meta = {
1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1139 };
1139 };
1140 };
1140 };
1141 "pandocfilters" = super.buildPythonPackage {
1141 "pandocfilters" = super.buildPythonPackage {
1142 name = "pandocfilters-1.4.2";
1142 name = "pandocfilters-1.4.2";
1143 doCheck = false;
1143 doCheck = false;
1144 src = fetchurl {
1144 src = fetchurl {
1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1147 };
1147 };
1148 meta = {
1148 meta = {
1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1150 };
1150 };
1151 };
1151 };
1152 "paste" = super.buildPythonPackage {
1152 "paste" = super.buildPythonPackage {
1153 name = "paste-3.4.0";
1153 name = "paste-3.4.0";
1154 doCheck = false;
1154 doCheck = false;
1155 propagatedBuildInputs = [
1155 propagatedBuildInputs = [
1156 self."six"
1156 self."six"
1157 ];
1157 ];
1158 src = fetchurl {
1158 src = fetchurl {
1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1161 };
1161 };
1162 meta = {
1162 meta = {
1163 license = [ pkgs.lib.licenses.mit ];
1163 license = [ pkgs.lib.licenses.mit ];
1164 };
1164 };
1165 };
1165 };
1166 "pastedeploy" = super.buildPythonPackage {
1166 "pastedeploy" = super.buildPythonPackage {
1167 name = "pastedeploy-2.1.0";
1167 name = "pastedeploy-2.1.0";
1168 doCheck = false;
1168 doCheck = false;
1169 src = fetchurl {
1169 src = fetchurl {
1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1172 };
1172 };
1173 meta = {
1173 meta = {
1174 license = [ pkgs.lib.licenses.mit ];
1174 license = [ pkgs.lib.licenses.mit ];
1175 };
1175 };
1176 };
1176 };
1177 "pastescript" = super.buildPythonPackage {
1177 "pastescript" = super.buildPythonPackage {
1178 name = "pastescript-3.2.0";
1178 name = "pastescript-3.2.0";
1179 doCheck = false;
1179 doCheck = false;
1180 propagatedBuildInputs = [
1180 propagatedBuildInputs = [
1181 self."paste"
1181 self."paste"
1182 self."pastedeploy"
1182 self."pastedeploy"
1183 self."six"
1183 self."six"
1184 ];
1184 ];
1185 src = fetchurl {
1185 src = fetchurl {
1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1188 };
1188 };
1189 meta = {
1189 meta = {
1190 license = [ pkgs.lib.licenses.mit ];
1190 license = [ pkgs.lib.licenses.mit ];
1191 };
1191 };
1192 };
1192 };
1193 "pathlib2" = super.buildPythonPackage {
1193 "pathlib2" = super.buildPythonPackage {
1194 name = "pathlib2-2.3.5";
1194 name = "pathlib2-2.3.5";
1195 doCheck = false;
1195 doCheck = false;
1196 propagatedBuildInputs = [
1196 propagatedBuildInputs = [
1197 self."six"
1197 self."six"
1198 self."scandir"
1198 self."scandir"
1199 ];
1199 ];
1200 src = fetchurl {
1200 src = fetchurl {
1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1203 };
1203 };
1204 meta = {
1204 meta = {
1205 license = [ pkgs.lib.licenses.mit ];
1205 license = [ pkgs.lib.licenses.mit ];
1206 };
1206 };
1207 };
1207 };
1208 "peppercorn" = super.buildPythonPackage {
1208 "peppercorn" = super.buildPythonPackage {
1209 name = "peppercorn-0.6";
1209 name = "peppercorn-0.6";
1210 doCheck = false;
1210 doCheck = false;
1211 src = fetchurl {
1211 src = fetchurl {
1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1214 };
1214 };
1215 meta = {
1215 meta = {
1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1217 };
1217 };
1218 };
1218 };
1219 "pexpect" = super.buildPythonPackage {
1219 "pexpect" = super.buildPythonPackage {
1220 name = "pexpect-4.8.0";
1220 name = "pexpect-4.8.0";
1221 doCheck = false;
1221 doCheck = false;
1222 propagatedBuildInputs = [
1222 propagatedBuildInputs = [
1223 self."ptyprocess"
1223 self."ptyprocess"
1224 ];
1224 ];
1225 src = fetchurl {
1225 src = fetchurl {
1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1228 };
1228 };
1229 meta = {
1229 meta = {
1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1231 };
1231 };
1232 };
1232 };
1233 "pickleshare" = super.buildPythonPackage {
1233 "pickleshare" = super.buildPythonPackage {
1234 name = "pickleshare-0.7.5";
1234 name = "pickleshare-0.7.5";
1235 doCheck = false;
1235 doCheck = false;
1236 propagatedBuildInputs = [
1236 propagatedBuildInputs = [
1237 self."pathlib2"
1237 self."pathlib2"
1238 ];
1238 ];
1239 src = fetchurl {
1239 src = fetchurl {
1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1242 };
1242 };
1243 meta = {
1243 meta = {
1244 license = [ pkgs.lib.licenses.mit ];
1244 license = [ pkgs.lib.licenses.mit ];
1245 };
1245 };
1246 };
1246 };
1247 "plaster" = super.buildPythonPackage {
1247 "plaster" = super.buildPythonPackage {
1248 name = "plaster-1.0";
1248 name = "plaster-1.0";
1249 doCheck = false;
1249 doCheck = false;
1250 propagatedBuildInputs = [
1250 propagatedBuildInputs = [
1251 self."setuptools"
1251 self."setuptools"
1252 ];
1252 ];
1253 src = fetchurl {
1253 src = fetchurl {
1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1256 };
1256 };
1257 meta = {
1257 meta = {
1258 license = [ pkgs.lib.licenses.mit ];
1258 license = [ pkgs.lib.licenses.mit ];
1259 };
1259 };
1260 };
1260 };
1261 "plaster-pastedeploy" = super.buildPythonPackage {
1261 "plaster-pastedeploy" = super.buildPythonPackage {
1262 name = "plaster-pastedeploy-0.7";
1262 name = "plaster-pastedeploy-0.7";
1263 doCheck = false;
1263 doCheck = false;
1264 propagatedBuildInputs = [
1264 propagatedBuildInputs = [
1265 self."pastedeploy"
1265 self."pastedeploy"
1266 self."plaster"
1266 self."plaster"
1267 ];
1267 ];
1268 src = fetchurl {
1268 src = fetchurl {
1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1271 };
1271 };
1272 meta = {
1272 meta = {
1273 license = [ pkgs.lib.licenses.mit ];
1273 license = [ pkgs.lib.licenses.mit ];
1274 };
1274 };
1275 };
1275 };
1276 "pluggy" = super.buildPythonPackage {
1276 "pluggy" = super.buildPythonPackage {
1277 name = "pluggy-0.13.1";
1277 name = "pluggy-0.13.1";
1278 doCheck = false;
1278 doCheck = false;
1279 propagatedBuildInputs = [
1279 propagatedBuildInputs = [
1280 self."importlib-metadata"
1280 self."importlib-metadata"
1281 ];
1281 ];
1282 src = fetchurl {
1282 src = fetchurl {
1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1285 };
1285 };
1286 meta = {
1286 meta = {
1287 license = [ pkgs.lib.licenses.mit ];
1287 license = [ pkgs.lib.licenses.mit ];
1288 };
1288 };
1289 };
1289 };
1290 "premailer" = super.buildPythonPackage {
1290 "premailer" = super.buildPythonPackage {
1291 name = "premailer-3.6.1";
1291 name = "premailer-3.6.1";
1292 doCheck = false;
1292 doCheck = false;
1293 propagatedBuildInputs = [
1293 propagatedBuildInputs = [
1294 self."lxml"
1294 self."lxml"
1295 self."cssselect"
1295 self."cssselect"
1296 self."cssutils"
1296 self."cssutils"
1297 self."requests"
1297 self."requests"
1298 self."cachetools"
1298 self."cachetools"
1299 ];
1299 ];
1300 src = fetchurl {
1300 src = fetchurl {
1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1303 };
1303 };
1304 meta = {
1304 meta = {
1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1306 };
1306 };
1307 };
1307 };
1308 "prompt-toolkit" = super.buildPythonPackage {
1308 "prompt-toolkit" = super.buildPythonPackage {
1309 name = "prompt-toolkit-1.0.18";
1309 name = "prompt-toolkit-1.0.18";
1310 doCheck = false;
1310 doCheck = false;
1311 propagatedBuildInputs = [
1311 propagatedBuildInputs = [
1312 self."six"
1312 self."six"
1313 self."wcwidth"
1313 self."wcwidth"
1314 ];
1314 ];
1315 src = fetchurl {
1315 src = fetchurl {
1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1318 };
1318 };
1319 meta = {
1319 meta = {
1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1321 };
1321 };
1322 };
1322 };
1323 "psutil" = super.buildPythonPackage {
1323 "psutil" = super.buildPythonPackage {
1324 name = "psutil-5.7.0";
1324 name = "psutil-5.7.0";
1325 doCheck = false;
1325 doCheck = false;
1326 src = fetchurl {
1326 src = fetchurl {
1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1329 };
1329 };
1330 meta = {
1330 meta = {
1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1332 };
1332 };
1333 };
1333 };
1334 "psycopg2" = super.buildPythonPackage {
1334 "psycopg2" = super.buildPythonPackage {
1335 name = "psycopg2-2.8.4";
1335 name = "psycopg2-2.8.4";
1336 doCheck = false;
1336 doCheck = false;
1337 src = fetchurl {
1337 src = fetchurl {
1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1340 };
1340 };
1341 meta = {
1341 meta = {
1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1343 };
1343 };
1344 };
1344 };
1345 "ptyprocess" = super.buildPythonPackage {
1345 "ptyprocess" = super.buildPythonPackage {
1346 name = "ptyprocess-0.6.0";
1346 name = "ptyprocess-0.6.0";
1347 doCheck = false;
1347 doCheck = false;
1348 src = fetchurl {
1348 src = fetchurl {
1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1351 };
1351 };
1352 meta = {
1352 meta = {
1353 license = [ ];
1353 license = [ ];
1354 };
1354 };
1355 };
1355 };
1356 "py" = super.buildPythonPackage {
1356 "py" = super.buildPythonPackage {
1357 name = "py-1.8.0";
1357 name = "py-1.8.0";
1358 doCheck = false;
1358 doCheck = false;
1359 src = fetchurl {
1359 src = fetchurl {
1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1362 };
1362 };
1363 meta = {
1363 meta = {
1364 license = [ pkgs.lib.licenses.mit ];
1364 license = [ pkgs.lib.licenses.mit ];
1365 };
1365 };
1366 };
1366 };
1367 "py-bcrypt" = super.buildPythonPackage {
1367 "py-bcrypt" = super.buildPythonPackage {
1368 name = "py-bcrypt-0.4";
1368 name = "py-bcrypt-0.4";
1369 doCheck = false;
1369 doCheck = false;
1370 src = fetchurl {
1370 src = fetchurl {
1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1373 };
1373 };
1374 meta = {
1374 meta = {
1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1376 };
1376 };
1377 };
1377 };
1378 "py-gfm" = super.buildPythonPackage {
1378 "py-gfm" = super.buildPythonPackage {
1379 name = "py-gfm-0.1.4";
1379 name = "py-gfm-0.1.4";
1380 doCheck = false;
1380 doCheck = false;
1381 propagatedBuildInputs = [
1381 propagatedBuildInputs = [
1382 self."setuptools"
1382 self."setuptools"
1383 self."markdown"
1383 self."markdown"
1384 ];
1384 ];
1385 src = fetchurl {
1385 src = fetchurl {
1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1388 };
1388 };
1389 meta = {
1389 meta = {
1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1391 };
1391 };
1392 };
1392 };
1393 "pyasn1" = super.buildPythonPackage {
1393 "pyasn1" = super.buildPythonPackage {
1394 name = "pyasn1-0.4.8";
1394 name = "pyasn1-0.4.8";
1395 doCheck = false;
1395 doCheck = false;
1396 src = fetchurl {
1396 src = fetchurl {
1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1399 };
1399 };
1400 meta = {
1400 meta = {
1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1402 };
1402 };
1403 };
1403 };
1404 "pyasn1-modules" = super.buildPythonPackage {
1404 "pyasn1-modules" = super.buildPythonPackage {
1405 name = "pyasn1-modules-0.2.6";
1405 name = "pyasn1-modules-0.2.6";
1406 doCheck = false;
1406 doCheck = false;
1407 propagatedBuildInputs = [
1407 propagatedBuildInputs = [
1408 self."pyasn1"
1408 self."pyasn1"
1409 ];
1409 ];
1410 src = fetchurl {
1410 src = fetchurl {
1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1413 };
1413 };
1414 meta = {
1414 meta = {
1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1416 };
1416 };
1417 };
1417 };
1418 "pycparser" = super.buildPythonPackage {
1418 "pycparser" = super.buildPythonPackage {
1419 name = "pycparser-2.20";
1419 name = "pycparser-2.20";
1420 doCheck = false;
1420 doCheck = false;
1421 src = fetchurl {
1421 src = fetchurl {
1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1424 };
1424 };
1425 meta = {
1425 meta = {
1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1427 };
1427 };
1428 };
1428 };
1429 "pycrypto" = super.buildPythonPackage {
1429 "pycrypto" = super.buildPythonPackage {
1430 name = "pycrypto-2.6.1";
1430 name = "pycrypto-2.6.1";
1431 doCheck = false;
1431 doCheck = false;
1432 src = fetchurl {
1432 src = fetchurl {
1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1435 };
1435 };
1436 meta = {
1436 meta = {
1437 license = [ pkgs.lib.licenses.publicDomain ];
1437 license = [ pkgs.lib.licenses.publicDomain ];
1438 };
1438 };
1439 };
1439 };
1440 "pycurl" = super.buildPythonPackage {
1440 "pycurl" = super.buildPythonPackage {
1441 name = "pycurl-7.43.0.3";
1441 name = "pycurl-7.43.0.3";
1442 doCheck = false;
1442 doCheck = false;
1443 src = fetchurl {
1443 src = fetchurl {
1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1446 };
1446 };
1447 meta = {
1447 meta = {
1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1449 };
1449 };
1450 };
1450 };
1451 "pygments" = super.buildPythonPackage {
1451 "pygments" = super.buildPythonPackage {
1452 name = "pygments-2.4.2";
1452 name = "pygments-2.4.2";
1453 doCheck = false;
1453 doCheck = false;
1454 src = fetchurl {
1454 src = fetchurl {
1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1457 };
1457 };
1458 meta = {
1458 meta = {
1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1460 };
1460 };
1461 };
1461 };
1462 "pymysql" = super.buildPythonPackage {
1462 "pymysql" = super.buildPythonPackage {
1463 name = "pymysql-0.8.1";
1463 name = "pymysql-0.8.1";
1464 doCheck = false;
1464 doCheck = false;
1465 src = fetchurl {
1465 src = fetchurl {
1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1468 };
1468 };
1469 meta = {
1469 meta = {
1470 license = [ pkgs.lib.licenses.mit ];
1470 license = [ pkgs.lib.licenses.mit ];
1471 };
1471 };
1472 };
1472 };
1473 "pyotp" = super.buildPythonPackage {
1473 "pyotp" = super.buildPythonPackage {
1474 name = "pyotp-2.3.0";
1474 name = "pyotp-2.3.0";
1475 doCheck = false;
1475 doCheck = false;
1476 src = fetchurl {
1476 src = fetchurl {
1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1479 };
1479 };
1480 meta = {
1480 meta = {
1481 license = [ pkgs.lib.licenses.mit ];
1481 license = [ pkgs.lib.licenses.mit ];
1482 };
1482 };
1483 };
1483 };
1484 "pyparsing" = super.buildPythonPackage {
1484 "pyparsing" = super.buildPythonPackage {
1485 name = "pyparsing-2.4.7";
1485 name = "pyparsing-2.4.7";
1486 doCheck = false;
1486 doCheck = false;
1487 src = fetchurl {
1487 src = fetchurl {
1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1490 };
1490 };
1491 meta = {
1491 meta = {
1492 license = [ pkgs.lib.licenses.mit ];
1492 license = [ pkgs.lib.licenses.mit ];
1493 };
1493 };
1494 };
1494 };
1495 "pyramid" = super.buildPythonPackage {
1495 "pyramid" = super.buildPythonPackage {
1496 name = "pyramid-1.10.4";
1496 name = "pyramid-1.10.4";
1497 doCheck = false;
1497 doCheck = false;
1498 propagatedBuildInputs = [
1498 propagatedBuildInputs = [
1499 self."hupper"
1499 self."hupper"
1500 self."plaster"
1500 self."plaster"
1501 self."plaster-pastedeploy"
1501 self."plaster-pastedeploy"
1502 self."setuptools"
1502 self."setuptools"
1503 self."translationstring"
1503 self."translationstring"
1504 self."venusian"
1504 self."venusian"
1505 self."webob"
1505 self."webob"
1506 self."zope.deprecation"
1506 self."zope.deprecation"
1507 self."zope.interface"
1507 self."zope.interface"
1508 self."repoze.lru"
1508 self."repoze.lru"
1509 ];
1509 ];
1510 src = fetchurl {
1510 src = fetchurl {
1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1513 };
1513 };
1514 meta = {
1514 meta = {
1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1516 };
1516 };
1517 };
1517 };
1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1519 name = "pyramid-debugtoolbar-4.6.1";
1519 name = "pyramid-debugtoolbar-4.6.1";
1520 doCheck = false;
1520 doCheck = false;
1521 propagatedBuildInputs = [
1521 propagatedBuildInputs = [
1522 self."pyramid"
1522 self."pyramid"
1523 self."pyramid-mako"
1523 self."pyramid-mako"
1524 self."repoze.lru"
1524 self."repoze.lru"
1525 self."pygments"
1525 self."pygments"
1526 self."ipaddress"
1526 self."ipaddress"
1527 ];
1527 ];
1528 src = fetchurl {
1528 src = fetchurl {
1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1531 };
1531 };
1532 meta = {
1532 meta = {
1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1534 };
1534 };
1535 };
1535 };
1536 "pyramid-jinja2" = super.buildPythonPackage {
1536 "pyramid-jinja2" = super.buildPythonPackage {
1537 name = "pyramid-jinja2-2.7";
1537 name = "pyramid-jinja2-2.7";
1538 doCheck = false;
1538 doCheck = false;
1539 propagatedBuildInputs = [
1539 propagatedBuildInputs = [
1540 self."pyramid"
1540 self."pyramid"
1541 self."zope.deprecation"
1541 self."zope.deprecation"
1542 self."jinja2"
1542 self."jinja2"
1543 self."markupsafe"
1543 self."markupsafe"
1544 ];
1544 ];
1545 src = fetchurl {
1545 src = fetchurl {
1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1548 };
1548 };
1549 meta = {
1549 meta = {
1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1551 };
1551 };
1552 };
1552 };
1553 "pyramid-apispec" = super.buildPythonPackage {
1553 "pyramid-apispec" = super.buildPythonPackage {
1554 name = "pyramid-apispec-0.3.2";
1554 name = "pyramid-apispec-0.3.2";
1555 doCheck = false;
1555 doCheck = false;
1556 propagatedBuildInputs = [
1556 propagatedBuildInputs = [
1557 self."apispec"
1557 self."apispec"
1558 ];
1558 ];
1559 src = fetchurl {
1559 src = fetchurl {
1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1562 };
1562 };
1563 meta = {
1563 meta = {
1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1565 };
1565 };
1566 };
1566 };
1567 "pyramid-mailer" = super.buildPythonPackage {
1567 "pyramid-mailer" = super.buildPythonPackage {
1568 name = "pyramid-mailer-0.15.1";
1568 name = "pyramid-mailer-0.15.1";
1569 doCheck = false;
1569 doCheck = false;
1570 propagatedBuildInputs = [
1570 propagatedBuildInputs = [
1571 self."pyramid"
1571 self."pyramid"
1572 self."repoze.sendmail"
1572 self."repoze.sendmail"
1573 self."transaction"
1573 self."transaction"
1574 ];
1574 ];
1575 src = fetchurl {
1575 src = fetchurl {
1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1578 };
1578 };
1579 meta = {
1579 meta = {
1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1581 };
1581 };
1582 };
1582 };
1583 "pyramid-mako" = super.buildPythonPackage {
1583 "pyramid-mako" = super.buildPythonPackage {
1584 name = "pyramid-mako-1.1.0";
1584 name = "pyramid-mako-1.1.0";
1585 doCheck = false;
1585 doCheck = false;
1586 propagatedBuildInputs = [
1586 propagatedBuildInputs = [
1587 self."pyramid"
1587 self."pyramid"
1588 self."mako"
1588 self."mako"
1589 ];
1589 ];
1590 src = fetchurl {
1590 src = fetchurl {
1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1593 };
1593 };
1594 meta = {
1594 meta = {
1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1596 };
1596 };
1597 };
1597 };
1598 "pysqlite" = super.buildPythonPackage {
1598 "pysqlite" = super.buildPythonPackage {
1599 name = "pysqlite-2.8.3";
1599 name = "pysqlite-2.8.3";
1600 doCheck = false;
1600 doCheck = false;
1601 src = fetchurl {
1601 src = fetchurl {
1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1604 };
1604 };
1605 meta = {
1605 meta = {
1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1607 };
1607 };
1608 };
1608 };
1609 "pytest" = super.buildPythonPackage {
1609 "pytest" = super.buildPythonPackage {
1610 name = "pytest-4.6.5";
1610 name = "pytest-4.6.5";
1611 doCheck = false;
1611 doCheck = false;
1612 propagatedBuildInputs = [
1612 propagatedBuildInputs = [
1613 self."py"
1613 self."py"
1614 self."six"
1614 self."six"
1615 self."packaging"
1615 self."packaging"
1616 self."attrs"
1616 self."attrs"
1617 self."atomicwrites"
1617 self."atomicwrites"
1618 self."pluggy"
1618 self."pluggy"
1619 self."importlib-metadata"
1619 self."importlib-metadata"
1620 self."wcwidth"
1620 self."wcwidth"
1621 self."funcsigs"
1621 self."funcsigs"
1622 self."pathlib2"
1622 self."pathlib2"
1623 self."more-itertools"
1623 self."more-itertools"
1624 ];
1624 ];
1625 src = fetchurl {
1625 src = fetchurl {
1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1628 };
1628 };
1629 meta = {
1629 meta = {
1630 license = [ pkgs.lib.licenses.mit ];
1630 license = [ pkgs.lib.licenses.mit ];
1631 };
1631 };
1632 };
1632 };
1633 "pytest-cov" = super.buildPythonPackage {
1633 "pytest-cov" = super.buildPythonPackage {
1634 name = "pytest-cov-2.7.1";
1634 name = "pytest-cov-2.7.1";
1635 doCheck = false;
1635 doCheck = false;
1636 propagatedBuildInputs = [
1636 propagatedBuildInputs = [
1637 self."pytest"
1637 self."pytest"
1638 self."coverage"
1638 self."coverage"
1639 ];
1639 ];
1640 src = fetchurl {
1640 src = fetchurl {
1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1643 };
1643 };
1644 meta = {
1644 meta = {
1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1646 };
1646 };
1647 };
1647 };
1648 "pytest-profiling" = super.buildPythonPackage {
1648 "pytest-profiling" = super.buildPythonPackage {
1649 name = "pytest-profiling-1.7.0";
1649 name = "pytest-profiling-1.7.0";
1650 doCheck = false;
1650 doCheck = false;
1651 propagatedBuildInputs = [
1651 propagatedBuildInputs = [
1652 self."six"
1652 self."six"
1653 self."pytest"
1653 self."pytest"
1654 self."gprof2dot"
1654 self."gprof2dot"
1655 ];
1655 ];
1656 src = fetchurl {
1656 src = fetchurl {
1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1659 };
1659 };
1660 meta = {
1660 meta = {
1661 license = [ pkgs.lib.licenses.mit ];
1661 license = [ pkgs.lib.licenses.mit ];
1662 };
1662 };
1663 };
1663 };
1664 "pytest-runner" = super.buildPythonPackage {
1664 "pytest-runner" = super.buildPythonPackage {
1665 name = "pytest-runner-5.1";
1665 name = "pytest-runner-5.1";
1666 doCheck = false;
1666 doCheck = false;
1667 src = fetchurl {
1667 src = fetchurl {
1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1670 };
1670 };
1671 meta = {
1671 meta = {
1672 license = [ pkgs.lib.licenses.mit ];
1672 license = [ pkgs.lib.licenses.mit ];
1673 };
1673 };
1674 };
1674 };
1675 "pytest-sugar" = super.buildPythonPackage {
1675 "pytest-sugar" = super.buildPythonPackage {
1676 name = "pytest-sugar-0.9.2";
1676 name = "pytest-sugar-0.9.2";
1677 doCheck = false;
1677 doCheck = false;
1678 propagatedBuildInputs = [
1678 propagatedBuildInputs = [
1679 self."pytest"
1679 self."pytest"
1680 self."termcolor"
1680 self."termcolor"
1681 self."packaging"
1681 self."packaging"
1682 ];
1682 ];
1683 src = fetchurl {
1683 src = fetchurl {
1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1686 };
1686 };
1687 meta = {
1687 meta = {
1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1689 };
1689 };
1690 };
1690 };
1691 "pytest-timeout" = super.buildPythonPackage {
1691 "pytest-timeout" = super.buildPythonPackage {
1692 name = "pytest-timeout-1.3.3";
1692 name = "pytest-timeout-1.3.3";
1693 doCheck = false;
1693 doCheck = false;
1694 propagatedBuildInputs = [
1694 propagatedBuildInputs = [
1695 self."pytest"
1695 self."pytest"
1696 ];
1696 ];
1697 src = fetchurl {
1697 src = fetchurl {
1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1700 };
1700 };
1701 meta = {
1701 meta = {
1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1703 };
1703 };
1704 };
1704 };
1705 "python-dateutil" = super.buildPythonPackage {
1705 "python-dateutil" = super.buildPythonPackage {
1706 name = "python-dateutil-2.8.1";
1706 name = "python-dateutil-2.8.1";
1707 doCheck = false;
1707 doCheck = false;
1708 propagatedBuildInputs = [
1708 propagatedBuildInputs = [
1709 self."six"
1709 self."six"
1710 ];
1710 ];
1711 src = fetchurl {
1711 src = fetchurl {
1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1714 };
1714 };
1715 meta = {
1715 meta = {
1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1717 };
1717 };
1718 };
1718 };
1719 "python-editor" = super.buildPythonPackage {
1719 "python-editor" = super.buildPythonPackage {
1720 name = "python-editor-1.0.4";
1720 name = "python-editor-1.0.4";
1721 doCheck = false;
1721 doCheck = false;
1722 src = fetchurl {
1722 src = fetchurl {
1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1725 };
1725 };
1726 meta = {
1726 meta = {
1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1728 };
1728 };
1729 };
1729 };
1730 "python-ldap" = super.buildPythonPackage {
1730 "python-ldap" = super.buildPythonPackage {
1731 name = "python-ldap-3.2.0";
1731 name = "python-ldap-3.2.0";
1732 doCheck = false;
1732 doCheck = false;
1733 propagatedBuildInputs = [
1733 propagatedBuildInputs = [
1734 self."pyasn1"
1734 self."pyasn1"
1735 self."pyasn1-modules"
1735 self."pyasn1-modules"
1736 ];
1736 ];
1737 src = fetchurl {
1737 src = fetchurl {
1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1740 };
1740 };
1741 meta = {
1741 meta = {
1742 license = [ pkgs.lib.licenses.psfl ];
1742 license = [ pkgs.lib.licenses.psfl ];
1743 };
1743 };
1744 };
1744 };
1745 "python-memcached" = super.buildPythonPackage {
1745 "python-memcached" = super.buildPythonPackage {
1746 name = "python-memcached-1.59";
1746 name = "python-memcached-1.59";
1747 doCheck = false;
1747 doCheck = false;
1748 propagatedBuildInputs = [
1748 propagatedBuildInputs = [
1749 self."six"
1749 self."six"
1750 ];
1750 ];
1751 src = fetchurl {
1751 src = fetchurl {
1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1754 };
1754 };
1755 meta = {
1755 meta = {
1756 license = [ pkgs.lib.licenses.psfl ];
1756 license = [ pkgs.lib.licenses.psfl ];
1757 };
1757 };
1758 };
1758 };
1759 "python-pam" = super.buildPythonPackage {
1759 "python-pam" = super.buildPythonPackage {
1760 name = "python-pam-1.8.4";
1760 name = "python-pam-1.8.4";
1761 doCheck = false;
1761 doCheck = false;
1762 src = fetchurl {
1762 src = fetchurl {
1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1765 };
1765 };
1766 meta = {
1766 meta = {
1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1768 };
1768 };
1769 };
1769 };
1770 "python-saml" = super.buildPythonPackage {
1770 "python-saml" = super.buildPythonPackage {
1771 name = "python-saml-2.4.2";
1771 name = "python-saml-2.4.2";
1772 doCheck = false;
1772 doCheck = false;
1773 propagatedBuildInputs = [
1773 propagatedBuildInputs = [
1774 self."dm.xmlsec.binding"
1774 self."dm.xmlsec.binding"
1775 self."isodate"
1775 self."isodate"
1776 self."defusedxml"
1776 self."defusedxml"
1777 ];
1777 ];
1778 src = fetchurl {
1778 src = fetchurl {
1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1781 };
1781 };
1782 meta = {
1782 meta = {
1783 license = [ pkgs.lib.licenses.mit ];
1783 license = [ pkgs.lib.licenses.mit ];
1784 };
1784 };
1785 };
1785 };
1786 "pytz" = super.buildPythonPackage {
1786 "pytz" = super.buildPythonPackage {
1787 name = "pytz-2019.3";
1787 name = "pytz-2019.3";
1788 doCheck = false;
1788 doCheck = false;
1789 src = fetchurl {
1789 src = fetchurl {
1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1792 };
1792 };
1793 meta = {
1793 meta = {
1794 license = [ pkgs.lib.licenses.mit ];
1794 license = [ pkgs.lib.licenses.mit ];
1795 };
1795 };
1796 };
1796 };
1797 "pyzmq" = super.buildPythonPackage {
1797 "pyzmq" = super.buildPythonPackage {
1798 name = "pyzmq-14.6.0";
1798 name = "pyzmq-14.6.0";
1799 doCheck = false;
1799 doCheck = false;
1800 src = fetchurl {
1800 src = fetchurl {
1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1803 };
1803 };
1804 meta = {
1804 meta = {
1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1806 };
1806 };
1807 };
1807 };
1808 "PyYAML" = super.buildPythonPackage {
1808 "PyYAML" = super.buildPythonPackage {
1809 name = "PyYAML-5.3.1";
1809 name = "PyYAML-5.3.1";
1810 doCheck = false;
1810 doCheck = false;
1811 src = fetchurl {
1811 src = fetchurl {
1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1814 };
1814 };
1815 meta = {
1815 meta = {
1816 license = [ pkgs.lib.licenses.mit ];
1816 license = [ pkgs.lib.licenses.mit ];
1817 };
1817 };
1818 };
1818 };
1819 "regex" = super.buildPythonPackage {
1819 "regex" = super.buildPythonPackage {
1820 name = "regex-2020.9.27";
1820 name = "regex-2020.9.27";
1821 doCheck = false;
1821 doCheck = false;
1822 src = fetchurl {
1822 src = fetchurl {
1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1825 };
1825 };
1826 meta = {
1826 meta = {
1827 license = [ pkgs.lib.licenses.psfl ];
1827 license = [ pkgs.lib.licenses.psfl ];
1828 };
1828 };
1829 };
1829 };
1830 "redis" = super.buildPythonPackage {
1830 "redis" = super.buildPythonPackage {
1831 name = "redis-3.5.3";
1831 name = "redis-3.5.3";
1832 doCheck = false;
1832 doCheck = false;
1833 src = fetchurl {
1833 src = fetchurl {
1834 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
1834 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
1835 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
1835 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
1836 };
1836 };
1837 meta = {
1837 meta = {
1838 license = [ pkgs.lib.licenses.mit ];
1838 license = [ pkgs.lib.licenses.mit ];
1839 };
1839 };
1840 };
1840 };
1841 "repoze.lru" = super.buildPythonPackage {
1841 "repoze.lru" = super.buildPythonPackage {
1842 name = "repoze.lru-0.7";
1842 name = "repoze.lru-0.7";
1843 doCheck = false;
1843 doCheck = false;
1844 src = fetchurl {
1844 src = fetchurl {
1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1847 };
1847 };
1848 meta = {
1848 meta = {
1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1850 };
1850 };
1851 };
1851 };
1852 "repoze.sendmail" = super.buildPythonPackage {
1852 "repoze.sendmail" = super.buildPythonPackage {
1853 name = "repoze.sendmail-4.4.1";
1853 name = "repoze.sendmail-4.4.1";
1854 doCheck = false;
1854 doCheck = false;
1855 propagatedBuildInputs = [
1855 propagatedBuildInputs = [
1856 self."setuptools"
1856 self."setuptools"
1857 self."zope.interface"
1857 self."zope.interface"
1858 self."transaction"
1858 self."transaction"
1859 ];
1859 ];
1860 src = fetchurl {
1860 src = fetchurl {
1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1863 };
1863 };
1864 meta = {
1864 meta = {
1865 license = [ pkgs.lib.licenses.zpl21 ];
1865 license = [ pkgs.lib.licenses.zpl21 ];
1866 };
1866 };
1867 };
1867 };
1868 "requests" = super.buildPythonPackage {
1868 "requests" = super.buildPythonPackage {
1869 name = "requests-2.22.0";
1869 name = "requests-2.22.0";
1870 doCheck = false;
1870 doCheck = false;
1871 propagatedBuildInputs = [
1871 propagatedBuildInputs = [
1872 self."chardet"
1872 self."chardet"
1873 self."idna"
1873 self."idna"
1874 self."urllib3"
1874 self."urllib3"
1875 self."certifi"
1875 self."certifi"
1876 ];
1876 ];
1877 src = fetchurl {
1877 src = fetchurl {
1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1880 };
1880 };
1881 meta = {
1881 meta = {
1882 license = [ pkgs.lib.licenses.asl20 ];
1882 license = [ pkgs.lib.licenses.asl20 ];
1883 };
1883 };
1884 };
1884 };
1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1886 name = "rhodecode-enterprise-ce-4.25.2";
1886 name = "rhodecode-enterprise-ce-4.26.0";
1887 buildInputs = [
1887 buildInputs = [
1888 self."pytest"
1888 self."pytest"
1889 self."py"
1889 self."py"
1890 self."pytest-cov"
1890 self."pytest-cov"
1891 self."pytest-sugar"
1891 self."pytest-sugar"
1892 self."pytest-runner"
1892 self."pytest-runner"
1893 self."pytest-profiling"
1893 self."pytest-profiling"
1894 self."pytest-timeout"
1894 self."pytest-timeout"
1895 self."gprof2dot"
1895 self."gprof2dot"
1896 self."mock"
1896 self."mock"
1897 self."cov-core"
1897 self."cov-core"
1898 self."coverage"
1898 self."coverage"
1899 self."webtest"
1899 self."webtest"
1900 self."beautifulsoup4"
1900 self."beautifulsoup4"
1901 self."configobj"
1901 self."configobj"
1902 ];
1902 ];
1903 doCheck = true;
1903 doCheck = true;
1904 propagatedBuildInputs = [
1904 propagatedBuildInputs = [
1905 self."amqp"
1905 self."amqp"
1906 self."babel"
1906 self."babel"
1907 self."beaker"
1907 self."beaker"
1908 self."bleach"
1908 self."bleach"
1909 self."celery"
1909 self."celery"
1910 self."channelstream"
1910 self."channelstream"
1911 self."click"
1911 self."click"
1912 self."colander"
1912 self."colander"
1913 self."configobj"
1913 self."configobj"
1914 self."cssselect"
1914 self."cssselect"
1915 self."cryptography"
1915 self."cryptography"
1916 self."decorator"
1916 self."decorator"
1917 self."deform"
1917 self."deform"
1918 self."docutils"
1918 self."docutils"
1919 self."dogpile.cache"
1919 self."dogpile.cache"
1920 self."dogpile.core"
1920 self."dogpile.core"
1921 self."formencode"
1921 self."formencode"
1922 self."future"
1922 self."future"
1923 self."futures"
1923 self."futures"
1924 self."infrae.cache"
1924 self."infrae.cache"
1925 self."iso8601"
1925 self."iso8601"
1926 self."itsdangerous"
1926 self."itsdangerous"
1927 self."kombu"
1927 self."kombu"
1928 self."lxml"
1928 self."lxml"
1929 self."mako"
1929 self."mako"
1930 self."markdown"
1930 self."markdown"
1931 self."markupsafe"
1931 self."markupsafe"
1932 self."msgpack-python"
1932 self."msgpack-python"
1933 self."pyotp"
1933 self."pyotp"
1934 self."packaging"
1934 self."packaging"
1935 self."pathlib2"
1935 self."pathlib2"
1936 self."paste"
1936 self."paste"
1937 self."pastedeploy"
1937 self."pastedeploy"
1938 self."pastescript"
1938 self."pastescript"
1939 self."peppercorn"
1939 self."peppercorn"
1940 self."premailer"
1940 self."premailer"
1941 self."psutil"
1941 self."psutil"
1942 self."py-bcrypt"
1942 self."py-bcrypt"
1943 self."pycurl"
1943 self."pycurl"
1944 self."pycrypto"
1944 self."pycrypto"
1945 self."pygments"
1945 self."pygments"
1946 self."pyparsing"
1946 self."pyparsing"
1947 self."pyramid-debugtoolbar"
1947 self."pyramid-debugtoolbar"
1948 self."pyramid-mako"
1948 self."pyramid-mako"
1949 self."pyramid"
1949 self."pyramid"
1950 self."pyramid-mailer"
1950 self."pyramid-mailer"
1951 self."python-dateutil"
1951 self."python-dateutil"
1952 self."python-ldap"
1952 self."python-ldap"
1953 self."python-memcached"
1953 self."python-memcached"
1954 self."python-pam"
1954 self."python-pam"
1955 self."python-saml"
1955 self."python-saml"
1956 self."pytz"
1956 self."pytz"
1957 self."tzlocal"
1957 self."tzlocal"
1958 self."pyzmq"
1958 self."pyzmq"
1959 self."py-gfm"
1959 self."py-gfm"
1960 self."regex"
1960 self."regex"
1961 self."redis"
1961 self."redis"
1962 self."repoze.lru"
1962 self."repoze.lru"
1963 self."requests"
1963 self."requests"
1964 self."routes"
1964 self."routes"
1965 self."simplejson"
1965 self."simplejson"
1966 self."six"
1966 self."six"
1967 self."sqlalchemy"
1967 self."sqlalchemy"
1968 self."sshpubkeys"
1968 self."sshpubkeys"
1969 self."subprocess32"
1969 self."subprocess32"
1970 self."supervisor"
1970 self."supervisor"
1971 self."translationstring"
1971 self."translationstring"
1972 self."urllib3"
1972 self."urllib3"
1973 self."urlobject"
1973 self."urlobject"
1974 self."venusian"
1974 self."venusian"
1975 self."weberror"
1975 self."weberror"
1976 self."webhelpers2"
1976 self."webhelpers2"
1977 self."webob"
1977 self."webob"
1978 self."whoosh"
1978 self."whoosh"
1979 self."wsgiref"
1979 self."wsgiref"
1980 self."zope.cachedescriptors"
1980 self."zope.cachedescriptors"
1981 self."zope.deprecation"
1981 self."zope.deprecation"
1982 self."zope.event"
1982 self."zope.event"
1983 self."zope.interface"
1983 self."zope.interface"
1984 self."mysql-python"
1984 self."mysql-python"
1985 self."pymysql"
1985 self."pymysql"
1986 self."pysqlite"
1986 self."pysqlite"
1987 self."psycopg2"
1987 self."psycopg2"
1988 self."nbconvert"
1988 self."nbconvert"
1989 self."nbformat"
1989 self."nbformat"
1990 self."jupyter-client"
1990 self."jupyter-client"
1991 self."jupyter-core"
1991 self."jupyter-core"
1992 self."alembic"
1992 self."alembic"
1993 self."invoke"
1993 self."invoke"
1994 self."bumpversion"
1994 self."bumpversion"
1995 self."gevent"
1995 self."gevent"
1996 self."greenlet"
1996 self."greenlet"
1997 self."gunicorn"
1997 self."gunicorn"
1998 self."waitress"
1998 self."waitress"
1999 self."ipdb"
1999 self."ipdb"
2000 self."ipython"
2000 self."ipython"
2001 self."rhodecode-tools"
2001 self."rhodecode-tools"
2002 self."appenlight-client"
2002 self."appenlight-client"
2003 self."pytest"
2003 self."pytest"
2004 self."py"
2004 self."py"
2005 self."pytest-cov"
2005 self."pytest-cov"
2006 self."pytest-sugar"
2006 self."pytest-sugar"
2007 self."pytest-runner"
2007 self."pytest-runner"
2008 self."pytest-profiling"
2008 self."pytest-profiling"
2009 self."pytest-timeout"
2009 self."pytest-timeout"
2010 self."gprof2dot"
2010 self."gprof2dot"
2011 self."mock"
2011 self."mock"
2012 self."cov-core"
2012 self."cov-core"
2013 self."coverage"
2013 self."coverage"
2014 self."webtest"
2014 self."webtest"
2015 self."beautifulsoup4"
2015 self."beautifulsoup4"
2016 ];
2016 ];
2017 src = ./.;
2017 src = ./.;
2018 meta = {
2018 meta = {
2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2020 };
2020 };
2021 };
2021 };
2022 "rhodecode-tools" = super.buildPythonPackage {
2022 "rhodecode-tools" = super.buildPythonPackage {
2023 name = "rhodecode-tools-1.4.0";
2023 name = "rhodecode-tools-1.4.0";
2024 doCheck = false;
2024 doCheck = false;
2025 propagatedBuildInputs = [
2025 propagatedBuildInputs = [
2026 self."click"
2026 self."click"
2027 self."future"
2027 self."future"
2028 self."six"
2028 self."six"
2029 self."mako"
2029 self."mako"
2030 self."markupsafe"
2030 self."markupsafe"
2031 self."requests"
2031 self."requests"
2032 self."urllib3"
2032 self."urllib3"
2033 self."whoosh"
2033 self."whoosh"
2034 self."elasticsearch"
2034 self."elasticsearch"
2035 self."elasticsearch-dsl"
2035 self."elasticsearch-dsl"
2036 self."elasticsearch2"
2036 self."elasticsearch2"
2037 self."elasticsearch1-dsl"
2037 self."elasticsearch1-dsl"
2038 ];
2038 ];
2039 src = fetchurl {
2039 src = fetchurl {
2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2042 };
2042 };
2043 meta = {
2043 meta = {
2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2045 };
2045 };
2046 };
2046 };
2047 "routes" = super.buildPythonPackage {
2047 "routes" = super.buildPythonPackage {
2048 name = "routes-2.4.1";
2048 name = "routes-2.4.1";
2049 doCheck = false;
2049 doCheck = false;
2050 propagatedBuildInputs = [
2050 propagatedBuildInputs = [
2051 self."six"
2051 self."six"
2052 self."repoze.lru"
2052 self."repoze.lru"
2053 ];
2053 ];
2054 src = fetchurl {
2054 src = fetchurl {
2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2057 };
2057 };
2058 meta = {
2058 meta = {
2059 license = [ pkgs.lib.licenses.mit ];
2059 license = [ pkgs.lib.licenses.mit ];
2060 };
2060 };
2061 };
2061 };
2062 "scandir" = super.buildPythonPackage {
2062 "scandir" = super.buildPythonPackage {
2063 name = "scandir-1.10.0";
2063 name = "scandir-1.10.0";
2064 doCheck = false;
2064 doCheck = false;
2065 src = fetchurl {
2065 src = fetchurl {
2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2068 };
2068 };
2069 meta = {
2069 meta = {
2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2071 };
2071 };
2072 };
2072 };
2073 "setproctitle" = super.buildPythonPackage {
2073 "setproctitle" = super.buildPythonPackage {
2074 name = "setproctitle-1.1.10";
2074 name = "setproctitle-1.1.10";
2075 doCheck = false;
2075 doCheck = false;
2076 src = fetchurl {
2076 src = fetchurl {
2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2079 };
2079 };
2080 meta = {
2080 meta = {
2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2082 };
2082 };
2083 };
2083 };
2084 "setuptools" = super.buildPythonPackage {
2084 "setuptools" = super.buildPythonPackage {
2085 name = "setuptools-44.1.0";
2085 name = "setuptools-44.1.0";
2086 doCheck = false;
2086 doCheck = false;
2087 src = fetchurl {
2087 src = fetchurl {
2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2090 };
2090 };
2091 meta = {
2091 meta = {
2092 license = [ pkgs.lib.licenses.mit ];
2092 license = [ pkgs.lib.licenses.mit ];
2093 };
2093 };
2094 };
2094 };
2095 "setuptools-scm" = super.buildPythonPackage {
2095 "setuptools-scm" = super.buildPythonPackage {
2096 name = "setuptools-scm-3.5.0";
2096 name = "setuptools-scm-3.5.0";
2097 doCheck = false;
2097 doCheck = false;
2098 src = fetchurl {
2098 src = fetchurl {
2099 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
2099 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
2100 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
2100 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
2101 };
2101 };
2102 meta = {
2102 meta = {
2103 license = [ pkgs.lib.licenses.psfl ];
2103 license = [ pkgs.lib.licenses.psfl ];
2104 };
2104 };
2105 };
2105 };
2106 "simplegeneric" = super.buildPythonPackage {
2106 "simplegeneric" = super.buildPythonPackage {
2107 name = "simplegeneric-0.8.1";
2107 name = "simplegeneric-0.8.1";
2108 doCheck = false;
2108 doCheck = false;
2109 src = fetchurl {
2109 src = fetchurl {
2110 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2110 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2111 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2111 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2112 };
2112 };
2113 meta = {
2113 meta = {
2114 license = [ pkgs.lib.licenses.zpl21 ];
2114 license = [ pkgs.lib.licenses.zpl21 ];
2115 };
2115 };
2116 };
2116 };
2117 "simplejson" = super.buildPythonPackage {
2117 "simplejson" = super.buildPythonPackage {
2118 name = "simplejson-3.16.0";
2118 name = "simplejson-3.16.0";
2119 doCheck = false;
2119 doCheck = false;
2120 src = fetchurl {
2120 src = fetchurl {
2121 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2121 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2122 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2122 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2123 };
2123 };
2124 meta = {
2124 meta = {
2125 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2125 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2126 };
2126 };
2127 };
2127 };
2128 "six" = super.buildPythonPackage {
2128 "six" = super.buildPythonPackage {
2129 name = "six-1.11.0";
2129 name = "six-1.11.0";
2130 doCheck = false;
2130 doCheck = false;
2131 src = fetchurl {
2131 src = fetchurl {
2132 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2132 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2133 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2133 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2134 };
2134 };
2135 meta = {
2135 meta = {
2136 license = [ pkgs.lib.licenses.mit ];
2136 license = [ pkgs.lib.licenses.mit ];
2137 };
2137 };
2138 };
2138 };
2139 "sqlalchemy" = super.buildPythonPackage {
2139 "sqlalchemy" = super.buildPythonPackage {
2140 name = "sqlalchemy-1.3.15";
2140 name = "sqlalchemy-1.3.15";
2141 doCheck = false;
2141 doCheck = false;
2142 src = fetchurl {
2142 src = fetchurl {
2143 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2143 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2144 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2144 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2145 };
2145 };
2146 meta = {
2146 meta = {
2147 license = [ pkgs.lib.licenses.mit ];
2147 license = [ pkgs.lib.licenses.mit ];
2148 };
2148 };
2149 };
2149 };
2150 "sshpubkeys" = super.buildPythonPackage {
2150 "sshpubkeys" = super.buildPythonPackage {
2151 name = "sshpubkeys-3.1.0";
2151 name = "sshpubkeys-3.1.0";
2152 doCheck = false;
2152 doCheck = false;
2153 propagatedBuildInputs = [
2153 propagatedBuildInputs = [
2154 self."cryptography"
2154 self."cryptography"
2155 self."ecdsa"
2155 self."ecdsa"
2156 ];
2156 ];
2157 src = fetchurl {
2157 src = fetchurl {
2158 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2158 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2159 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2159 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2160 };
2160 };
2161 meta = {
2161 meta = {
2162 license = [ pkgs.lib.licenses.bsdOriginal ];
2162 license = [ pkgs.lib.licenses.bsdOriginal ];
2163 };
2163 };
2164 };
2164 };
2165 "subprocess32" = super.buildPythonPackage {
2165 "subprocess32" = super.buildPythonPackage {
2166 name = "subprocess32-3.5.4";
2166 name = "subprocess32-3.5.4";
2167 doCheck = false;
2167 doCheck = false;
2168 src = fetchurl {
2168 src = fetchurl {
2169 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2169 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2170 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2170 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2171 };
2171 };
2172 meta = {
2172 meta = {
2173 license = [ pkgs.lib.licenses.psfl ];
2173 license = [ pkgs.lib.licenses.psfl ];
2174 };
2174 };
2175 };
2175 };
2176 "supervisor" = super.buildPythonPackage {
2176 "supervisor" = super.buildPythonPackage {
2177 name = "supervisor-4.1.0";
2177 name = "supervisor-4.1.0";
2178 doCheck = false;
2178 doCheck = false;
2179 src = fetchurl {
2179 src = fetchurl {
2180 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2180 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2181 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2181 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2182 };
2182 };
2183 meta = {
2183 meta = {
2184 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2184 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2185 };
2185 };
2186 };
2186 };
2187 "tempita" = super.buildPythonPackage {
2187 "tempita" = super.buildPythonPackage {
2188 name = "tempita-0.5.2";
2188 name = "tempita-0.5.2";
2189 doCheck = false;
2189 doCheck = false;
2190 src = fetchurl {
2190 src = fetchurl {
2191 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2191 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2192 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2192 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2193 };
2193 };
2194 meta = {
2194 meta = {
2195 license = [ pkgs.lib.licenses.mit ];
2195 license = [ pkgs.lib.licenses.mit ];
2196 };
2196 };
2197 };
2197 };
2198 "termcolor" = super.buildPythonPackage {
2198 "termcolor" = super.buildPythonPackage {
2199 name = "termcolor-1.1.0";
2199 name = "termcolor-1.1.0";
2200 doCheck = false;
2200 doCheck = false;
2201 src = fetchurl {
2201 src = fetchurl {
2202 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2202 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2203 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2203 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2204 };
2204 };
2205 meta = {
2205 meta = {
2206 license = [ pkgs.lib.licenses.mit ];
2206 license = [ pkgs.lib.licenses.mit ];
2207 };
2207 };
2208 };
2208 };
2209 "testpath" = super.buildPythonPackage {
2209 "testpath" = super.buildPythonPackage {
2210 name = "testpath-0.4.4";
2210 name = "testpath-0.4.4";
2211 doCheck = false;
2211 doCheck = false;
2212 src = fetchurl {
2212 src = fetchurl {
2213 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2213 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2214 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2214 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2215 };
2215 };
2216 meta = {
2216 meta = {
2217 license = [ ];
2217 license = [ ];
2218 };
2218 };
2219 };
2219 };
2220 "traitlets" = super.buildPythonPackage {
2220 "traitlets" = super.buildPythonPackage {
2221 name = "traitlets-4.3.3";
2221 name = "traitlets-4.3.3";
2222 doCheck = false;
2222 doCheck = false;
2223 propagatedBuildInputs = [
2223 propagatedBuildInputs = [
2224 self."ipython-genutils"
2224 self."ipython-genutils"
2225 self."six"
2225 self."six"
2226 self."decorator"
2226 self."decorator"
2227 self."enum34"
2227 self."enum34"
2228 ];
2228 ];
2229 src = fetchurl {
2229 src = fetchurl {
2230 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2230 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2231 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2231 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2232 };
2232 };
2233 meta = {
2233 meta = {
2234 license = [ pkgs.lib.licenses.bsdOriginal ];
2234 license = [ pkgs.lib.licenses.bsdOriginal ];
2235 };
2235 };
2236 };
2236 };
2237 "transaction" = super.buildPythonPackage {
2237 "transaction" = super.buildPythonPackage {
2238 name = "transaction-2.4.0";
2238 name = "transaction-2.4.0";
2239 doCheck = false;
2239 doCheck = false;
2240 propagatedBuildInputs = [
2240 propagatedBuildInputs = [
2241 self."zope.interface"
2241 self."zope.interface"
2242 ];
2242 ];
2243 src = fetchurl {
2243 src = fetchurl {
2244 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2244 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2245 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2245 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2246 };
2246 };
2247 meta = {
2247 meta = {
2248 license = [ pkgs.lib.licenses.zpl21 ];
2248 license = [ pkgs.lib.licenses.zpl21 ];
2249 };
2249 };
2250 };
2250 };
2251 "translationstring" = super.buildPythonPackage {
2251 "translationstring" = super.buildPythonPackage {
2252 name = "translationstring-1.3";
2252 name = "translationstring-1.3";
2253 doCheck = false;
2253 doCheck = false;
2254 src = fetchurl {
2254 src = fetchurl {
2255 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2255 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2256 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2256 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2257 };
2257 };
2258 meta = {
2258 meta = {
2259 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2259 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2260 };
2260 };
2261 };
2261 };
2262 "tzlocal" = super.buildPythonPackage {
2262 "tzlocal" = super.buildPythonPackage {
2263 name = "tzlocal-1.5.1";
2263 name = "tzlocal-1.5.1";
2264 doCheck = false;
2264 doCheck = false;
2265 propagatedBuildInputs = [
2265 propagatedBuildInputs = [
2266 self."pytz"
2266 self."pytz"
2267 ];
2267 ];
2268 src = fetchurl {
2268 src = fetchurl {
2269 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2269 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2270 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2270 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2271 };
2271 };
2272 meta = {
2272 meta = {
2273 license = [ pkgs.lib.licenses.mit ];
2273 license = [ pkgs.lib.licenses.mit ];
2274 };
2274 };
2275 };
2275 };
2276 "urllib3" = super.buildPythonPackage {
2276 "urllib3" = super.buildPythonPackage {
2277 name = "urllib3-1.25.2";
2277 name = "urllib3-1.25.2";
2278 doCheck = false;
2278 doCheck = false;
2279 src = fetchurl {
2279 src = fetchurl {
2280 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2280 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2281 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2281 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2282 };
2282 };
2283 meta = {
2283 meta = {
2284 license = [ pkgs.lib.licenses.mit ];
2284 license = [ pkgs.lib.licenses.mit ];
2285 };
2285 };
2286 };
2286 };
2287 "urlobject" = super.buildPythonPackage {
2287 "urlobject" = super.buildPythonPackage {
2288 name = "urlobject-2.4.3";
2288 name = "urlobject-2.4.3";
2289 doCheck = false;
2289 doCheck = false;
2290 src = fetchurl {
2290 src = fetchurl {
2291 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2291 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2292 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2292 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2293 };
2293 };
2294 meta = {
2294 meta = {
2295 license = [ pkgs.lib.licenses.publicDomain ];
2295 license = [ pkgs.lib.licenses.publicDomain ];
2296 };
2296 };
2297 };
2297 };
2298 "venusian" = super.buildPythonPackage {
2298 "venusian" = super.buildPythonPackage {
2299 name = "venusian-1.2.0";
2299 name = "venusian-1.2.0";
2300 doCheck = false;
2300 doCheck = false;
2301 src = fetchurl {
2301 src = fetchurl {
2302 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2302 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2303 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2303 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2304 };
2304 };
2305 meta = {
2305 meta = {
2306 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2306 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2307 };
2307 };
2308 };
2308 };
2309 "vine" = super.buildPythonPackage {
2309 "vine" = super.buildPythonPackage {
2310 name = "vine-1.3.0";
2310 name = "vine-1.3.0";
2311 doCheck = false;
2311 doCheck = false;
2312 src = fetchurl {
2312 src = fetchurl {
2313 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2313 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2314 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2314 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2315 };
2315 };
2316 meta = {
2316 meta = {
2317 license = [ pkgs.lib.licenses.bsdOriginal ];
2317 license = [ pkgs.lib.licenses.bsdOriginal ];
2318 };
2318 };
2319 };
2319 };
2320 "waitress" = super.buildPythonPackage {
2320 "waitress" = super.buildPythonPackage {
2321 name = "waitress-1.3.1";
2321 name = "waitress-1.3.1";
2322 doCheck = false;
2322 doCheck = false;
2323 src = fetchurl {
2323 src = fetchurl {
2324 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2324 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2325 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2325 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2326 };
2326 };
2327 meta = {
2327 meta = {
2328 license = [ pkgs.lib.licenses.zpl21 ];
2328 license = [ pkgs.lib.licenses.zpl21 ];
2329 };
2329 };
2330 };
2330 };
2331 "wcwidth" = super.buildPythonPackage {
2331 "wcwidth" = super.buildPythonPackage {
2332 name = "wcwidth-0.1.9";
2332 name = "wcwidth-0.1.9";
2333 doCheck = false;
2333 doCheck = false;
2334 src = fetchurl {
2334 src = fetchurl {
2335 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2335 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2336 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2336 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2337 };
2337 };
2338 meta = {
2338 meta = {
2339 license = [ pkgs.lib.licenses.mit ];
2339 license = [ pkgs.lib.licenses.mit ];
2340 };
2340 };
2341 };
2341 };
2342 "webencodings" = super.buildPythonPackage {
2342 "webencodings" = super.buildPythonPackage {
2343 name = "webencodings-0.5.1";
2343 name = "webencodings-0.5.1";
2344 doCheck = false;
2344 doCheck = false;
2345 src = fetchurl {
2345 src = fetchurl {
2346 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2346 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2347 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2347 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2348 };
2348 };
2349 meta = {
2349 meta = {
2350 license = [ pkgs.lib.licenses.bsdOriginal ];
2350 license = [ pkgs.lib.licenses.bsdOriginal ];
2351 };
2351 };
2352 };
2352 };
2353 "weberror" = super.buildPythonPackage {
2353 "weberror" = super.buildPythonPackage {
2354 name = "weberror-0.13.1";
2354 name = "weberror-0.13.1";
2355 doCheck = false;
2355 doCheck = false;
2356 propagatedBuildInputs = [
2356 propagatedBuildInputs = [
2357 self."webob"
2357 self."webob"
2358 self."tempita"
2358 self."tempita"
2359 self."pygments"
2359 self."pygments"
2360 self."paste"
2360 self."paste"
2361 ];
2361 ];
2362 src = fetchurl {
2362 src = fetchurl {
2363 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2363 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2364 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2364 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2365 };
2365 };
2366 meta = {
2366 meta = {
2367 license = [ pkgs.lib.licenses.mit ];
2367 license = [ pkgs.lib.licenses.mit ];
2368 };
2368 };
2369 };
2369 };
2370 "webhelpers2" = super.buildPythonPackage {
2370 "webhelpers2" = super.buildPythonPackage {
2371 name = "webhelpers2-2.0";
2371 name = "webhelpers2-2.0";
2372 doCheck = false;
2372 doCheck = false;
2373 propagatedBuildInputs = [
2373 propagatedBuildInputs = [
2374 self."markupsafe"
2374 self."markupsafe"
2375 self."six"
2375 self."six"
2376 ];
2376 ];
2377 src = fetchurl {
2377 src = fetchurl {
2378 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2378 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2379 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2379 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2380 };
2380 };
2381 meta = {
2381 meta = {
2382 license = [ pkgs.lib.licenses.mit ];
2382 license = [ pkgs.lib.licenses.mit ];
2383 };
2383 };
2384 };
2384 };
2385 "webob" = super.buildPythonPackage {
2385 "webob" = super.buildPythonPackage {
2386 name = "webob-1.8.5";
2386 name = "webob-1.8.5";
2387 doCheck = false;
2387 doCheck = false;
2388 src = fetchurl {
2388 src = fetchurl {
2389 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2389 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2390 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2390 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2391 };
2391 };
2392 meta = {
2392 meta = {
2393 license = [ pkgs.lib.licenses.mit ];
2393 license = [ pkgs.lib.licenses.mit ];
2394 };
2394 };
2395 };
2395 };
2396 "webtest" = super.buildPythonPackage {
2396 "webtest" = super.buildPythonPackage {
2397 name = "webtest-2.0.34";
2397 name = "webtest-2.0.34";
2398 doCheck = false;
2398 doCheck = false;
2399 propagatedBuildInputs = [
2399 propagatedBuildInputs = [
2400 self."six"
2400 self."six"
2401 self."webob"
2401 self."webob"
2402 self."waitress"
2402 self."waitress"
2403 self."beautifulsoup4"
2403 self."beautifulsoup4"
2404 ];
2404 ];
2405 src = fetchurl {
2405 src = fetchurl {
2406 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2406 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2407 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2407 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2408 };
2408 };
2409 meta = {
2409 meta = {
2410 license = [ pkgs.lib.licenses.mit ];
2410 license = [ pkgs.lib.licenses.mit ];
2411 };
2411 };
2412 };
2412 };
2413 "whoosh" = super.buildPythonPackage {
2413 "whoosh" = super.buildPythonPackage {
2414 name = "whoosh-2.7.4";
2414 name = "whoosh-2.7.4";
2415 doCheck = false;
2415 doCheck = false;
2416 src = fetchurl {
2416 src = fetchurl {
2417 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2417 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2418 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2418 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2419 };
2419 };
2420 meta = {
2420 meta = {
2421 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2421 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2422 };
2422 };
2423 };
2423 };
2424 "ws4py" = super.buildPythonPackage {
2424 "ws4py" = super.buildPythonPackage {
2425 name = "ws4py-0.5.1";
2425 name = "ws4py-0.5.1";
2426 doCheck = false;
2426 doCheck = false;
2427 src = fetchurl {
2427 src = fetchurl {
2428 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2428 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2429 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2429 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2430 };
2430 };
2431 meta = {
2431 meta = {
2432 license = [ pkgs.lib.licenses.bsdOriginal ];
2432 license = [ pkgs.lib.licenses.bsdOriginal ];
2433 };
2433 };
2434 };
2434 };
2435 "wsgiref" = super.buildPythonPackage {
2435 "wsgiref" = super.buildPythonPackage {
2436 name = "wsgiref-0.1.2";
2436 name = "wsgiref-0.1.2";
2437 doCheck = false;
2437 doCheck = false;
2438 src = fetchurl {
2438 src = fetchurl {
2439 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2439 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2440 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2440 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2441 };
2441 };
2442 meta = {
2442 meta = {
2443 license = [ { fullName = "PSF or ZPL"; } ];
2443 license = [ { fullName = "PSF or ZPL"; } ];
2444 };
2444 };
2445 };
2445 };
2446 "zipp" = super.buildPythonPackage {
2446 "zipp" = super.buildPythonPackage {
2447 name = "zipp-1.2.0";
2447 name = "zipp-1.2.0";
2448 doCheck = false;
2448 doCheck = false;
2449 propagatedBuildInputs = [
2449 propagatedBuildInputs = [
2450 self."contextlib2"
2450 self."contextlib2"
2451 ];
2451 ];
2452 src = fetchurl {
2452 src = fetchurl {
2453 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2453 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2454 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2454 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2455 };
2455 };
2456 meta = {
2456 meta = {
2457 license = [ pkgs.lib.licenses.mit ];
2457 license = [ pkgs.lib.licenses.mit ];
2458 };
2458 };
2459 };
2459 };
2460 "zope.cachedescriptors" = super.buildPythonPackage {
2460 "zope.cachedescriptors" = super.buildPythonPackage {
2461 name = "zope.cachedescriptors-4.3.1";
2461 name = "zope.cachedescriptors-4.3.1";
2462 doCheck = false;
2462 doCheck = false;
2463 propagatedBuildInputs = [
2463 propagatedBuildInputs = [
2464 self."setuptools"
2464 self."setuptools"
2465 ];
2465 ];
2466 src = fetchurl {
2466 src = fetchurl {
2467 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2467 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2468 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2468 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2469 };
2469 };
2470 meta = {
2470 meta = {
2471 license = [ pkgs.lib.licenses.zpl21 ];
2471 license = [ pkgs.lib.licenses.zpl21 ];
2472 };
2472 };
2473 };
2473 };
2474 "zope.deprecation" = super.buildPythonPackage {
2474 "zope.deprecation" = super.buildPythonPackage {
2475 name = "zope.deprecation-4.4.0";
2475 name = "zope.deprecation-4.4.0";
2476 doCheck = false;
2476 doCheck = false;
2477 propagatedBuildInputs = [
2477 propagatedBuildInputs = [
2478 self."setuptools"
2478 self."setuptools"
2479 ];
2479 ];
2480 src = fetchurl {
2480 src = fetchurl {
2481 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2481 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2482 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2482 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2483 };
2483 };
2484 meta = {
2484 meta = {
2485 license = [ pkgs.lib.licenses.zpl21 ];
2485 license = [ pkgs.lib.licenses.zpl21 ];
2486 };
2486 };
2487 };
2487 };
2488 "zope.event" = super.buildPythonPackage {
2488 "zope.event" = super.buildPythonPackage {
2489 name = "zope.event-4.4";
2489 name = "zope.event-4.4";
2490 doCheck = false;
2490 doCheck = false;
2491 propagatedBuildInputs = [
2491 propagatedBuildInputs = [
2492 self."setuptools"
2492 self."setuptools"
2493 ];
2493 ];
2494 src = fetchurl {
2494 src = fetchurl {
2495 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2495 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2496 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2496 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2497 };
2497 };
2498 meta = {
2498 meta = {
2499 license = [ pkgs.lib.licenses.zpl21 ];
2499 license = [ pkgs.lib.licenses.zpl21 ];
2500 };
2500 };
2501 };
2501 };
2502 "zope.interface" = super.buildPythonPackage {
2502 "zope.interface" = super.buildPythonPackage {
2503 name = "zope.interface-4.6.0";
2503 name = "zope.interface-4.6.0";
2504 doCheck = false;
2504 doCheck = false;
2505 propagatedBuildInputs = [
2505 propagatedBuildInputs = [
2506 self."setuptools"
2506 self."setuptools"
2507 ];
2507 ];
2508 src = fetchurl {
2508 src = fetchurl {
2509 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2509 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2510 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2510 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2511 };
2511 };
2512 meta = {
2512 meta = {
2513 license = [ pkgs.lib.licenses.zpl21 ];
2513 license = [ pkgs.lib.licenses.zpl21 ];
2514 };
2514 };
2515 };
2515 };
2516
2516
2517 ### Test requirements
2517 ### Test requirements
2518
2518
2519
2519
2520 }
2520 }
@@ -1,1 +1,1 b''
1 4.25.2 No newline at end of file
1 4.26.0 No newline at end of file
@@ -1,55 +1,56 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 from rhodecode import events
23 from rhodecode import events
24 from rhodecode.lib import rc_cache
24 from rhodecode.lib import rc_cache
25
25
26 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
27
27
28 # names of namespaces used for different permission related cached
28 # names of namespaces used for different permission related cached
29 # during flush operation we need to take care of all those
29 # during flush operation we need to take care of all those
30 cache_namespaces = [
30 cache_namespaces = [
31 'cache_user_auth.{}',
31 'cache_user_auth.{}',
32 'cache_user_repo_acl_ids.{}',
32 'cache_user_repo_acl_ids.{}',
33 'cache_user_user_group_acl_ids.{}',
33 'cache_user_user_group_acl_ids.{}',
34 'cache_user_repo_group_acl_ids.{}'
34 'cache_user_repo_group_acl_ids.{}'
35 ]
35 ]
36
36
37
37
38 def trigger_user_permission_flush(event):
38 def trigger_user_permission_flush(event):
39 """
39 """
40 Subscriber to the `UserPermissionsChange`. This triggers the
40 Subscriber to the `UserPermissionsChange`. This triggers the
41 automatic flush of permission caches, so the users affected receive new permissions
41 automatic flush of permission caches, so the users affected receive new permissions
42 Right Away
42 Right Away
43 """
43 """
44
44 invalidate = True
45 affected_user_ids = set(event.user_ids)
45 affected_user_ids = set(event.user_ids)
46 for user_id in affected_user_ids:
46 for user_id in affected_user_ids:
47 for cache_namespace_uid_tmpl in cache_namespaces:
47 for cache_namespace_uid_tmpl in cache_namespaces:
48 cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
48 cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
49 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
49 del_keys = rc_cache.clear_cache_namespace(
50 log.debug('Deleted %s cache keys for user_id: %s and namespace %s',
50 'cache_perms', cache_namespace_uid, invalidate=invalidate)
51 log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
51 del_keys, user_id, cache_namespace_uid)
52 del_keys, user_id, cache_namespace_uid)
52
53
53
54
54 def includeme(config):
55 def includeme(config):
55 config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
56 config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
@@ -1,254 +1,253 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25
25
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27
27
28 from pyramid.response import Response
28 from pyramid.response import Response
29 from pyramid.renderers import render
29 from pyramid.renderers import render
30
30
31 from rhodecode import events
31 from rhodecode import events
32 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 from rhodecode.apps._base import BaseAppView, DataGridAppView
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
34 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
35 from rhodecode.lib import helpers as h, audit_logger
35 from rhodecode.lib import helpers as h, audit_logger
36 from rhodecode.lib.utils2 import safe_unicode
36 from rhodecode.lib.utils2 import safe_unicode
37
37
38 from rhodecode.model.forms import UserGroupForm
38 from rhodecode.model.forms import UserGroupForm
39 from rhodecode.model.permission import PermissionModel
39 from rhodecode.model.permission import PermissionModel
40 from rhodecode.model.scm import UserGroupList
40 from rhodecode.model.scm import UserGroupList
41 from rhodecode.model.db import (
41 from rhodecode.model.db import (
42 or_, count, User, UserGroup, UserGroupMember, in_filter_generator)
42 or_, count, User, UserGroup, UserGroupMember, in_filter_generator)
43 from rhodecode.model.meta import Session
43 from rhodecode.model.meta import Session
44 from rhodecode.model.user_group import UserGroupModel
44 from rhodecode.model.user_group import UserGroupModel
45 from rhodecode.model.db import true
45 from rhodecode.model.db import true
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class AdminUserGroupsView(BaseAppView, DataGridAppView):
50 class AdminUserGroupsView(BaseAppView, DataGridAppView):
51
51
52 def load_default_context(self):
52 def load_default_context(self):
53 c = self._get_local_tmpl_context()
53 c = self._get_local_tmpl_context()
54 PermissionModel().set_global_permission_choices(
54 PermissionModel().set_global_permission_choices(
55 c, gettext_translator=self.request.translate)
55 c, gettext_translator=self.request.translate)
56 return c
56 return c
57
57
58 # permission check in data loading of
58 # permission check in data loading of
59 # `user_groups_list_data` via UserGroupList
59 # `user_groups_list_data` via UserGroupList
60 @LoginRequired()
60 @LoginRequired()
61 @NotAnonymous()
61 @NotAnonymous()
62 def user_groups_list(self):
62 def user_groups_list(self):
63 c = self.load_default_context()
63 c = self.load_default_context()
64 return self._get_template_context(c)
64 return self._get_template_context(c)
65
65
66 # permission check inside
66 # permission check inside
67 @LoginRequired()
67 @LoginRequired()
68 @NotAnonymous()
68 @NotAnonymous()
69 def user_groups_list_data(self):
69 def user_groups_list_data(self):
70 self.load_default_context()
70 self.load_default_context()
71 column_map = {
71 column_map = {
72 'active': 'users_group_active',
72 'active': 'users_group_active',
73 'description': 'user_group_description',
73 'description': 'user_group_description',
74 'members': 'members_total',
74 'members': 'members_total',
75 'owner': 'user_username',
75 'owner': 'user_username',
76 'sync': 'group_data'
76 'sync': 'group_data'
77 }
77 }
78 draw, start, limit = self._extract_chunk(self.request)
78 draw, start, limit = self._extract_chunk(self.request)
79 search_q, order_by, order_dir = self._extract_ordering(
79 search_q, order_by, order_dir = self._extract_ordering(
80 self.request, column_map=column_map)
80 self.request, column_map=column_map)
81
81
82 _render = self.request.get_partial_renderer(
82 _render = self.request.get_partial_renderer(
83 'rhodecode:templates/data_table/_dt_elements.mako')
83 'rhodecode:templates/data_table/_dt_elements.mako')
84
84
85 def user_group_name(user_group_name):
85 def user_group_name(user_group_name):
86 return _render("user_group_name", user_group_name)
86 return _render("user_group_name", user_group_name)
87
87
88 def user_group_actions(user_group_id, user_group_name):
88 def user_group_actions(user_group_id, user_group_name):
89 return _render("user_group_actions", user_group_id, user_group_name)
89 return _render("user_group_actions", user_group_id, user_group_name)
90
90
91 def user_profile(username):
91 def user_profile(username):
92 return _render('user_profile', username)
92 return _render('user_profile', username)
93
93
94 _perms = ['usergroup.admin']
94 _perms = ['usergroup.admin']
95 allowed_ids = [-1] + self._rhodecode_user.user_group_acl_ids_from_stack(_perms)
95 allowed_ids = [-1] + self._rhodecode_user.user_group_acl_ids_from_stack(_perms)
96
96
97 user_groups_data_total_count = UserGroup.query()\
97 user_groups_data_total_count = UserGroup.query()\
98 .filter(or_(
98 .filter(or_(
99 # generate multiple IN to fix limitation problems
99 # generate multiple IN to fix limitation problems
100 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
100 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
101 ))\
101 ))\
102 .count()
102 .count()
103
103
104 user_groups_data_total_inactive_count = UserGroup.query()\
104 user_groups_data_total_inactive_count = UserGroup.query()\
105 .filter(or_(
105 .filter(or_(
106 # generate multiple IN to fix limitation problems
106 # generate multiple IN to fix limitation problems
107 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
107 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
108 ))\
108 ))\
109 .filter(UserGroup.users_group_active != true()).count()
109 .filter(UserGroup.users_group_active != true()).count()
110
110
111 member_count = count(UserGroupMember.user_id)
111 member_count = count(UserGroupMember.user_id)
112 base_q = Session.query(
112 base_q = Session.query(
113 UserGroup.users_group_name,
113 UserGroup.users_group_name,
114 UserGroup.user_group_description,
114 UserGroup.user_group_description,
115 UserGroup.users_group_active,
115 UserGroup.users_group_active,
116 UserGroup.users_group_id,
116 UserGroup.users_group_id,
117 UserGroup.group_data,
117 UserGroup.group_data,
118 User,
118 User,
119 member_count.label('member_count')
119 member_count.label('member_count')
120 ) \
120 ) \
121 .filter(or_(
121 .filter(or_(
122 # generate multiple IN to fix limitation problems
122 # generate multiple IN to fix limitation problems
123 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
123 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
124 )) \
124 )) \
125 .outerjoin(UserGroupMember, UserGroupMember.users_group_id == UserGroup.users_group_id) \
125 .outerjoin(UserGroupMember, UserGroupMember.users_group_id == UserGroup.users_group_id) \
126 .join(User, User.user_id == UserGroup.user_id) \
126 .join(User, User.user_id == UserGroup.user_id) \
127 .group_by(UserGroup, User)
127 .group_by(UserGroup, User)
128
128
129 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
129 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
130
130
131 if search_q:
131 if search_q:
132 like_expression = u'%{}%'.format(safe_unicode(search_q))
132 like_expression = u'%{}%'.format(safe_unicode(search_q))
133 base_q = base_q.filter(or_(
133 base_q = base_q.filter(or_(
134 UserGroup.users_group_name.ilike(like_expression),
134 UserGroup.users_group_name.ilike(like_expression),
135 ))
135 ))
136 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
136 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
137
137
138 user_groups_data_total_filtered_count = base_q.count()
138 user_groups_data_total_filtered_count = base_q.count()
139 user_groups_data_total_filtered_inactive_count = base_q_inactive.count()
139 user_groups_data_total_filtered_inactive_count = base_q_inactive.count()
140
140
141 sort_defined = False
141 sort_defined = False
142 if order_by == 'members_total':
142 if order_by == 'members_total':
143 sort_col = member_count
143 sort_col = member_count
144 sort_defined = True
144 sort_defined = True
145 elif order_by == 'user_username':
145 elif order_by == 'user_username':
146 sort_col = User.username
146 sort_col = User.username
147 else:
147 else:
148 sort_col = getattr(UserGroup, order_by, None)
148 sort_col = getattr(UserGroup, order_by, None)
149
149
150 if sort_defined or sort_col:
150 if sort_defined or sort_col:
151 if order_dir == 'asc':
151 if order_dir == 'asc':
152 sort_col = sort_col.asc()
152 sort_col = sort_col.asc()
153 else:
153 else:
154 sort_col = sort_col.desc()
154 sort_col = sort_col.desc()
155
155
156 base_q = base_q.order_by(sort_col)
156 base_q = base_q.order_by(sort_col)
157 base_q = base_q.offset(start).limit(limit)
157 base_q = base_q.offset(start).limit(limit)
158
158
159 # authenticated access to user groups
159 # authenticated access to user groups
160 auth_user_group_list = base_q.all()
160 auth_user_group_list = base_q.all()
161
161
162 user_groups_data = []
162 user_groups_data = []
163 for user_gr in auth_user_group_list:
163 for user_gr in auth_user_group_list:
164 row = {
164 row = {
165 "users_group_name": user_group_name(user_gr.users_group_name),
165 "users_group_name": user_group_name(user_gr.users_group_name),
166 "description": h.escape(user_gr.user_group_description),
166 "description": h.escape(user_gr.user_group_description),
167 "members": user_gr.member_count,
167 "members": user_gr.member_count,
168 # NOTE(marcink): because of advanced query we
168 # NOTE(marcink): because of advanced query we
169 # need to load it like that
169 # need to load it like that
170 "sync": UserGroup._load_sync(
170 "sync": UserGroup._load_sync(
171 UserGroup._load_group_data(user_gr.group_data)),
171 UserGroup._load_group_data(user_gr.group_data)),
172 "active": h.bool2icon(user_gr.users_group_active),
172 "active": h.bool2icon(user_gr.users_group_active),
173 "owner": user_profile(user_gr.User.username),
173 "owner": user_profile(user_gr.User.username),
174 "action": user_group_actions(
174 "action": user_group_actions(
175 user_gr.users_group_id, user_gr.users_group_name)
175 user_gr.users_group_id, user_gr.users_group_name)
176 }
176 }
177 user_groups_data.append(row)
177 user_groups_data.append(row)
178
178
179 data = ({
179 data = ({
180 'draw': draw,
180 'draw': draw,
181 'data': user_groups_data,
181 'data': user_groups_data,
182 'recordsTotal': user_groups_data_total_count,
182 'recordsTotal': user_groups_data_total_count,
183 'recordsTotalInactive': user_groups_data_total_inactive_count,
183 'recordsTotalInactive': user_groups_data_total_inactive_count,
184 'recordsFiltered': user_groups_data_total_filtered_count,
184 'recordsFiltered': user_groups_data_total_filtered_count,
185 'recordsFilteredInactive': user_groups_data_total_filtered_inactive_count,
185 'recordsFilteredInactive': user_groups_data_total_filtered_inactive_count,
186 })
186 })
187
187
188 return data
188 return data
189
189
190 @LoginRequired()
190 @LoginRequired()
191 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
191 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
192 def user_groups_new(self):
192 def user_groups_new(self):
193 c = self.load_default_context()
193 c = self.load_default_context()
194 return self._get_template_context(c)
194 return self._get_template_context(c)
195
195
196 @LoginRequired()
196 @LoginRequired()
197 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
197 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
198 @CSRFRequired()
198 @CSRFRequired()
199 def user_groups_create(self):
199 def user_groups_create(self):
200 _ = self.request.translate
200 _ = self.request.translate
201 c = self.load_default_context()
201 c = self.load_default_context()
202 users_group_form = UserGroupForm(self.request.translate)()
202 users_group_form = UserGroupForm(self.request.translate)()
203
203
204 user_group_name = self.request.POST.get('users_group_name')
204 user_group_name = self.request.POST.get('users_group_name')
205 try:
205 try:
206 form_result = users_group_form.to_python(dict(self.request.POST))
206 form_result = users_group_form.to_python(dict(self.request.POST))
207 user_group = UserGroupModel().create(
207 user_group = UserGroupModel().create(
208 name=form_result['users_group_name'],
208 name=form_result['users_group_name'],
209 description=form_result['user_group_description'],
209 description=form_result['user_group_description'],
210 owner=self._rhodecode_user.user_id,
210 owner=self._rhodecode_user.user_id,
211 active=form_result['users_group_active'])
211 active=form_result['users_group_active'])
212 Session().flush()
212 Session().flush()
213 creation_data = user_group.get_api_data()
213 creation_data = user_group.get_api_data()
214 user_group_name = form_result['users_group_name']
214 user_group_name = form_result['users_group_name']
215
215
216 audit_logger.store_web(
216 audit_logger.store_web(
217 'user_group.create', action_data={'data': creation_data},
217 'user_group.create', action_data={'data': creation_data},
218 user=self._rhodecode_user)
218 user=self._rhodecode_user)
219
219
220 user_group_link = h.link_to(
220 user_group_link = h.link_to(
221 h.escape(user_group_name),
221 h.escape(user_group_name),
222 h.route_path(
222 h.route_path(
223 'edit_user_group', user_group_id=user_group.users_group_id))
223 'edit_user_group', user_group_id=user_group.users_group_id))
224 h.flash(h.literal(_('Created user group %(user_group_link)s')
224 h.flash(h.literal(_('Created user group %(user_group_link)s')
225 % {'user_group_link': user_group_link}),
225 % {'user_group_link': user_group_link}),
226 category='success')
226 category='success')
227 Session().commit()
227 Session().commit()
228 user_group_id = user_group.users_group_id
228 user_group_id = user_group.users_group_id
229 except formencode.Invalid as errors:
229 except formencode.Invalid as errors:
230
230
231 data = render(
231 data = render(
232 'rhodecode:templates/admin/user_groups/user_group_add.mako',
232 'rhodecode:templates/admin/user_groups/user_group_add.mako',
233 self._get_template_context(c), self.request)
233 self._get_template_context(c), self.request)
234 html = formencode.htmlfill.render(
234 html = formencode.htmlfill.render(
235 data,
235 data,
236 defaults=errors.value,
236 defaults=errors.value,
237 errors=errors.error_dict or {},
237 errors=errors.error_dict or {},
238 prefix_error=False,
238 prefix_error=False,
239 encoding="UTF-8",
239 encoding="UTF-8",
240 force_defaults=False
240 force_defaults=False
241 )
241 )
242 return Response(html)
242 return Response(html)
243
243
244 except Exception:
244 except Exception:
245 log.exception("Exception creating user group")
245 log.exception("Exception creating user group")
246 h.flash(_('Error occurred during creation of user group %s') \
246 h.flash(_('Error occurred during creation of user group %s') \
247 % user_group_name, category='error')
247 % user_group_name, category='error')
248 raise HTTPFound(h.route_path('user_groups_new'))
248 raise HTTPFound(h.route_path('user_groups_new'))
249
249
250 affected_user_ids = [self._rhodecode_user.user_id]
250 PermissionModel().trigger_permission_flush()
251 PermissionModel().trigger_permission_flush(affected_user_ids)
252
251
253 raise HTTPFound(
252 raise HTTPFound(
254 h.route_path('edit_user_group', user_group_id=user_group_id))
253 h.route_path('edit_user_group', user_group_id=user_group_id))
@@ -1,1227 +1,1227 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 from rhodecode.apps._base import add_route_with_slash
20 from rhodecode.apps._base import add_route_with_slash
21
21
22
22
23 def includeme(config):
23 def includeme(config):
24 from rhodecode.apps.repository.views.repo_artifacts import RepoArtifactsView
24 from rhodecode.apps.repository.views.repo_artifacts import RepoArtifactsView
25 from rhodecode.apps.repository.views.repo_audit_logs import AuditLogsView
25 from rhodecode.apps.repository.views.repo_audit_logs import AuditLogsView
26 from rhodecode.apps.repository.views.repo_automation import RepoAutomationView
26 from rhodecode.apps.repository.views.repo_automation import RepoAutomationView
27 from rhodecode.apps.repository.views.repo_bookmarks import RepoBookmarksView
27 from rhodecode.apps.repository.views.repo_bookmarks import RepoBookmarksView
28 from rhodecode.apps.repository.views.repo_branch_permissions import RepoSettingsBranchPermissionsView
28 from rhodecode.apps.repository.views.repo_branch_permissions import RepoSettingsBranchPermissionsView
29 from rhodecode.apps.repository.views.repo_branches import RepoBranchesView
29 from rhodecode.apps.repository.views.repo_branches import RepoBranchesView
30 from rhodecode.apps.repository.views.repo_caches import RepoCachesView
30 from rhodecode.apps.repository.views.repo_caches import RepoCachesView
31 from rhodecode.apps.repository.views.repo_changelog import RepoChangelogView
31 from rhodecode.apps.repository.views.repo_changelog import RepoChangelogView
32 from rhodecode.apps.repository.views.repo_checks import RepoChecksView
32 from rhodecode.apps.repository.views.repo_checks import RepoChecksView
33 from rhodecode.apps.repository.views.repo_commits import RepoCommitsView
33 from rhodecode.apps.repository.views.repo_commits import RepoCommitsView
34 from rhodecode.apps.repository.views.repo_compare import RepoCompareView
34 from rhodecode.apps.repository.views.repo_compare import RepoCompareView
35 from rhodecode.apps.repository.views.repo_feed import RepoFeedView
35 from rhodecode.apps.repository.views.repo_feed import RepoFeedView
36 from rhodecode.apps.repository.views.repo_files import RepoFilesView
36 from rhodecode.apps.repository.views.repo_files import RepoFilesView
37 from rhodecode.apps.repository.views.repo_forks import RepoForksView
37 from rhodecode.apps.repository.views.repo_forks import RepoForksView
38 from rhodecode.apps.repository.views.repo_maintainance import RepoMaintenanceView
38 from rhodecode.apps.repository.views.repo_maintainance import RepoMaintenanceView
39 from rhodecode.apps.repository.views.repo_permissions import RepoSettingsPermissionsView
39 from rhodecode.apps.repository.views.repo_permissions import RepoSettingsPermissionsView
40 from rhodecode.apps.repository.views.repo_pull_requests import RepoPullRequestsView
40 from rhodecode.apps.repository.views.repo_pull_requests import RepoPullRequestsView
41 from rhodecode.apps.repository.views.repo_review_rules import RepoReviewRulesView
41 from rhodecode.apps.repository.views.repo_review_rules import RepoReviewRulesView
42 from rhodecode.apps.repository.views.repo_settings import RepoSettingsView
42 from rhodecode.apps.repository.views.repo_settings import RepoSettingsView
43 from rhodecode.apps.repository.views.repo_settings_advanced import RepoSettingsAdvancedView
43 from rhodecode.apps.repository.views.repo_settings_advanced import RepoSettingsAdvancedView
44 from rhodecode.apps.repository.views.repo_settings_fields import RepoSettingsFieldsView
44 from rhodecode.apps.repository.views.repo_settings_fields import RepoSettingsFieldsView
45 from rhodecode.apps.repository.views.repo_settings_issue_trackers import RepoSettingsIssueTrackersView
45 from rhodecode.apps.repository.views.repo_settings_issue_trackers import RepoSettingsIssueTrackersView
46 from rhodecode.apps.repository.views.repo_settings_remote import RepoSettingsRemoteView
46 from rhodecode.apps.repository.views.repo_settings_remote import RepoSettingsRemoteView
47 from rhodecode.apps.repository.views.repo_settings_vcs import RepoSettingsVcsView
47 from rhodecode.apps.repository.views.repo_settings_vcs import RepoSettingsVcsView
48 from rhodecode.apps.repository.views.repo_strip import RepoStripView
48 from rhodecode.apps.repository.views.repo_strip import RepoStripView
49 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
49 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
50 from rhodecode.apps.repository.views.repo_tags import RepoTagsView
50 from rhodecode.apps.repository.views.repo_tags import RepoTagsView
51
51
52 # repo creating checks, special cases that aren't repo routes
52 # repo creating checks, special cases that aren't repo routes
53 config.add_route(
53 config.add_route(
54 name='repo_creating',
54 name='repo_creating',
55 pattern='/{repo_name:.*?[^/]}/repo_creating')
55 pattern='/{repo_name:.*?[^/]}/repo_creating')
56 config.add_view(
56 config.add_view(
57 RepoChecksView,
57 RepoChecksView,
58 attr='repo_creating',
58 attr='repo_creating',
59 route_name='repo_creating', request_method='GET',
59 route_name='repo_creating', request_method='GET',
60 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
60 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
61
61
62 config.add_route(
62 config.add_route(
63 name='repo_creating_check',
63 name='repo_creating_check',
64 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
64 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
65 config.add_view(
65 config.add_view(
66 RepoChecksView,
66 RepoChecksView,
67 attr='repo_creating_check',
67 attr='repo_creating_check',
68 route_name='repo_creating_check', request_method='GET',
68 route_name='repo_creating_check', request_method='GET',
69 renderer='json_ext')
69 renderer='json_ext')
70
70
71 # Summary
71 # Summary
72 # NOTE(marcink): one additional route is defined in very bottom, catch
72 # NOTE(marcink): one additional route is defined in very bottom, catch
73 # all pattern
73 # all pattern
74 config.add_route(
74 config.add_route(
75 name='repo_summary_explicit',
75 name='repo_summary_explicit',
76 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
76 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
77 config.add_view(
77 config.add_view(
78 RepoSummaryView,
78 RepoSummaryView,
79 attr='summary',
79 attr='summary',
80 route_name='repo_summary_explicit', request_method='GET',
80 route_name='repo_summary_explicit', request_method='GET',
81 renderer='rhodecode:templates/summary/summary.mako')
81 renderer='rhodecode:templates/summary/summary.mako')
82
82
83 config.add_route(
83 config.add_route(
84 name='repo_summary_commits',
84 name='repo_summary_commits',
85 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
85 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
86 config.add_view(
86 config.add_view(
87 RepoSummaryView,
87 RepoSummaryView,
88 attr='summary_commits',
88 attr='summary_commits',
89 route_name='repo_summary_commits', request_method='GET',
89 route_name='repo_summary_commits', request_method='GET',
90 renderer='rhodecode:templates/summary/summary_commits.mako')
90 renderer='rhodecode:templates/summary/summary_commits.mako')
91
91
92 # Commits
92 # Commits
93 config.add_route(
93 config.add_route(
94 name='repo_commit',
94 name='repo_commit',
95 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
95 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
96 config.add_view(
96 config.add_view(
97 RepoCommitsView,
97 RepoCommitsView,
98 attr='repo_commit_show',
98 attr='repo_commit_show',
99 route_name='repo_commit', request_method='GET',
99 route_name='repo_commit', request_method='GET',
100 renderer=None)
100 renderer=None)
101
101
102 config.add_route(
102 config.add_route(
103 name='repo_commit_children',
103 name='repo_commit_children',
104 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
104 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
105 config.add_view(
105 config.add_view(
106 RepoCommitsView,
106 RepoCommitsView,
107 attr='repo_commit_children',
107 attr='repo_commit_children',
108 route_name='repo_commit_children', request_method='GET',
108 route_name='repo_commit_children', request_method='GET',
109 renderer='json_ext', xhr=True)
109 renderer='json_ext', xhr=True)
110
110
111 config.add_route(
111 config.add_route(
112 name='repo_commit_parents',
112 name='repo_commit_parents',
113 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
113 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
114 config.add_view(
114 config.add_view(
115 RepoCommitsView,
115 RepoCommitsView,
116 attr='repo_commit_parents',
116 attr='repo_commit_parents',
117 route_name='repo_commit_parents', request_method='GET',
117 route_name='repo_commit_parents', request_method='GET',
118 renderer='json_ext')
118 renderer='json_ext')
119
119
120 config.add_route(
120 config.add_route(
121 name='repo_commit_raw',
121 name='repo_commit_raw',
122 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
122 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
123 config.add_view(
123 config.add_view(
124 RepoCommitsView,
124 RepoCommitsView,
125 attr='repo_commit_raw',
125 attr='repo_commit_raw',
126 route_name='repo_commit_raw', request_method='GET',
126 route_name='repo_commit_raw', request_method='GET',
127 renderer=None)
127 renderer=None)
128
128
129 config.add_route(
129 config.add_route(
130 name='repo_commit_patch',
130 name='repo_commit_patch',
131 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
131 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
132 config.add_view(
132 config.add_view(
133 RepoCommitsView,
133 RepoCommitsView,
134 attr='repo_commit_patch',
134 attr='repo_commit_patch',
135 route_name='repo_commit_patch', request_method='GET',
135 route_name='repo_commit_patch', request_method='GET',
136 renderer=None)
136 renderer=None)
137
137
138 config.add_route(
138 config.add_route(
139 name='repo_commit_download',
139 name='repo_commit_download',
140 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
140 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
141 config.add_view(
141 config.add_view(
142 RepoCommitsView,
142 RepoCommitsView,
143 attr='repo_commit_download',
143 attr='repo_commit_download',
144 route_name='repo_commit_download', request_method='GET',
144 route_name='repo_commit_download', request_method='GET',
145 renderer=None)
145 renderer=None)
146
146
147 config.add_route(
147 config.add_route(
148 name='repo_commit_data',
148 name='repo_commit_data',
149 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
149 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
150 config.add_view(
150 config.add_view(
151 RepoCommitsView,
151 RepoCommitsView,
152 attr='repo_commit_data',
152 attr='repo_commit_data',
153 route_name='repo_commit_data', request_method='GET',
153 route_name='repo_commit_data', request_method='GET',
154 renderer='json_ext', xhr=True)
154 renderer='json_ext', xhr=True)
155
155
156 config.add_route(
156 config.add_route(
157 name='repo_commit_comment_create',
157 name='repo_commit_comment_create',
158 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
158 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
159 config.add_view(
159 config.add_view(
160 RepoCommitsView,
160 RepoCommitsView,
161 attr='repo_commit_comment_create',
161 attr='repo_commit_comment_create',
162 route_name='repo_commit_comment_create', request_method='POST',
162 route_name='repo_commit_comment_create', request_method='POST',
163 renderer='json_ext')
163 renderer='json_ext')
164
164
165 config.add_route(
165 config.add_route(
166 name='repo_commit_comment_preview',
166 name='repo_commit_comment_preview',
167 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
167 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
168 config.add_view(
168 config.add_view(
169 RepoCommitsView,
169 RepoCommitsView,
170 attr='repo_commit_comment_preview',
170 attr='repo_commit_comment_preview',
171 route_name='repo_commit_comment_preview', request_method='POST',
171 route_name='repo_commit_comment_preview', request_method='POST',
172 renderer='string', xhr=True)
172 renderer='string', xhr=True)
173
173
174 config.add_route(
174 config.add_route(
175 name='repo_commit_comment_history_view',
175 name='repo_commit_comment_history_view',
176 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True)
176 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/history_view/{comment_history_id}', repo_route=True)
177 config.add_view(
177 config.add_view(
178 RepoCommitsView,
178 RepoCommitsView,
179 attr='repo_commit_comment_history_view',
179 attr='repo_commit_comment_history_view',
180 route_name='repo_commit_comment_history_view', request_method='POST',
180 route_name='repo_commit_comment_history_view', request_method='POST',
181 renderer='string', xhr=True)
181 renderer='string', xhr=True)
182
182
183 config.add_route(
183 config.add_route(
184 name='repo_commit_comment_attachment_upload',
184 name='repo_commit_comment_attachment_upload',
185 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
185 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
186 config.add_view(
186 config.add_view(
187 RepoCommitsView,
187 RepoCommitsView,
188 attr='repo_commit_comment_attachment_upload',
188 attr='repo_commit_comment_attachment_upload',
189 route_name='repo_commit_comment_attachment_upload', request_method='POST',
189 route_name='repo_commit_comment_attachment_upload', request_method='POST',
190 renderer='json_ext', xhr=True)
190 renderer='json_ext', xhr=True)
191
191
192 config.add_route(
192 config.add_route(
193 name='repo_commit_comment_delete',
193 name='repo_commit_comment_delete',
194 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
194 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
195 config.add_view(
195 config.add_view(
196 RepoCommitsView,
196 RepoCommitsView,
197 attr='repo_commit_comment_delete',
197 attr='repo_commit_comment_delete',
198 route_name='repo_commit_comment_delete', request_method='POST',
198 route_name='repo_commit_comment_delete', request_method='POST',
199 renderer='json_ext')
199 renderer='json_ext')
200
200
201 config.add_route(
201 config.add_route(
202 name='repo_commit_comment_edit',
202 name='repo_commit_comment_edit',
203 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
203 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
204 config.add_view(
204 config.add_view(
205 RepoCommitsView,
205 RepoCommitsView,
206 attr='repo_commit_comment_edit',
206 attr='repo_commit_comment_edit',
207 route_name='repo_commit_comment_edit', request_method='POST',
207 route_name='repo_commit_comment_edit', request_method='POST',
208 renderer='json_ext')
208 renderer='json_ext')
209
209
210 # still working url for backward compat.
210 # still working url for backward compat.
211 config.add_route(
211 config.add_route(
212 name='repo_commit_raw_deprecated',
212 name='repo_commit_raw_deprecated',
213 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
213 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
214 config.add_view(
214 config.add_view(
215 RepoCommitsView,
215 RepoCommitsView,
216 attr='repo_commit_raw',
216 attr='repo_commit_raw',
217 route_name='repo_commit_raw_deprecated', request_method='GET',
217 route_name='repo_commit_raw_deprecated', request_method='GET',
218 renderer=None)
218 renderer=None)
219
219
220 # Files
220 # Files
221 config.add_route(
221 config.add_route(
222 name='repo_archivefile',
222 name='repo_archivefile',
223 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
223 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
224 config.add_view(
224 config.add_view(
225 RepoFilesView,
225 RepoFilesView,
226 attr='repo_archivefile',
226 attr='repo_archivefile',
227 route_name='repo_archivefile', request_method='GET',
227 route_name='repo_archivefile', request_method='GET',
228 renderer=None)
228 renderer=None)
229
229
230 config.add_route(
230 config.add_route(
231 name='repo_files_diff',
231 name='repo_files_diff',
232 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
232 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
233 config.add_view(
233 config.add_view(
234 RepoFilesView,
234 RepoFilesView,
235 attr='repo_files_diff',
235 attr='repo_files_diff',
236 route_name='repo_files_diff', request_method='GET',
236 route_name='repo_files_diff', request_method='GET',
237 renderer=None)
237 renderer=None)
238
238
239 config.add_route( # legacy route to make old links work
239 config.add_route( # legacy route to make old links work
240 name='repo_files_diff_2way_redirect',
240 name='repo_files_diff_2way_redirect',
241 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
241 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
242 config.add_view(
242 config.add_view(
243 RepoFilesView,
243 RepoFilesView,
244 attr='repo_files_diff_2way_redirect',
244 attr='repo_files_diff_2way_redirect',
245 route_name='repo_files_diff_2way_redirect', request_method='GET',
245 route_name='repo_files_diff_2way_redirect', request_method='GET',
246 renderer=None)
246 renderer=None)
247
247
248 config.add_route(
248 config.add_route(
249 name='repo_files',
249 name='repo_files',
250 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
250 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
251 config.add_view(
251 config.add_view(
252 RepoFilesView,
252 RepoFilesView,
253 attr='repo_files',
253 attr='repo_files',
254 route_name='repo_files', request_method='GET',
254 route_name='repo_files', request_method='GET',
255 renderer=None)
255 renderer=None)
256
256
257 config.add_route(
257 config.add_route(
258 name='repo_files:default_path',
258 name='repo_files:default_path',
259 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
259 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
260 config.add_view(
260 config.add_view(
261 RepoFilesView,
261 RepoFilesView,
262 attr='repo_files',
262 attr='repo_files',
263 route_name='repo_files:default_path', request_method='GET',
263 route_name='repo_files:default_path', request_method='GET',
264 renderer=None)
264 renderer=None)
265
265
266 config.add_route(
266 config.add_route(
267 name='repo_files:default_commit',
267 name='repo_files:default_commit',
268 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
268 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
269 config.add_view(
269 config.add_view(
270 RepoFilesView,
270 RepoFilesView,
271 attr='repo_files',
271 attr='repo_files',
272 route_name='repo_files:default_commit', request_method='GET',
272 route_name='repo_files:default_commit', request_method='GET',
273 renderer=None)
273 renderer=None)
274
274
275 config.add_route(
275 config.add_route(
276 name='repo_files:rendered',
276 name='repo_files:rendered',
277 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
277 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
278 config.add_view(
278 config.add_view(
279 RepoFilesView,
279 RepoFilesView,
280 attr='repo_files',
280 attr='repo_files',
281 route_name='repo_files:rendered', request_method='GET',
281 route_name='repo_files:rendered', request_method='GET',
282 renderer=None)
282 renderer=None)
283
283
284 config.add_route(
284 config.add_route(
285 name='repo_files:annotated',
285 name='repo_files:annotated',
286 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
286 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
287 config.add_view(
287 config.add_view(
288 RepoFilesView,
288 RepoFilesView,
289 attr='repo_files',
289 attr='repo_files',
290 route_name='repo_files:annotated', request_method='GET',
290 route_name='repo_files:annotated', request_method='GET',
291 renderer=None)
291 renderer=None)
292
292
293 config.add_route(
293 config.add_route(
294 name='repo_files:annotated_previous',
294 name='repo_files:annotated_previous',
295 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
295 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
296 config.add_view(
296 config.add_view(
297 RepoFilesView,
297 RepoFilesView,
298 attr='repo_files_annotated_previous',
298 attr='repo_files_annotated_previous',
299 route_name='repo_files:annotated_previous', request_method='GET',
299 route_name='repo_files:annotated_previous', request_method='GET',
300 renderer=None)
300 renderer=None)
301
301
302 config.add_route(
302 config.add_route(
303 name='repo_nodetree_full',
303 name='repo_nodetree_full',
304 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
304 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
305 config.add_view(
305 config.add_view(
306 RepoFilesView,
306 RepoFilesView,
307 attr='repo_nodetree_full',
307 attr='repo_nodetree_full',
308 route_name='repo_nodetree_full', request_method='GET',
308 route_name='repo_nodetree_full', request_method='GET',
309 renderer=None, xhr=True)
309 renderer=None, xhr=True)
310
310
311 config.add_route(
311 config.add_route(
312 name='repo_nodetree_full:default_path',
312 name='repo_nodetree_full:default_path',
313 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
313 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
314 config.add_view(
314 config.add_view(
315 RepoFilesView,
315 RepoFilesView,
316 attr='repo_nodetree_full',
316 attr='repo_nodetree_full',
317 route_name='repo_nodetree_full:default_path', request_method='GET',
317 route_name='repo_nodetree_full:default_path', request_method='GET',
318 renderer=None, xhr=True)
318 renderer=None, xhr=True)
319
319
320 config.add_route(
320 config.add_route(
321 name='repo_files_nodelist',
321 name='repo_files_nodelist',
322 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
322 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
323 config.add_view(
323 config.add_view(
324 RepoFilesView,
324 RepoFilesView,
325 attr='repo_nodelist',
325 attr='repo_nodelist',
326 route_name='repo_files_nodelist', request_method='GET',
326 route_name='repo_files_nodelist', request_method='GET',
327 renderer='json_ext', xhr=True)
327 renderer='json_ext', xhr=True)
328
328
329 config.add_route(
329 config.add_route(
330 name='repo_file_raw',
330 name='repo_file_raw',
331 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
331 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
332 config.add_view(
332 config.add_view(
333 RepoFilesView,
333 RepoFilesView,
334 attr='repo_file_raw',
334 attr='repo_file_raw',
335 route_name='repo_file_raw', request_method='GET',
335 route_name='repo_file_raw', request_method='GET',
336 renderer=None)
336 renderer=None)
337
337
338 config.add_route(
338 config.add_route(
339 name='repo_file_download',
339 name='repo_file_download',
340 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
340 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
341 config.add_view(
341 config.add_view(
342 RepoFilesView,
342 RepoFilesView,
343 attr='repo_file_download',
343 attr='repo_file_download',
344 route_name='repo_file_download', request_method='GET',
344 route_name='repo_file_download', request_method='GET',
345 renderer=None)
345 renderer=None)
346
346
347 config.add_route( # backward compat to keep old links working
347 config.add_route( # backward compat to keep old links working
348 name='repo_file_download:legacy',
348 name='repo_file_download:legacy',
349 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
349 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
350 repo_route=True)
350 repo_route=True)
351 config.add_view(
351 config.add_view(
352 RepoFilesView,
352 RepoFilesView,
353 attr='repo_file_download',
353 attr='repo_file_download',
354 route_name='repo_file_download:legacy', request_method='GET',
354 route_name='repo_file_download:legacy', request_method='GET',
355 renderer=None)
355 renderer=None)
356
356
357 config.add_route(
357 config.add_route(
358 name='repo_file_history',
358 name='repo_file_history',
359 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
359 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
360 config.add_view(
360 config.add_view(
361 RepoFilesView,
361 RepoFilesView,
362 attr='repo_file_history',
362 attr='repo_file_history',
363 route_name='repo_file_history', request_method='GET',
363 route_name='repo_file_history', request_method='GET',
364 renderer='json_ext')
364 renderer='json_ext')
365
365
366 config.add_route(
366 config.add_route(
367 name='repo_file_authors',
367 name='repo_file_authors',
368 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
368 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
369 config.add_view(
369 config.add_view(
370 RepoFilesView,
370 RepoFilesView,
371 attr='repo_file_authors',
371 attr='repo_file_authors',
372 route_name='repo_file_authors', request_method='GET',
372 route_name='repo_file_authors', request_method='GET',
373 renderer='rhodecode:templates/files/file_authors_box.mako')
373 renderer='rhodecode:templates/files/file_authors_box.mako')
374
374
375 config.add_route(
375 config.add_route(
376 name='repo_files_check_head',
376 name='repo_files_check_head',
377 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
377 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
378 repo_route=True)
378 repo_route=True)
379 config.add_view(
379 config.add_view(
380 RepoFilesView,
380 RepoFilesView,
381 attr='repo_files_check_head',
381 attr='repo_files_check_head',
382 route_name='repo_files_check_head', request_method='POST',
382 route_name='repo_files_check_head', request_method='POST',
383 renderer='json_ext', xhr=True)
383 renderer='json_ext', xhr=True)
384
384
385 config.add_route(
385 config.add_route(
386 name='repo_files_remove_file',
386 name='repo_files_remove_file',
387 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
387 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
388 repo_route=True)
388 repo_route=True)
389 config.add_view(
389 config.add_view(
390 RepoFilesView,
390 RepoFilesView,
391 attr='repo_files_remove_file',
391 attr='repo_files_remove_file',
392 route_name='repo_files_remove_file', request_method='GET',
392 route_name='repo_files_remove_file', request_method='GET',
393 renderer='rhodecode:templates/files/files_delete.mako')
393 renderer='rhodecode:templates/files/files_delete.mako')
394
394
395 config.add_route(
395 config.add_route(
396 name='repo_files_delete_file',
396 name='repo_files_delete_file',
397 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
397 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
398 repo_route=True)
398 repo_route=True)
399 config.add_view(
399 config.add_view(
400 RepoFilesView,
400 RepoFilesView,
401 attr='repo_files_delete_file',
401 attr='repo_files_delete_file',
402 route_name='repo_files_delete_file', request_method='POST',
402 route_name='repo_files_delete_file', request_method='POST',
403 renderer=None)
403 renderer=None)
404
404
405 config.add_route(
405 config.add_route(
406 name='repo_files_edit_file',
406 name='repo_files_edit_file',
407 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
407 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
408 repo_route=True)
408 repo_route=True)
409 config.add_view(
409 config.add_view(
410 RepoFilesView,
410 RepoFilesView,
411 attr='repo_files_edit_file',
411 attr='repo_files_edit_file',
412 route_name='repo_files_edit_file', request_method='GET',
412 route_name='repo_files_edit_file', request_method='GET',
413 renderer='rhodecode:templates/files/files_edit.mako')
413 renderer='rhodecode:templates/files/files_edit.mako')
414
414
415 config.add_route(
415 config.add_route(
416 name='repo_files_update_file',
416 name='repo_files_update_file',
417 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
417 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
418 repo_route=True)
418 repo_route=True)
419 config.add_view(
419 config.add_view(
420 RepoFilesView,
420 RepoFilesView,
421 attr='repo_files_update_file',
421 attr='repo_files_update_file',
422 route_name='repo_files_update_file', request_method='POST',
422 route_name='repo_files_update_file', request_method='POST',
423 renderer=None)
423 renderer=None)
424
424
425 config.add_route(
425 config.add_route(
426 name='repo_files_add_file',
426 name='repo_files_add_file',
427 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
427 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
428 repo_route=True)
428 repo_route=True)
429 config.add_view(
429 config.add_view(
430 RepoFilesView,
430 RepoFilesView,
431 attr='repo_files_add_file',
431 attr='repo_files_add_file',
432 route_name='repo_files_add_file', request_method='GET',
432 route_name='repo_files_add_file', request_method='GET',
433 renderer='rhodecode:templates/files/files_add.mako')
433 renderer='rhodecode:templates/files/files_add.mako')
434
434
435 config.add_route(
435 config.add_route(
436 name='repo_files_upload_file',
436 name='repo_files_upload_file',
437 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
437 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
438 repo_route=True)
438 repo_route=True)
439 config.add_view(
439 config.add_view(
440 RepoFilesView,
440 RepoFilesView,
441 attr='repo_files_add_file',
441 attr='repo_files_add_file',
442 route_name='repo_files_upload_file', request_method='GET',
442 route_name='repo_files_upload_file', request_method='GET',
443 renderer='rhodecode:templates/files/files_upload.mako')
443 renderer='rhodecode:templates/files/files_upload.mako')
444 config.add_view( # POST creates
444 config.add_view( # POST creates
445 RepoFilesView,
445 RepoFilesView,
446 attr='repo_files_upload_file',
446 attr='repo_files_upload_file',
447 route_name='repo_files_upload_file', request_method='POST',
447 route_name='repo_files_upload_file', request_method='POST',
448 renderer='json_ext')
448 renderer='json_ext')
449
449
450 config.add_route(
450 config.add_route(
451 name='repo_files_create_file',
451 name='repo_files_create_file',
452 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
452 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
453 repo_route=True)
453 repo_route=True)
454 config.add_view( # POST creates
454 config.add_view( # POST creates
455 RepoFilesView,
455 RepoFilesView,
456 attr='repo_files_create_file',
456 attr='repo_files_create_file',
457 route_name='repo_files_create_file', request_method='POST',
457 route_name='repo_files_create_file', request_method='POST',
458 renderer=None)
458 renderer=None)
459
459
460 # Refs data
460 # Refs data
461 config.add_route(
461 config.add_route(
462 name='repo_refs_data',
462 name='repo_refs_data',
463 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
463 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
464 config.add_view(
464 config.add_view(
465 RepoSummaryView,
465 RepoSummaryView,
466 attr='repo_refs_data',
466 attr='repo_refs_data',
467 route_name='repo_refs_data', request_method='GET',
467 route_name='repo_refs_data', request_method='GET',
468 renderer='json_ext')
468 renderer='json_ext')
469
469
470 config.add_route(
470 config.add_route(
471 name='repo_refs_changelog_data',
471 name='repo_refs_changelog_data',
472 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
472 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
473 config.add_view(
473 config.add_view(
474 RepoSummaryView,
474 RepoSummaryView,
475 attr='repo_refs_changelog_data',
475 attr='repo_refs_changelog_data',
476 route_name='repo_refs_changelog_data', request_method='GET',
476 route_name='repo_refs_changelog_data', request_method='GET',
477 renderer='json_ext')
477 renderer='json_ext')
478
478
479 config.add_route(
479 config.add_route(
480 name='repo_stats',
480 name='repo_stats',
481 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
481 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
482 config.add_view(
482 config.add_view(
483 RepoSummaryView,
483 RepoSummaryView,
484 attr='repo_stats',
484 attr='repo_stats',
485 route_name='repo_stats', request_method='GET',
485 route_name='repo_stats', request_method='GET',
486 renderer='json_ext')
486 renderer='json_ext')
487
487
488 # Commits
488 # Commits
489 config.add_route(
489 config.add_route(
490 name='repo_commits',
490 name='repo_commits',
491 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
491 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
492 config.add_view(
492 config.add_view(
493 RepoChangelogView,
493 RepoChangelogView,
494 attr='repo_changelog',
494 attr='repo_changelog',
495 route_name='repo_commits', request_method='GET',
495 route_name='repo_commits', request_method='GET',
496 renderer='rhodecode:templates/commits/changelog.mako')
496 renderer='rhodecode:templates/commits/changelog.mako')
497 # old routes for backward compat
497 # old routes for backward compat
498 config.add_view(
498 config.add_view(
499 RepoChangelogView,
499 RepoChangelogView,
500 attr='repo_changelog',
500 attr='repo_changelog',
501 route_name='repo_changelog', request_method='GET',
501 route_name='repo_changelog', request_method='GET',
502 renderer='rhodecode:templates/commits/changelog.mako')
502 renderer='rhodecode:templates/commits/changelog.mako')
503
503
504 config.add_route(
504 config.add_route(
505 name='repo_commits_elements',
505 name='repo_commits_elements',
506 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
506 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
507 config.add_view(
507 config.add_view(
508 RepoChangelogView,
508 RepoChangelogView,
509 attr='repo_commits_elements',
509 attr='repo_commits_elements',
510 route_name='repo_commits_elements', request_method=('GET', 'POST'),
510 route_name='repo_commits_elements', request_method=('GET', 'POST'),
511 renderer='rhodecode:templates/commits/changelog_elements.mako',
511 renderer='rhodecode:templates/commits/changelog_elements.mako',
512 xhr=True)
512 xhr=True)
513
513
514 config.add_route(
514 config.add_route(
515 name='repo_commits_elements_file',
515 name='repo_commits_elements_file',
516 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
516 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
517 config.add_view(
517 config.add_view(
518 RepoChangelogView,
518 RepoChangelogView,
519 attr='repo_commits_elements',
519 attr='repo_commits_elements',
520 route_name='repo_commits_elements_file', request_method=('GET', 'POST'),
520 route_name='repo_commits_elements_file', request_method=('GET', 'POST'),
521 renderer='rhodecode:templates/commits/changelog_elements.mako',
521 renderer='rhodecode:templates/commits/changelog_elements.mako',
522 xhr=True)
522 xhr=True)
523
523
524 config.add_route(
524 config.add_route(
525 name='repo_commits_file',
525 name='repo_commits_file',
526 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
526 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
527 config.add_view(
527 config.add_view(
528 RepoChangelogView,
528 RepoChangelogView,
529 attr='repo_changelog',
529 attr='repo_changelog',
530 route_name='repo_commits_file', request_method='GET',
530 route_name='repo_commits_file', request_method='GET',
531 renderer='rhodecode:templates/commits/changelog.mako')
531 renderer='rhodecode:templates/commits/changelog.mako')
532 # old routes for backward compat
532 # old routes for backward compat
533 config.add_view(
533 config.add_view(
534 RepoChangelogView,
534 RepoChangelogView,
535 attr='repo_changelog',
535 attr='repo_changelog',
536 route_name='repo_changelog_file', request_method='GET',
536 route_name='repo_changelog_file', request_method='GET',
537 renderer='rhodecode:templates/commits/changelog.mako')
537 renderer='rhodecode:templates/commits/changelog.mako')
538
538
539 # Changelog (old deprecated name for commits page)
539 # Changelog (old deprecated name for commits page)
540 config.add_route(
540 config.add_route(
541 name='repo_changelog',
541 name='repo_changelog',
542 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
542 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
543 config.add_route(
543 config.add_route(
544 name='repo_changelog_file',
544 name='repo_changelog_file',
545 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
545 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
546
546
547 # Compare
547 # Compare
548 config.add_route(
548 config.add_route(
549 name='repo_compare_select',
549 name='repo_compare_select',
550 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
550 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
551 config.add_view(
551 config.add_view(
552 RepoCompareView,
552 RepoCompareView,
553 attr='compare_select',
553 attr='compare_select',
554 route_name='repo_compare_select', request_method='GET',
554 route_name='repo_compare_select', request_method='GET',
555 renderer='rhodecode:templates/compare/compare_diff.mako')
555 renderer='rhodecode:templates/compare/compare_diff.mako')
556
556
557 config.add_route(
557 config.add_route(
558 name='repo_compare',
558 name='repo_compare',
559 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
559 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
560 config.add_view(
560 config.add_view(
561 RepoCompareView,
561 RepoCompareView,
562 attr='compare',
562 attr='compare',
563 route_name='repo_compare', request_method='GET',
563 route_name='repo_compare', request_method='GET',
564 renderer=None)
564 renderer=None)
565
565
566 # Tags
566 # Tags
567 config.add_route(
567 config.add_route(
568 name='tags_home',
568 name='tags_home',
569 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
569 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
570 config.add_view(
570 config.add_view(
571 RepoTagsView,
571 RepoTagsView,
572 attr='tags',
572 attr='tags',
573 route_name='tags_home', request_method='GET',
573 route_name='tags_home', request_method='GET',
574 renderer='rhodecode:templates/tags/tags.mako')
574 renderer='rhodecode:templates/tags/tags.mako')
575
575
576 # Branches
576 # Branches
577 config.add_route(
577 config.add_route(
578 name='branches_home',
578 name='branches_home',
579 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
579 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
580 config.add_view(
580 config.add_view(
581 RepoBranchesView,
581 RepoBranchesView,
582 attr='branches',
582 attr='branches',
583 route_name='branches_home', request_method='GET',
583 route_name='branches_home', request_method='GET',
584 renderer='rhodecode:templates/branches/branches.mako')
584 renderer='rhodecode:templates/branches/branches.mako')
585
585
586 # Bookmarks
586 # Bookmarks
587 config.add_route(
587 config.add_route(
588 name='bookmarks_home',
588 name='bookmarks_home',
589 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
589 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
590 config.add_view(
590 config.add_view(
591 RepoBookmarksView,
591 RepoBookmarksView,
592 attr='bookmarks',
592 attr='bookmarks',
593 route_name='bookmarks_home', request_method='GET',
593 route_name='bookmarks_home', request_method='GET',
594 renderer='rhodecode:templates/bookmarks/bookmarks.mako')
594 renderer='rhodecode:templates/bookmarks/bookmarks.mako')
595
595
596 # Forks
596 # Forks
597 config.add_route(
597 config.add_route(
598 name='repo_fork_new',
598 name='repo_fork_new',
599 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
599 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
600 repo_forbid_when_archived=True,
600 repo_forbid_when_archived=True,
601 repo_accepted_types=['hg', 'git'])
601 repo_accepted_types=['hg', 'git'])
602 config.add_view(
602 config.add_view(
603 RepoForksView,
603 RepoForksView,
604 attr='repo_fork_new',
604 attr='repo_fork_new',
605 route_name='repo_fork_new', request_method='GET',
605 route_name='repo_fork_new', request_method='GET',
606 renderer='rhodecode:templates/forks/forks.mako')
606 renderer='rhodecode:templates/forks/forks.mako')
607
607
608 config.add_route(
608 config.add_route(
609 name='repo_fork_create',
609 name='repo_fork_create',
610 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
610 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
611 repo_forbid_when_archived=True,
611 repo_forbid_when_archived=True,
612 repo_accepted_types=['hg', 'git'])
612 repo_accepted_types=['hg', 'git'])
613 config.add_view(
613 config.add_view(
614 RepoForksView,
614 RepoForksView,
615 attr='repo_fork_create',
615 attr='repo_fork_create',
616 route_name='repo_fork_create', request_method='POST',
616 route_name='repo_fork_create', request_method='POST',
617 renderer='rhodecode:templates/forks/fork.mako')
617 renderer='rhodecode:templates/forks/fork.mako')
618
618
619 config.add_route(
619 config.add_route(
620 name='repo_forks_show_all',
620 name='repo_forks_show_all',
621 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
621 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
622 repo_accepted_types=['hg', 'git'])
622 repo_accepted_types=['hg', 'git'])
623 config.add_view(
623 config.add_view(
624 RepoForksView,
624 RepoForksView,
625 attr='repo_forks_show_all',
625 attr='repo_forks_show_all',
626 route_name='repo_forks_show_all', request_method='GET',
626 route_name='repo_forks_show_all', request_method='GET',
627 renderer='rhodecode:templates/forks/forks.mako')
627 renderer='rhodecode:templates/forks/forks.mako')
628
628
629 config.add_route(
629 config.add_route(
630 name='repo_forks_data',
630 name='repo_forks_data',
631 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
631 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
632 repo_accepted_types=['hg', 'git'])
632 repo_accepted_types=['hg', 'git'])
633 config.add_view(
633 config.add_view(
634 RepoForksView,
634 RepoForksView,
635 attr='repo_forks_data',
635 attr='repo_forks_data',
636 route_name='repo_forks_data', request_method='GET',
636 route_name='repo_forks_data', request_method='GET',
637 renderer='json_ext', xhr=True)
637 renderer='json_ext', xhr=True)
638
638
639 # Pull Requests
639 # Pull Requests
640 config.add_route(
640 config.add_route(
641 name='pullrequest_show',
641 name='pullrequest_show',
642 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
642 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
643 repo_route=True)
643 repo_route=True)
644 config.add_view(
644 config.add_view(
645 RepoPullRequestsView,
645 RepoPullRequestsView,
646 attr='pull_request_show',
646 attr='pull_request_show',
647 route_name='pullrequest_show', request_method='GET',
647 route_name='pullrequest_show', request_method='GET',
648 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
648 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
649
649
650 config.add_route(
650 config.add_route(
651 name='pullrequest_show_all',
651 name='pullrequest_show_all',
652 pattern='/{repo_name:.*?[^/]}/pull-request',
652 pattern='/{repo_name:.*?[^/]}/pull-request',
653 repo_route=True, repo_accepted_types=['hg', 'git'])
653 repo_route=True, repo_accepted_types=['hg', 'git'])
654 config.add_view(
654 config.add_view(
655 RepoPullRequestsView,
655 RepoPullRequestsView,
656 attr='pull_request_list',
656 attr='pull_request_list',
657 route_name='pullrequest_show_all', request_method='GET',
657 route_name='pullrequest_show_all', request_method='GET',
658 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
658 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
659
659
660 config.add_route(
660 config.add_route(
661 name='pullrequest_show_all_data',
661 name='pullrequest_show_all_data',
662 pattern='/{repo_name:.*?[^/]}/pull-request-data',
662 pattern='/{repo_name:.*?[^/]}/pull-request-data',
663 repo_route=True, repo_accepted_types=['hg', 'git'])
663 repo_route=True, repo_accepted_types=['hg', 'git'])
664 config.add_view(
664 config.add_view(
665 RepoPullRequestsView,
665 RepoPullRequestsView,
666 attr='pull_request_list_data',
666 attr='pull_request_list_data',
667 route_name='pullrequest_show_all_data', request_method='GET',
667 route_name='pullrequest_show_all_data', request_method='GET',
668 renderer='json_ext', xhr=True)
668 renderer='json_ext', xhr=True)
669
669
670 config.add_route(
670 config.add_route(
671 name='pullrequest_repo_refs',
671 name='pullrequest_repo_refs',
672 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
672 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
673 repo_route=True)
673 repo_route=True)
674 config.add_view(
674 config.add_view(
675 RepoPullRequestsView,
675 RepoPullRequestsView,
676 attr='pull_request_repo_refs',
676 attr='pull_request_repo_refs',
677 route_name='pullrequest_repo_refs', request_method='GET',
677 route_name='pullrequest_repo_refs', request_method='GET',
678 renderer='json_ext', xhr=True)
678 renderer='json_ext', xhr=True)
679
679
680 config.add_route(
680 config.add_route(
681 name='pullrequest_repo_targets',
681 name='pullrequest_repo_targets',
682 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
682 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
683 repo_route=True)
683 repo_route=True)
684 config.add_view(
684 config.add_view(
685 RepoPullRequestsView,
685 RepoPullRequestsView,
686 attr='pullrequest_repo_targets',
686 attr='pullrequest_repo_targets',
687 route_name='pullrequest_repo_targets', request_method='GET',
687 route_name='pullrequest_repo_targets', request_method='GET',
688 renderer='json_ext', xhr=True)
688 renderer='json_ext', xhr=True)
689
689
690 config.add_route(
690 config.add_route(
691 name='pullrequest_new',
691 name='pullrequest_new',
692 pattern='/{repo_name:.*?[^/]}/pull-request/new',
692 pattern='/{repo_name:.*?[^/]}/pull-request/new',
693 repo_route=True, repo_accepted_types=['hg', 'git'],
693 repo_route=True, repo_accepted_types=['hg', 'git'],
694 repo_forbid_when_archived=True)
694 repo_forbid_when_archived=True)
695 config.add_view(
695 config.add_view(
696 RepoPullRequestsView,
696 RepoPullRequestsView,
697 attr='pull_request_new',
697 attr='pull_request_new',
698 route_name='pullrequest_new', request_method='GET',
698 route_name='pullrequest_new', request_method='GET',
699 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
699 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
700
700
701 config.add_route(
701 config.add_route(
702 name='pullrequest_create',
702 name='pullrequest_create',
703 pattern='/{repo_name:.*?[^/]}/pull-request/create',
703 pattern='/{repo_name:.*?[^/]}/pull-request/create',
704 repo_route=True, repo_accepted_types=['hg', 'git'],
704 repo_route=True, repo_accepted_types=['hg', 'git'],
705 repo_forbid_when_archived=True)
705 repo_forbid_when_archived=True)
706 config.add_view(
706 config.add_view(
707 RepoPullRequestsView,
707 RepoPullRequestsView,
708 attr='pull_request_create',
708 attr='pull_request_create',
709 route_name='pullrequest_create', request_method='POST',
709 route_name='pullrequest_create', request_method='POST',
710 renderer=None)
710 renderer=None)
711
711
712 config.add_route(
712 config.add_route(
713 name='pullrequest_update',
713 name='pullrequest_update',
714 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
714 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
715 repo_route=True, repo_forbid_when_archived=True)
715 repo_route=True, repo_forbid_when_archived=True)
716 config.add_view(
716 config.add_view(
717 RepoPullRequestsView,
717 RepoPullRequestsView,
718 attr='pull_request_update',
718 attr='pull_request_update',
719 route_name='pullrequest_update', request_method='POST',
719 route_name='pullrequest_update', request_method='POST',
720 renderer='json_ext')
720 renderer='json_ext')
721
721
722 config.add_route(
722 config.add_route(
723 name='pullrequest_merge',
723 name='pullrequest_merge',
724 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
724 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
725 repo_route=True, repo_forbid_when_archived=True)
725 repo_route=True, repo_forbid_when_archived=True)
726 config.add_view(
726 config.add_view(
727 RepoPullRequestsView,
727 RepoPullRequestsView,
728 attr='pull_request_merge',
728 attr='pull_request_merge',
729 route_name='pullrequest_merge', request_method='POST',
729 route_name='pullrequest_merge', request_method='POST',
730 renderer='json_ext')
730 renderer='json_ext')
731
731
732 config.add_route(
732 config.add_route(
733 name='pullrequest_delete',
733 name='pullrequest_delete',
734 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
734 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
735 repo_route=True, repo_forbid_when_archived=True)
735 repo_route=True, repo_forbid_when_archived=True)
736 config.add_view(
736 config.add_view(
737 RepoPullRequestsView,
737 RepoPullRequestsView,
738 attr='pull_request_delete',
738 attr='pull_request_delete',
739 route_name='pullrequest_delete', request_method='POST',
739 route_name='pullrequest_delete', request_method='POST',
740 renderer='json_ext')
740 renderer='json_ext')
741
741
742 config.add_route(
742 config.add_route(
743 name='pullrequest_comment_create',
743 name='pullrequest_comment_create',
744 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
744 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
745 repo_route=True)
745 repo_route=True)
746 config.add_view(
746 config.add_view(
747 RepoPullRequestsView,
747 RepoPullRequestsView,
748 attr='pull_request_comment_create',
748 attr='pull_request_comment_create',
749 route_name='pullrequest_comment_create', request_method='POST',
749 route_name='pullrequest_comment_create', request_method='POST',
750 renderer='json_ext')
750 renderer='json_ext')
751
751
752 config.add_route(
752 config.add_route(
753 name='pullrequest_comment_edit',
753 name='pullrequest_comment_edit',
754 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
754 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
755 repo_route=True, repo_accepted_types=['hg', 'git'])
755 repo_route=True, repo_accepted_types=['hg', 'git'])
756 config.add_view(
756 config.add_view(
757 RepoPullRequestsView,
757 RepoPullRequestsView,
758 attr='pull_request_comment_edit',
758 attr='pull_request_comment_edit',
759 route_name='pullrequest_comment_edit', request_method='POST',
759 route_name='pullrequest_comment_edit', request_method='POST',
760 renderer='json_ext')
760 renderer='json_ext')
761
761
762 config.add_route(
762 config.add_route(
763 name='pullrequest_comment_delete',
763 name='pullrequest_comment_delete',
764 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
764 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
765 repo_route=True, repo_accepted_types=['hg', 'git'])
765 repo_route=True, repo_accepted_types=['hg', 'git'])
766 config.add_view(
766 config.add_view(
767 RepoPullRequestsView,
767 RepoPullRequestsView,
768 attr='pull_request_comment_delete',
768 attr='pull_request_comment_delete',
769 route_name='pullrequest_comment_delete', request_method='POST',
769 route_name='pullrequest_comment_delete', request_method='POST',
770 renderer='json_ext')
770 renderer='json_ext')
771
771
772 config.add_route(
772 config.add_route(
773 name='pullrequest_comments',
773 name='pullrequest_comments',
774 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments',
774 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments',
775 repo_route=True)
775 repo_route=True)
776 config.add_view(
776 config.add_view(
777 RepoPullRequestsView,
777 RepoPullRequestsView,
778 attr='pullrequest_comments',
778 attr='pullrequest_comments',
779 route_name='pullrequest_comments', request_method='POST',
779 route_name='pullrequest_comments', request_method='POST',
780 renderer='string_html', xhr=True)
780 renderer='string_html', xhr=True)
781
781
782 config.add_route(
782 config.add_route(
783 name='pullrequest_todos',
783 name='pullrequest_todos',
784 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos',
784 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos',
785 repo_route=True)
785 repo_route=True)
786 config.add_view(
786 config.add_view(
787 RepoPullRequestsView,
787 RepoPullRequestsView,
788 attr='pullrequest_todos',
788 attr='pullrequest_todos',
789 route_name='pullrequest_todos', request_method='POST',
789 route_name='pullrequest_todos', request_method='POST',
790 renderer='string_html', xhr=True)
790 renderer='string_html', xhr=True)
791
791
792 config.add_route(
792 config.add_route(
793 name='pullrequest_drafts',
793 name='pullrequest_drafts',
794 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/drafts',
794 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/drafts',
795 repo_route=True)
795 repo_route=True)
796 config.add_view(
796 config.add_view(
797 RepoPullRequestsView,
797 RepoPullRequestsView,
798 attr='pullrequest_drafts',
798 attr='pullrequest_drafts',
799 route_name='pullrequest_drafts', request_method='POST',
799 route_name='pullrequest_drafts', request_method='POST',
800 renderer='string_html', xhr=True)
800 renderer='string_html', xhr=True)
801
801
802 # Artifacts, (EE feature)
802 # Artifacts, (EE feature)
803 config.add_route(
803 config.add_route(
804 name='repo_artifacts_list',
804 name='repo_artifacts_list',
805 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
805 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
806 config.add_view(
806 config.add_view(
807 RepoArtifactsView,
807 RepoArtifactsView,
808 attr='repo_artifacts',
808 attr='repo_artifacts',
809 route_name='repo_artifacts_list', request_method='GET',
809 route_name='repo_artifacts_list', request_method='GET',
810 renderer='rhodecode:templates/artifacts/artifact_list.mako')
810 renderer='rhodecode:templates/artifacts/artifact_list.mako')
811
811
812 # Settings
812 # Settings
813 config.add_route(
813 config.add_route(
814 name='edit_repo',
814 name='edit_repo',
815 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
815 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
816 config.add_view(
816 config.add_view(
817 RepoSettingsView,
817 RepoSettingsView,
818 attr='edit_settings',
818 attr='edit_settings',
819 route_name='edit_repo', request_method='GET',
819 route_name='edit_repo', request_method='GET',
820 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
820 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
821 # update is POST on edit_repo
821 # update is POST on edit_repo
822 config.add_view(
822 config.add_view(
823 RepoSettingsView,
823 RepoSettingsView,
824 attr='edit_settings_update',
824 attr='edit_settings_update',
825 route_name='edit_repo', request_method='POST',
825 route_name='edit_repo', request_method='POST',
826 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
826 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
827
827
828 # Settings advanced
828 # Settings advanced
829 config.add_route(
829 config.add_route(
830 name='edit_repo_advanced',
830 name='edit_repo_advanced',
831 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
831 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
832 config.add_view(
832 config.add_view(
833 RepoSettingsAdvancedView,
833 RepoSettingsAdvancedView,
834 attr='edit_advanced',
834 attr='edit_advanced',
835 route_name='edit_repo_advanced', request_method='GET',
835 route_name='edit_repo_advanced', request_method='GET',
836 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
836 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
837
837
838 config.add_route(
838 config.add_route(
839 name='edit_repo_advanced_archive',
839 name='edit_repo_advanced_archive',
840 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
840 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
841 config.add_view(
841 config.add_view(
842 RepoSettingsAdvancedView,
842 RepoSettingsAdvancedView,
843 attr='edit_advanced_archive',
843 attr='edit_advanced_archive',
844 route_name='edit_repo_advanced_archive', request_method='POST',
844 route_name='edit_repo_advanced_archive', request_method='POST',
845 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
845 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
846
846
847 config.add_route(
847 config.add_route(
848 name='edit_repo_advanced_delete',
848 name='edit_repo_advanced_delete',
849 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
849 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
850 config.add_view(
850 config.add_view(
851 RepoSettingsAdvancedView,
851 RepoSettingsAdvancedView,
852 attr='edit_advanced_delete',
852 attr='edit_advanced_delete',
853 route_name='edit_repo_advanced_delete', request_method='POST',
853 route_name='edit_repo_advanced_delete', request_method='POST',
854 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
854 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
855
855
856 config.add_route(
856 config.add_route(
857 name='edit_repo_advanced_locking',
857 name='edit_repo_advanced_locking',
858 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
858 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
859 config.add_view(
859 config.add_view(
860 RepoSettingsAdvancedView,
860 RepoSettingsAdvancedView,
861 attr='edit_advanced_toggle_locking',
861 attr='edit_advanced_toggle_locking',
862 route_name='edit_repo_advanced_locking', request_method='POST',
862 route_name='edit_repo_advanced_locking', request_method='POST',
863 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
863 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
864
864
865 config.add_route(
865 config.add_route(
866 name='edit_repo_advanced_journal',
866 name='edit_repo_advanced_journal',
867 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
867 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
868 config.add_view(
868 config.add_view(
869 RepoSettingsAdvancedView,
869 RepoSettingsAdvancedView,
870 attr='edit_advanced_journal',
870 attr='edit_advanced_journal',
871 route_name='edit_repo_advanced_journal', request_method='POST',
871 route_name='edit_repo_advanced_journal', request_method='POST',
872 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
872 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
873
873
874 config.add_route(
874 config.add_route(
875 name='edit_repo_advanced_fork',
875 name='edit_repo_advanced_fork',
876 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
876 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
877 config.add_view(
877 config.add_view(
878 RepoSettingsAdvancedView,
878 RepoSettingsAdvancedView,
879 attr='edit_advanced_fork',
879 attr='edit_advanced_fork',
880 route_name='edit_repo_advanced_fork', request_method='POST',
880 route_name='edit_repo_advanced_fork', request_method='POST',
881 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
881 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
882
882
883 config.add_route(
883 config.add_route(
884 name='edit_repo_advanced_hooks',
884 name='edit_repo_advanced_hooks',
885 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
885 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
886 config.add_view(
886 config.add_view(
887 RepoSettingsAdvancedView,
887 RepoSettingsAdvancedView,
888 attr='edit_advanced_install_hooks',
888 attr='edit_advanced_install_hooks',
889 route_name='edit_repo_advanced_hooks', request_method='GET',
889 route_name='edit_repo_advanced_hooks', request_method='GET',
890 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
890 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
891
891
892 # Caches
892 # Caches
893 config.add_route(
893 config.add_route(
894 name='edit_repo_caches',
894 name='edit_repo_caches',
895 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
895 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
896 config.add_view(
896 config.add_view(
897 RepoCachesView,
897 RepoCachesView,
898 attr='repo_caches',
898 attr='repo_caches',
899 route_name='edit_repo_caches', request_method='GET',
899 route_name='edit_repo_caches', request_method='GET',
900 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
900 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
901 config.add_view(
901 config.add_view(
902 RepoCachesView,
902 RepoCachesView,
903 attr='repo_caches_purge',
903 attr='repo_caches_purge',
904 route_name='edit_repo_caches', request_method='POST')
904 route_name='edit_repo_caches', request_method='POST')
905
905
906 # Permissions
906 # Permissions
907 config.add_route(
907 config.add_route(
908 name='edit_repo_perms',
908 name='edit_repo_perms',
909 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
909 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
910 config.add_view(
910 config.add_view(
911 RepoSettingsPermissionsView,
911 RepoSettingsPermissionsView,
912 attr='edit_permissions',
912 attr='edit_permissions',
913 route_name='edit_repo_perms', request_method='GET',
913 route_name='edit_repo_perms', request_method='GET',
914 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
914 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
915 config.add_view(
915 config.add_view(
916 RepoSettingsPermissionsView,
916 RepoSettingsPermissionsView,
917 attr='edit_permissions_update',
917 attr='edit_permissions_update',
918 route_name='edit_repo_perms', request_method='POST',
918 route_name='edit_repo_perms', request_method='POST',
919 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
919 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
920
920
921 config.add_route(
921 config.add_route(
922 name='edit_repo_perms_set_private',
922 name='edit_repo_perms_set_private',
923 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
923 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
924 config.add_view(
924 config.add_view(
925 RepoSettingsPermissionsView,
925 RepoSettingsPermissionsView,
926 attr='edit_permissions_set_private_repo',
926 attr='edit_permissions_set_private_repo',
927 route_name='edit_repo_perms_set_private', request_method='POST',
927 route_name='edit_repo_perms_set_private', request_method='POST',
928 renderer='json_ext')
928 renderer='json_ext')
929
929
930 # Permissions Branch (EE feature)
930 # Permissions Branch (EE feature)
931 config.add_route(
931 config.add_route(
932 name='edit_repo_perms_branch',
932 name='edit_repo_perms_branch',
933 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
933 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
934 config.add_view(
934 config.add_view(
935 RepoSettingsBranchPermissionsView,
935 RepoSettingsBranchPermissionsView,
936 attr='branch_permissions',
936 attr='branch_permissions',
937 route_name='edit_repo_perms_branch', request_method='GET',
937 route_name='edit_repo_perms_branch', request_method='GET',
938 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
938 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
939
939
940 config.add_route(
940 config.add_route(
941 name='edit_repo_perms_branch_delete',
941 name='edit_repo_perms_branch_delete',
942 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
942 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
943 repo_route=True)
943 repo_route=True)
944 ## Only implemented in EE
944 ## Only implemented in EE
945
945
946 # Maintenance
946 # Maintenance
947 config.add_route(
947 config.add_route(
948 name='edit_repo_maintenance',
948 name='edit_repo_maintenance',
949 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
949 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
950 config.add_view(
950 config.add_view(
951 RepoMaintenanceView,
951 RepoMaintenanceView,
952 attr='repo_maintenance',
952 attr='repo_maintenance',
953 route_name='edit_repo_maintenance', request_method='GET',
953 route_name='edit_repo_maintenance', request_method='GET',
954 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
954 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
955
955
956 config.add_route(
956 config.add_route(
957 name='edit_repo_maintenance_execute',
957 name='edit_repo_maintenance_execute',
958 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
958 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
959 config.add_view(
959 config.add_view(
960 RepoMaintenanceView,
960 RepoMaintenanceView,
961 attr='repo_maintenance_execute',
961 attr='repo_maintenance_execute',
962 route_name='edit_repo_maintenance_execute', request_method='GET',
962 route_name='edit_repo_maintenance_execute', request_method='GET',
963 renderer='json', xhr=True)
963 renderer='json', xhr=True)
964
964
965 # Fields
965 # Fields
966 config.add_route(
966 config.add_route(
967 name='edit_repo_fields',
967 name='edit_repo_fields',
968 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
968 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
969 config.add_view(
969 config.add_view(
970 RepoSettingsFieldsView,
970 RepoSettingsFieldsView,
971 attr='repo_field_edit',
971 attr='repo_field_edit',
972 route_name='edit_repo_fields', request_method='GET',
972 route_name='edit_repo_fields', request_method='GET',
973 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
973 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
974
974
975 config.add_route(
975 config.add_route(
976 name='edit_repo_fields_create',
976 name='edit_repo_fields_create',
977 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
977 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
978 config.add_view(
978 config.add_view(
979 RepoSettingsFieldsView,
979 RepoSettingsFieldsView,
980 attr='repo_field_create',
980 attr='repo_field_create',
981 route_name='edit_repo_fields_create', request_method='POST',
981 route_name='edit_repo_fields_create', request_method='POST',
982 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
982 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
983
983
984 config.add_route(
984 config.add_route(
985 name='edit_repo_fields_delete',
985 name='edit_repo_fields_delete',
986 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
986 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
987 config.add_view(
987 config.add_view(
988 RepoSettingsFieldsView,
988 RepoSettingsFieldsView,
989 attr='repo_field_delete',
989 attr='repo_field_delete',
990 route_name='edit_repo_fields_delete', request_method='POST',
990 route_name='edit_repo_fields_delete', request_method='POST',
991 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
991 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
992
992
993 # Locking
993 # Locking
994 config.add_route(
994 config.add_route(
995 name='repo_edit_toggle_locking',
995 name='repo_edit_toggle_locking',
996 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
996 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
997 config.add_view(
997 config.add_view(
998 RepoSettingsView,
998 RepoSettingsView,
999 attr='edit_advanced_toggle_locking',
999 attr='edit_advanced_toggle_locking',
1000 route_name='repo_edit_toggle_locking', request_method='GET',
1000 route_name='repo_edit_toggle_locking', request_method='GET',
1001 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1001 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1002
1002
1003 # Remote
1003 # Remote
1004 config.add_route(
1004 config.add_route(
1005 name='edit_repo_remote',
1005 name='edit_repo_remote',
1006 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
1006 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
1007 config.add_view(
1007 config.add_view(
1008 RepoSettingsRemoteView,
1008 RepoSettingsRemoteView,
1009 attr='repo_remote_edit_form',
1009 attr='repo_remote_edit_form',
1010 route_name='edit_repo_remote', request_method='GET',
1010 route_name='edit_repo_remote', request_method='GET',
1011 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1011 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1012
1012
1013 config.add_route(
1013 config.add_route(
1014 name='edit_repo_remote_pull',
1014 name='edit_repo_remote_pull',
1015 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
1015 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
1016 config.add_view(
1016 config.add_view(
1017 RepoSettingsRemoteView,
1017 RepoSettingsRemoteView,
1018 attr='repo_remote_pull_changes',
1018 attr='repo_remote_pull_changes',
1019 route_name='edit_repo_remote_pull', request_method='POST',
1019 route_name='edit_repo_remote_pull', request_method='POST',
1020 renderer=None)
1020 renderer=None)
1021
1021
1022 config.add_route(
1022 config.add_route(
1023 name='edit_repo_remote_push',
1023 name='edit_repo_remote_push',
1024 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
1024 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
1025
1025
1026 # Statistics
1026 # Statistics
1027 config.add_route(
1027 config.add_route(
1028 name='edit_repo_statistics',
1028 name='edit_repo_statistics',
1029 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
1029 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
1030 config.add_view(
1030 config.add_view(
1031 RepoSettingsView,
1031 RepoSettingsView,
1032 attr='edit_statistics_form',
1032 attr='edit_statistics_form',
1033 route_name='edit_repo_statistics', request_method='GET',
1033 route_name='edit_repo_statistics', request_method='GET',
1034 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1034 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1035
1035
1036 config.add_route(
1036 config.add_route(
1037 name='edit_repo_statistics_reset',
1037 name='edit_repo_statistics_reset',
1038 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
1038 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
1039 config.add_view(
1039 config.add_view(
1040 RepoSettingsView,
1040 RepoSettingsView,
1041 attr='repo_statistics_reset',
1041 attr='repo_statistics_reset',
1042 route_name='edit_repo_statistics_reset', request_method='POST',
1042 route_name='edit_repo_statistics_reset', request_method='POST',
1043 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1043 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1044
1044
1045 # Issue trackers
1045 # Issue trackers
1046 config.add_route(
1046 config.add_route(
1047 name='edit_repo_issuetracker',
1047 name='edit_repo_issuetracker',
1048 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
1048 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
1049 config.add_view(
1049 config.add_view(
1050 RepoSettingsIssueTrackersView,
1050 RepoSettingsIssueTrackersView,
1051 attr='repo_issuetracker',
1051 attr='repo_issuetracker',
1052 route_name='edit_repo_issuetracker', request_method='GET',
1052 route_name='edit_repo_issuetracker', request_method='GET',
1053 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1053 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1054
1054
1055 config.add_route(
1055 config.add_route(
1056 name='edit_repo_issuetracker_test',
1056 name='edit_repo_issuetracker_test',
1057 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
1057 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
1058 config.add_view(
1058 config.add_view(
1059 RepoSettingsIssueTrackersView,
1059 RepoSettingsIssueTrackersView,
1060 attr='repo_issuetracker_test',
1060 attr='repo_issuetracker_test',
1061 route_name='edit_repo_issuetracker_test', request_method='POST',
1061 route_name='edit_repo_issuetracker_test', request_method='POST',
1062 renderer='string', xhr=True)
1062 renderer='string', xhr=True)
1063
1063
1064 config.add_route(
1064 config.add_route(
1065 name='edit_repo_issuetracker_delete',
1065 name='edit_repo_issuetracker_delete',
1066 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
1066 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
1067 config.add_view(
1067 config.add_view(
1068 RepoSettingsIssueTrackersView,
1068 RepoSettingsIssueTrackersView,
1069 attr='repo_issuetracker_delete',
1069 attr='repo_issuetracker_delete',
1070 route_name='edit_repo_issuetracker_delete', request_method='POST',
1070 route_name='edit_repo_issuetracker_delete', request_method='POST',
1071 renderer='json_ext', xhr=True)
1071 renderer='json_ext', xhr=True)
1072
1072
1073 config.add_route(
1073 config.add_route(
1074 name='edit_repo_issuetracker_update',
1074 name='edit_repo_issuetracker_update',
1075 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
1075 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
1076 config.add_view(
1076 config.add_view(
1077 RepoSettingsIssueTrackersView,
1077 RepoSettingsIssueTrackersView,
1078 attr='repo_issuetracker_update',
1078 attr='repo_issuetracker_update',
1079 route_name='edit_repo_issuetracker_update', request_method='POST',
1079 route_name='edit_repo_issuetracker_update', request_method='POST',
1080 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1080 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1081
1081
1082 # VCS Settings
1082 # VCS Settings
1083 config.add_route(
1083 config.add_route(
1084 name='edit_repo_vcs',
1084 name='edit_repo_vcs',
1085 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
1085 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
1086 config.add_view(
1086 config.add_view(
1087 RepoSettingsVcsView,
1087 RepoSettingsVcsView,
1088 attr='repo_vcs_settings',
1088 attr='repo_vcs_settings',
1089 route_name='edit_repo_vcs', request_method='GET',
1089 route_name='edit_repo_vcs', request_method='GET',
1090 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1090 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1091
1091
1092 config.add_route(
1092 config.add_route(
1093 name='edit_repo_vcs_update',
1093 name='edit_repo_vcs_update',
1094 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
1094 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
1095 config.add_view(
1095 config.add_view(
1096 RepoSettingsVcsView,
1096 RepoSettingsVcsView,
1097 attr='repo_settings_vcs_update',
1097 attr='repo_settings_vcs_update',
1098 route_name='edit_repo_vcs_update', request_method='POST',
1098 route_name='edit_repo_vcs_update', request_method='POST',
1099 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1099 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1100
1100
1101 # svn pattern
1101 # svn pattern
1102 config.add_route(
1102 config.add_route(
1103 name='edit_repo_vcs_svn_pattern_delete',
1103 name='edit_repo_vcs_svn_pattern_delete',
1104 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
1104 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
1105 config.add_view(
1105 config.add_view(
1106 RepoSettingsVcsView,
1106 RepoSettingsVcsView,
1107 attr='repo_settings_delete_svn_pattern',
1107 attr='repo_settings_delete_svn_pattern',
1108 route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST',
1108 route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST',
1109 renderer='json_ext', xhr=True)
1109 renderer='json_ext', xhr=True)
1110
1110
1111 # Repo Review Rules (EE feature)
1111 # Repo Review Rules (EE feature)
1112 config.add_route(
1112 config.add_route(
1113 name='repo_reviewers',
1113 name='repo_reviewers',
1114 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
1114 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
1115 config.add_view(
1115 config.add_view(
1116 RepoReviewRulesView,
1116 RepoReviewRulesView,
1117 attr='repo_review_rules',
1117 attr='repo_review_rules',
1118 route_name='repo_reviewers', request_method='GET',
1118 route_name='repo_reviewers', request_method='GET',
1119 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1119 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1120
1120
1121 config.add_route(
1121 config.add_route(
1122 name='repo_default_reviewers_data',
1122 name='repo_default_reviewers_data',
1123 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
1123 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
1124 config.add_view(
1124 config.add_view(
1125 RepoReviewRulesView,
1125 RepoReviewRulesView,
1126 attr='repo_default_reviewers_data',
1126 attr='repo_default_reviewers_data',
1127 route_name='repo_default_reviewers_data', request_method='GET',
1127 route_name='repo_default_reviewers_data', request_method='GET',
1128 renderer='json_ext')
1128 renderer='json_ext')
1129
1129
1130 # Repo Automation (EE feature)
1130 # Repo Automation (EE feature)
1131 config.add_route(
1131 config.add_route(
1132 name='repo_automation',
1132 name='repo_automation',
1133 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
1133 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
1134 config.add_view(
1134 config.add_view(
1135 RepoAutomationView,
1135 RepoAutomationView,
1136 attr='repo_automation',
1136 attr='repo_automation',
1137 route_name='repo_automation', request_method='GET',
1137 route_name='repo_automation', request_method='GET',
1138 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1138 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1139
1139
1140 # Strip
1140 # Strip
1141 config.add_route(
1141 config.add_route(
1142 name='edit_repo_strip',
1142 name='edit_repo_strip',
1143 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
1143 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
1144 config.add_view(
1144 config.add_view(
1145 RepoStripView,
1145 RepoStripView,
1146 attr='strip',
1146 attr='strip',
1147 route_name='edit_repo_strip', request_method='GET',
1147 route_name='edit_repo_strip', request_method='GET',
1148 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1148 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1149
1149
1150 config.add_route(
1150 config.add_route(
1151 name='strip_check',
1151 name='strip_check',
1152 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
1152 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
1153 config.add_view(
1153 config.add_view(
1154 RepoStripView,
1154 RepoStripView,
1155 attr='strip_check',
1155 attr='strip_check',
1156 route_name='strip_check', request_method='POST',
1156 route_name='strip_check', request_method='POST',
1157 renderer='json', xhr=True)
1157 renderer='json', xhr=True)
1158
1158
1159 config.add_route(
1159 config.add_route(
1160 name='strip_execute',
1160 name='strip_execute',
1161 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
1161 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
1162 config.add_view(
1162 config.add_view(
1163 RepoStripView,
1163 RepoStripView,
1164 attr='strip_execute',
1164 attr='strip_execute',
1165 route_name='strip_execute', request_method='POST',
1165 route_name='strip_execute', request_method='POST',
1166 renderer='json', xhr=True)
1166 renderer='json', xhr=True)
1167
1167
1168 # Audit logs
1168 # Audit logs
1169 config.add_route(
1169 config.add_route(
1170 name='edit_repo_audit_logs',
1170 name='edit_repo_audit_logs',
1171 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
1171 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
1172 config.add_view(
1172 config.add_view(
1173 AuditLogsView,
1173 AuditLogsView,
1174 attr='repo_audit_logs',
1174 attr='repo_audit_logs',
1175 route_name='edit_repo_audit_logs', request_method='GET',
1175 route_name='edit_repo_audit_logs', request_method='GET',
1176 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1176 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1177
1177
1178 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
1178 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
1179 config.add_route(
1179 config.add_route(
1180 name='rss_feed_home',
1180 name='rss_feed_home',
1181 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
1181 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
1182 config.add_view(
1182 config.add_view(
1183 RepoFeedView,
1183 RepoFeedView,
1184 attr='rss',
1184 attr='rss',
1185 route_name='rss_feed_home', request_method='GET', renderer=None)
1185 route_name='rss_feed_home', request_method='GET', renderer=None)
1186
1186
1187 config.add_route(
1187 config.add_route(
1188 name='rss_feed_home_old',
1188 name='rss_feed_home_old',
1189 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
1189 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
1190 config.add_view(
1190 config.add_view(
1191 RepoFeedView,
1191 RepoFeedView,
1192 attr='rss',
1192 attr='rss',
1193 route_name='rss_feed_home_old', request_method='GET', renderer=None)
1193 route_name='rss_feed_home_old', request_method='GET', renderer=None)
1194
1194
1195 config.add_route(
1195 config.add_route(
1196 name='atom_feed_home',
1196 name='atom_feed_home',
1197 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
1197 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
1198 config.add_view(
1198 config.add_view(
1199 RepoFeedView,
1199 RepoFeedView,
1200 attr='atom',
1200 attr='atom',
1201 route_name='atom_feed_home', request_method='GET', renderer=None)
1201 route_name='atom_feed_home', request_method='GET', renderer=None)
1202
1202
1203 config.add_route(
1203 config.add_route(
1204 name='atom_feed_home_old',
1204 name='atom_feed_home_old',
1205 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
1205 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
1206 config.add_view(
1206 config.add_view(
1207 RepoFeedView,
1207 RepoFeedView,
1208 attr='atom',
1208 attr='atom',
1209 route_name='atom_feed_home_old', request_method='GET', renderer=None)
1209 route_name='atom_feed_home_old', request_method='GET', renderer=None)
1210
1210
1211 # NOTE(marcink): needs to be at the end for catch-all
1211 # NOTE(marcink): needs to be at the end for catch-all
1212 add_route_with_slash(
1212 add_route_with_slash(
1213 config,
1213 config,
1214 name='repo_summary',
1214 name='repo_summary',
1215 pattern='/{repo_name:.*?[^/]}', repo_route=True)
1215 pattern='/{repo_name:.*?[^/]}', repo_route=True)
1216 config.add_view(
1216 config.add_view(
1217 RepoSummaryView,
1217 RepoSummaryView,
1218 attr='summary',
1218 attr='summary',
1219 route_name='repo_summary', request_method='GET',
1219 route_name='repo_summary', request_method='GET',
1220 renderer='rhodecode:templates/summary/summary.mako')
1220 renderer='rhodecode:templates/summary/summary.mako')
1221
1221
1222 # TODO(marcink): there's no such route??
1222 # TODO(marcink): there's no such route??
1223 config.add_view(
1223 config.add_view(
1224 RepoSummaryView,
1224 RepoSummaryView,
1225 attr='summary',
1225 attr='summary',
1226 route_name='repo_summary_slash', request_method='GET',
1226 route_name='repo_summary_slash', request_method='GET',
1227 renderer='rhodecode:templates/summary/summary.mako') No newline at end of file
1227 renderer='rhodecode:templates/summary/summary.mako')
@@ -1,358 +1,358 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from pyramid.httpexceptions import HTTPNotFound, HTTPFound
24 from pyramid.httpexceptions import HTTPNotFound, HTTPFound
25
25
26 from pyramid.renderers import render
26 from pyramid.renderers import render
27 from pyramid.response import Response
27 from pyramid.response import Response
28
28
29 from rhodecode.apps._base import RepoAppView
29 from rhodecode.apps._base import RepoAppView
30 import rhodecode.lib.helpers as h
30 import rhodecode.lib.helpers as h
31 from rhodecode.lib.auth import (
31 from rhodecode.lib.auth import (
32 LoginRequired, HasRepoPermissionAnyDecorator)
32 LoginRequired, HasRepoPermissionAnyDecorator)
33
33
34 from rhodecode.lib.ext_json import json
34 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.graphmod import _colored, _dagwalker
35 from rhodecode.lib.graphmod import _colored, _dagwalker
36 from rhodecode.lib.helpers import RepoPage
36 from rhodecode.lib.helpers import RepoPage
37 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
37 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
38 from rhodecode.lib.vcs.exceptions import (
38 from rhodecode.lib.vcs.exceptions import (
39 RepositoryError, CommitDoesNotExistError,
39 RepositoryError, CommitDoesNotExistError,
40 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
40 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44 DEFAULT_CHANGELOG_SIZE = 20
44 DEFAULT_CHANGELOG_SIZE = 20
45
45
46
46
47 class RepoChangelogView(RepoAppView):
47 class RepoChangelogView(RepoAppView):
48
48
49 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
49 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
50 """
50 """
51 This is a safe way to get commit. If an error occurs it redirects to
51 This is a safe way to get commit. If an error occurs it redirects to
52 tip with proper message
52 tip with proper message
53
53
54 :param commit_id: id of commit to fetch
54 :param commit_id: id of commit to fetch
55 :param redirect_after: toggle redirection
55 :param redirect_after: toggle redirection
56 """
56 """
57 _ = self.request.translate
57 _ = self.request.translate
58
58
59 try:
59 try:
60 return self.rhodecode_vcs_repo.get_commit(commit_id)
60 return self.rhodecode_vcs_repo.get_commit(commit_id)
61 except EmptyRepositoryError:
61 except EmptyRepositoryError:
62 if not redirect_after:
62 if not redirect_after:
63 return None
63 return None
64
64
65 h.flash(h.literal(
65 h.flash(h.literal(
66 _('There are no commits yet')), category='warning')
66 _('There are no commits yet')), category='warning')
67 raise HTTPFound(
67 raise HTTPFound(
68 h.route_path('repo_summary', repo_name=self.db_repo_name))
68 h.route_path('repo_summary', repo_name=self.db_repo_name))
69
69
70 except (CommitDoesNotExistError, LookupError):
70 except (CommitDoesNotExistError, LookupError):
71 msg = _('No such commit exists for this repository')
71 msg = _('No such commit exists for this repository')
72 h.flash(msg, category='error')
72 h.flash(msg, category='error')
73 raise HTTPNotFound()
73 raise HTTPNotFound()
74 except RepositoryError as e:
74 except RepositoryError as e:
75 h.flash(safe_str(h.escape(e)), category='error')
75 h.flash(h.escape(safe_str(e)), category='error')
76 raise HTTPNotFound()
76 raise HTTPNotFound()
77
77
78 def _graph(self, repo, commits, prev_data=None, next_data=None):
78 def _graph(self, repo, commits, prev_data=None, next_data=None):
79 """
79 """
80 Generates a DAG graph for repo
80 Generates a DAG graph for repo
81
81
82 :param repo: repo instance
82 :param repo: repo instance
83 :param commits: list of commits
83 :param commits: list of commits
84 """
84 """
85 if not commits:
85 if not commits:
86 return json.dumps([]), json.dumps([])
86 return json.dumps([]), json.dumps([])
87
87
88 def serialize(commit, parents=True):
88 def serialize(commit, parents=True):
89 data = dict(
89 data = dict(
90 raw_id=commit.raw_id,
90 raw_id=commit.raw_id,
91 idx=commit.idx,
91 idx=commit.idx,
92 branch=None,
92 branch=None,
93 )
93 )
94 if parents:
94 if parents:
95 data['parents'] = [
95 data['parents'] = [
96 serialize(x, parents=False) for x in commit.parents]
96 serialize(x, parents=False) for x in commit.parents]
97 return data
97 return data
98
98
99 prev_data = prev_data or []
99 prev_data = prev_data or []
100 next_data = next_data or []
100 next_data = next_data or []
101
101
102 current = [serialize(x) for x in commits]
102 current = [serialize(x) for x in commits]
103 commits = prev_data + current + next_data
103 commits = prev_data + current + next_data
104
104
105 dag = _dagwalker(repo, commits)
105 dag = _dagwalker(repo, commits)
106
106
107 data = [[commit_id, vtx, edges, branch]
107 data = [[commit_id, vtx, edges, branch]
108 for commit_id, vtx, edges, branch in _colored(dag)]
108 for commit_id, vtx, edges, branch in _colored(dag)]
109 return json.dumps(data), json.dumps(current)
109 return json.dumps(data), json.dumps(current)
110
110
111 def _check_if_valid_branch(self, branch_name, repo_name, f_path):
111 def _check_if_valid_branch(self, branch_name, repo_name, f_path):
112 if branch_name not in self.rhodecode_vcs_repo.branches_all:
112 if branch_name not in self.rhodecode_vcs_repo.branches_all:
113 h.flash(u'Branch {} is not found.'.format(h.escape(safe_unicode(branch_name))),
113 h.flash(u'Branch {} is not found.'.format(h.escape(safe_unicode(branch_name))),
114 category='warning')
114 category='warning')
115 redirect_url = h.route_path(
115 redirect_url = h.route_path(
116 'repo_commits_file', repo_name=repo_name,
116 'repo_commits_file', repo_name=repo_name,
117 commit_id=branch_name, f_path=f_path or '')
117 commit_id=branch_name, f_path=f_path or '')
118 raise HTTPFound(redirect_url)
118 raise HTTPFound(redirect_url)
119
119
120 def _load_changelog_data(
120 def _load_changelog_data(
121 self, c, collection, page, chunk_size, branch_name=None,
121 self, c, collection, page, chunk_size, branch_name=None,
122 dynamic=False, f_path=None, commit_id=None):
122 dynamic=False, f_path=None, commit_id=None):
123
123
124 def url_generator(page_num):
124 def url_generator(page_num):
125 query_params = {
125 query_params = {
126 'page': page_num
126 'page': page_num
127 }
127 }
128
128
129 if branch_name:
129 if branch_name:
130 query_params.update({
130 query_params.update({
131 'branch': branch_name
131 'branch': branch_name
132 })
132 })
133
133
134 if f_path:
134 if f_path:
135 # changelog for file
135 # changelog for file
136 return h.route_path(
136 return h.route_path(
137 'repo_commits_file',
137 'repo_commits_file',
138 repo_name=c.rhodecode_db_repo.repo_name,
138 repo_name=c.rhodecode_db_repo.repo_name,
139 commit_id=commit_id, f_path=f_path,
139 commit_id=commit_id, f_path=f_path,
140 _query=query_params)
140 _query=query_params)
141 else:
141 else:
142 return h.route_path(
142 return h.route_path(
143 'repo_commits',
143 'repo_commits',
144 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
144 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
145
145
146 c.total_cs = len(collection)
146 c.total_cs = len(collection)
147 c.showing_commits = min(chunk_size, c.total_cs)
147 c.showing_commits = min(chunk_size, c.total_cs)
148 c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
148 c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
149 items_per_page=chunk_size, url_maker=url_generator)
149 items_per_page=chunk_size, url_maker=url_generator)
150
150
151 c.next_page = c.pagination.next_page
151 c.next_page = c.pagination.next_page
152 c.prev_page = c.pagination.previous_page
152 c.prev_page = c.pagination.previous_page
153
153
154 if dynamic:
154 if dynamic:
155 if self.request.GET.get('chunk') != 'next':
155 if self.request.GET.get('chunk') != 'next':
156 c.next_page = None
156 c.next_page = None
157 if self.request.GET.get('chunk') != 'prev':
157 if self.request.GET.get('chunk') != 'prev':
158 c.prev_page = None
158 c.prev_page = None
159
159
160 page_commit_ids = [x.raw_id for x in c.pagination]
160 page_commit_ids = [x.raw_id for x in c.pagination]
161 c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
161 c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
162 c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)
162 c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)
163
163
164 def load_default_context(self):
164 def load_default_context(self):
165 c = self._get_local_tmpl_context(include_app_defaults=True)
165 c = self._get_local_tmpl_context(include_app_defaults=True)
166
166
167 c.rhodecode_repo = self.rhodecode_vcs_repo
167 c.rhodecode_repo = self.rhodecode_vcs_repo
168
168
169 return c
169 return c
170
170
171 def _get_preload_attrs(self):
171 def _get_preload_attrs(self):
172 pre_load = ['author', 'branch', 'date', 'message', 'parents',
172 pre_load = ['author', 'branch', 'date', 'message', 'parents',
173 'obsolete', 'phase', 'hidden']
173 'obsolete', 'phase', 'hidden']
174 return pre_load
174 return pre_load
175
175
176 @LoginRequired()
176 @LoginRequired()
177 @HasRepoPermissionAnyDecorator(
177 @HasRepoPermissionAnyDecorator(
178 'repository.read', 'repository.write', 'repository.admin')
178 'repository.read', 'repository.write', 'repository.admin')
179 def repo_changelog(self):
179 def repo_changelog(self):
180 c = self.load_default_context()
180 c = self.load_default_context()
181
181
182 commit_id = self.request.matchdict.get('commit_id')
182 commit_id = self.request.matchdict.get('commit_id')
183 f_path = self._get_f_path(self.request.matchdict)
183 f_path = self._get_f_path(self.request.matchdict)
184 show_hidden = str2bool(self.request.GET.get('evolve'))
184 show_hidden = str2bool(self.request.GET.get('evolve'))
185
185
186 chunk_size = 20
186 chunk_size = 20
187
187
188 c.branch_name = branch_name = self.request.GET.get('branch') or ''
188 c.branch_name = branch_name = self.request.GET.get('branch') or ''
189 c.book_name = book_name = self.request.GET.get('bookmark') or ''
189 c.book_name = book_name = self.request.GET.get('bookmark') or ''
190 c.f_path = f_path
190 c.f_path = f_path
191 c.commit_id = commit_id
191 c.commit_id = commit_id
192 c.show_hidden = show_hidden
192 c.show_hidden = show_hidden
193
193
194 hist_limit = safe_int(self.request.GET.get('limit')) or None
194 hist_limit = safe_int(self.request.GET.get('limit')) or None
195
195
196 p = safe_int(self.request.GET.get('page', 1), 1)
196 p = safe_int(self.request.GET.get('page', 1), 1)
197
197
198 c.selected_name = branch_name or book_name
198 c.selected_name = branch_name or book_name
199 if not commit_id and branch_name:
199 if not commit_id and branch_name:
200 self._check_if_valid_branch(branch_name, self.db_repo_name, f_path)
200 self._check_if_valid_branch(branch_name, self.db_repo_name, f_path)
201
201
202 c.changelog_for_path = f_path
202 c.changelog_for_path = f_path
203 pre_load = self._get_preload_attrs()
203 pre_load = self._get_preload_attrs()
204
204
205 partial_xhr = self.request.environ.get('HTTP_X_PARTIAL_XHR')
205 partial_xhr = self.request.environ.get('HTTP_X_PARTIAL_XHR')
206
206
207 try:
207 try:
208 if f_path:
208 if f_path:
209 log.debug('generating changelog for path %s', f_path)
209 log.debug('generating changelog for path %s', f_path)
210 # get the history for the file !
210 # get the history for the file !
211 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
211 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
212
212
213 try:
213 try:
214 collection = base_commit.get_path_history(
214 collection = base_commit.get_path_history(
215 f_path, limit=hist_limit, pre_load=pre_load)
215 f_path, limit=hist_limit, pre_load=pre_load)
216 if collection and partial_xhr:
216 if collection and partial_xhr:
217 # for ajax call we remove first one since we're looking
217 # for ajax call we remove first one since we're looking
218 # at it right now in the context of a file commit
218 # at it right now in the context of a file commit
219 collection.pop(0)
219 collection.pop(0)
220 except (NodeDoesNotExistError, CommitError):
220 except (NodeDoesNotExistError, CommitError):
221 # this node is not present at tip!
221 # this node is not present at tip!
222 try:
222 try:
223 commit = self._get_commit_or_redirect(commit_id)
223 commit = self._get_commit_or_redirect(commit_id)
224 collection = commit.get_path_history(f_path)
224 collection = commit.get_path_history(f_path)
225 except RepositoryError as e:
225 except RepositoryError as e:
226 h.flash(safe_str(e), category='warning')
226 h.flash(safe_str(e), category='warning')
227 redirect_url = h.route_path(
227 redirect_url = h.route_path(
228 'repo_commits', repo_name=self.db_repo_name)
228 'repo_commits', repo_name=self.db_repo_name)
229 raise HTTPFound(redirect_url)
229 raise HTTPFound(redirect_url)
230 collection = list(reversed(collection))
230 collection = list(reversed(collection))
231 else:
231 else:
232 collection = self.rhodecode_vcs_repo.get_commits(
232 collection = self.rhodecode_vcs_repo.get_commits(
233 branch_name=branch_name, show_hidden=show_hidden,
233 branch_name=branch_name, show_hidden=show_hidden,
234 pre_load=pre_load, translate_tags=False)
234 pre_load=pre_load, translate_tags=False)
235
235
236 self._load_changelog_data(
236 self._load_changelog_data(
237 c, collection, p, chunk_size, c.branch_name,
237 c, collection, p, chunk_size, c.branch_name,
238 f_path=f_path, commit_id=commit_id)
238 f_path=f_path, commit_id=commit_id)
239
239
240 except EmptyRepositoryError as e:
240 except EmptyRepositoryError as e:
241 h.flash(safe_str(h.escape(e)), category='warning')
241 h.flash(h.escape(safe_str(e)), category='warning')
242 raise HTTPFound(
242 raise HTTPFound(
243 h.route_path('repo_summary', repo_name=self.db_repo_name))
243 h.route_path('repo_summary', repo_name=self.db_repo_name))
244 except HTTPFound:
244 except HTTPFound:
245 raise
245 raise
246 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
246 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
247 log.exception(safe_str(e))
247 log.exception(safe_str(e))
248 h.flash(safe_str(h.escape(e)), category='error')
248 h.flash(h.escape(safe_str(e)), category='error')
249
249
250 if commit_id:
250 if commit_id:
251 # from single commit page, we redirect to main commits
251 # from single commit page, we redirect to main commits
252 raise HTTPFound(
252 raise HTTPFound(
253 h.route_path('repo_commits', repo_name=self.db_repo_name))
253 h.route_path('repo_commits', repo_name=self.db_repo_name))
254 else:
254 else:
255 # otherwise we redirect to summary
255 # otherwise we redirect to summary
256 raise HTTPFound(
256 raise HTTPFound(
257 h.route_path('repo_summary', repo_name=self.db_repo_name))
257 h.route_path('repo_summary', repo_name=self.db_repo_name))
258
258
259 if partial_xhr or self.request.environ.get('HTTP_X_PJAX'):
259 if partial_xhr or self.request.environ.get('HTTP_X_PJAX'):
260 # case when loading dynamic file history in file view
260 # case when loading dynamic file history in file view
261 # loading from ajax, we don't want the first result, it's popped
261 # loading from ajax, we don't want the first result, it's popped
262 # in the code above
262 # in the code above
263 html = render(
263 html = render(
264 'rhodecode:templates/commits/changelog_file_history.mako',
264 'rhodecode:templates/commits/changelog_file_history.mako',
265 self._get_template_context(c), self.request)
265 self._get_template_context(c), self.request)
266 return Response(html)
266 return Response(html)
267
267
268 commit_ids = []
268 commit_ids = []
269 if not f_path:
269 if not f_path:
270 # only load graph data when not in file history mode
270 # only load graph data when not in file history mode
271 commit_ids = c.pagination
271 commit_ids = c.pagination
272
272
273 c.graph_data, c.graph_commits = self._graph(
273 c.graph_data, c.graph_commits = self._graph(
274 self.rhodecode_vcs_repo, commit_ids)
274 self.rhodecode_vcs_repo, commit_ids)
275
275
276 return self._get_template_context(c)
276 return self._get_template_context(c)
277
277
278 @LoginRequired()
278 @LoginRequired()
279 @HasRepoPermissionAnyDecorator(
279 @HasRepoPermissionAnyDecorator(
280 'repository.read', 'repository.write', 'repository.admin')
280 'repository.read', 'repository.write', 'repository.admin')
281 def repo_commits_elements(self):
281 def repo_commits_elements(self):
282 c = self.load_default_context()
282 c = self.load_default_context()
283 commit_id = self.request.matchdict.get('commit_id')
283 commit_id = self.request.matchdict.get('commit_id')
284 f_path = self._get_f_path(self.request.matchdict)
284 f_path = self._get_f_path(self.request.matchdict)
285 show_hidden = str2bool(self.request.GET.get('evolve'))
285 show_hidden = str2bool(self.request.GET.get('evolve'))
286
286
287 chunk_size = 20
287 chunk_size = 20
288 hist_limit = safe_int(self.request.GET.get('limit')) or None
288 hist_limit = safe_int(self.request.GET.get('limit')) or None
289
289
290 def wrap_for_error(err):
290 def wrap_for_error(err):
291 html = '<tr>' \
291 html = '<tr>' \
292 '<td colspan="9" class="alert alert-error">ERROR: {}</td>' \
292 '<td colspan="9" class="alert alert-error">ERROR: {}</td>' \
293 '</tr>'.format(err)
293 '</tr>'.format(err)
294 return Response(html)
294 return Response(html)
295
295
296 c.branch_name = branch_name = self.request.GET.get('branch') or ''
296 c.branch_name = branch_name = self.request.GET.get('branch') or ''
297 c.book_name = book_name = self.request.GET.get('bookmark') or ''
297 c.book_name = book_name = self.request.GET.get('bookmark') or ''
298 c.f_path = f_path
298 c.f_path = f_path
299 c.commit_id = commit_id
299 c.commit_id = commit_id
300 c.show_hidden = show_hidden
300 c.show_hidden = show_hidden
301
301
302 c.selected_name = branch_name or book_name
302 c.selected_name = branch_name or book_name
303 if branch_name and branch_name not in self.rhodecode_vcs_repo.branches_all:
303 if branch_name and branch_name not in self.rhodecode_vcs_repo.branches_all:
304 return wrap_for_error(
304 return wrap_for_error(
305 safe_str('Branch: {} is not valid'.format(branch_name)))
305 safe_str('Branch: {} is not valid'.format(branch_name)))
306
306
307 pre_load = self._get_preload_attrs()
307 pre_load = self._get_preload_attrs()
308
308
309 if f_path:
309 if f_path:
310 try:
310 try:
311 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
311 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
312 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
312 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
313 log.exception(safe_str(e))
313 log.exception(safe_str(e))
314 raise HTTPFound(
314 raise HTTPFound(
315 h.route_path('repo_commits', repo_name=self.db_repo_name))
315 h.route_path('repo_commits', repo_name=self.db_repo_name))
316
316
317 collection = base_commit.get_path_history(
317 collection = base_commit.get_path_history(
318 f_path, limit=hist_limit, pre_load=pre_load)
318 f_path, limit=hist_limit, pre_load=pre_load)
319 collection = list(reversed(collection))
319 collection = list(reversed(collection))
320 else:
320 else:
321 collection = self.rhodecode_vcs_repo.get_commits(
321 collection = self.rhodecode_vcs_repo.get_commits(
322 branch_name=branch_name, show_hidden=show_hidden, pre_load=pre_load,
322 branch_name=branch_name, show_hidden=show_hidden, pre_load=pre_load,
323 translate_tags=False)
323 translate_tags=False)
324
324
325 p = safe_int(self.request.GET.get('page', 1), 1)
325 p = safe_int(self.request.GET.get('page', 1), 1)
326 try:
326 try:
327 self._load_changelog_data(
327 self._load_changelog_data(
328 c, collection, p, chunk_size, dynamic=True,
328 c, collection, p, chunk_size, dynamic=True,
329 f_path=f_path, commit_id=commit_id)
329 f_path=f_path, commit_id=commit_id)
330 except EmptyRepositoryError as e:
330 except EmptyRepositoryError as e:
331 return wrap_for_error(safe_str(e))
331 return wrap_for_error(safe_str(e))
332 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
332 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
333 log.exception('Failed to fetch commits')
333 log.exception('Failed to fetch commits')
334 return wrap_for_error(safe_str(e))
334 return wrap_for_error(safe_str(e))
335
335
336 prev_data = None
336 prev_data = None
337 next_data = None
337 next_data = None
338
338
339 try:
339 try:
340 prev_graph = json.loads(self.request.POST.get('graph') or '{}')
340 prev_graph = json.loads(self.request.POST.get('graph') or '{}')
341 except json.JSONDecodeError:
341 except json.JSONDecodeError:
342 prev_graph = {}
342 prev_graph = {}
343
343
344 if self.request.GET.get('chunk') == 'prev':
344 if self.request.GET.get('chunk') == 'prev':
345 next_data = prev_graph
345 next_data = prev_graph
346 elif self.request.GET.get('chunk') == 'next':
346 elif self.request.GET.get('chunk') == 'next':
347 prev_data = prev_graph
347 prev_data = prev_graph
348
348
349 commit_ids = []
349 commit_ids = []
350 if not f_path:
350 if not f_path:
351 # only load graph data when not in file history mode
351 # only load graph data when not in file history mode
352 commit_ids = c.pagination
352 commit_ids = c.pagination
353
353
354 c.graph_data, c.graph_commits = self._graph(
354 c.graph_data, c.graph_commits = self._graph(
355 self.rhodecode_vcs_repo, commit_ids,
355 self.rhodecode_vcs_repo, commit_ids,
356 prev_data=prev_data, next_data=next_data)
356 prev_data=prev_data, next_data=next_data)
357
357
358 return self._get_template_context(c)
358 return self._get_template_context(c)
@@ -1,818 +1,819 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 from pyramid.httpexceptions import (
24 from pyramid.httpexceptions import (
25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
26 from pyramid.renderers import render
26 from pyramid.renderers import render
27 from pyramid.response import Response
27 from pyramid.response import Response
28
28
29 from rhodecode.apps._base import RepoAppView
29 from rhodecode.apps._base import RepoAppView
30 from rhodecode.apps.file_store import utils as store_utils
30 from rhodecode.apps.file_store import utils as store_utils
31 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
31 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
32
32
33 from rhodecode.lib import diffs, codeblocks, channelstream
33 from rhodecode.lib import diffs, codeblocks, channelstream
34 from rhodecode.lib.auth import (
34 from rhodecode.lib.auth import (
35 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
36 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.compat import OrderedDict
38 from rhodecode.lib.diffs import (
38 from rhodecode.lib.diffs import (
39 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
39 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
40 get_diff_whitespace_flag)
40 get_diff_whitespace_flag)
41 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
41 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
42 import rhodecode.lib.helpers as h
42 import rhodecode.lib.helpers as h
43 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
43 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 RepositoryError, CommitDoesNotExistError)
46 RepositoryError, CommitDoesNotExistError)
47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
48 ChangesetCommentHistory
48 ChangesetCommentHistory
49 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.comment import CommentsModel
50 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 def _update_with_GET(params, request):
57 def _update_with_GET(params, request):
58 for k in ['diff1', 'diff2', 'diff']:
58 for k in ['diff1', 'diff2', 'diff']:
59 params[k] += request.GET.getall(k)
59 params[k] += request.GET.getall(k)
60
60
61
61
62 class RepoCommitsView(RepoAppView):
62 class RepoCommitsView(RepoAppView):
63 def load_default_context(self):
63 def load_default_context(self):
64 c = self._get_local_tmpl_context(include_app_defaults=True)
64 c = self._get_local_tmpl_context(include_app_defaults=True)
65 c.rhodecode_repo = self.rhodecode_vcs_repo
65 c.rhodecode_repo = self.rhodecode_vcs_repo
66
66
67 return c
67 return c
68
68
69 def _is_diff_cache_enabled(self, target_repo):
69 def _is_diff_cache_enabled(self, target_repo):
70 caching_enabled = self._get_general_setting(
70 caching_enabled = self._get_general_setting(
71 target_repo, 'rhodecode_diff_cache')
71 target_repo, 'rhodecode_diff_cache')
72 log.debug('Diff caching enabled: %s', caching_enabled)
72 log.debug('Diff caching enabled: %s', caching_enabled)
73 return caching_enabled
73 return caching_enabled
74
74
    def _commit(self, commit_id_range, method):
        """
        Shared implementation backing the commit show/raw/patch/download
        views.

        :param commit_id_range: either a single commit id, or two ids
            joined by ``'...'`` describing an inclusive range.
        :param method: one of ``'show'``, ``'raw'``, ``'patch'``,
            ``'download'`` — selects the response format.
        :returns: a pyramid :class:`Response`.
        :raises HTTPNotFound: when the commit(s) cannot be resolved.
        :raises HTTPFound: redirect to the compare view when
            ``redirect_combined`` is requested for a multi-commit range.
        :raises HTTPBadRequest: for an unknown ``method``.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.fulldiff = self.request.GET.get('fulldiff')
        redirect_to_combined = str2bool(self.request.GET.get('redirect_combined'))

        # fetch global flags of ignore ws or context lines
        diff_context = get_diff_context(self.request)
        hide_whitespace_changes = get_diff_whitespace_flag(self.request)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        # get ranges of commit ids if preset
        commit_range = commit_id_range.split('...')[:2]

        try:
            # attributes eagerly loaded on each commit to avoid per-field
            # round-trips to the vcs backend later
            pre_load = ['affected_files', 'author', 'branch', 'date',
                        'message', 'parents']
            if self.rhodecode_vcs_repo.alias == 'hg':
                # mercurial-only commit attributes
                pre_load += ['hidden', 'obsolete', 'phase']

            if len(commit_range) == 2:
                commits = self.rhodecode_vcs_repo.get_commits(
                    start_id=commit_range[0], end_id=commit_range[1],
                    pre_load=pre_load, translate_tags=False)
                # materialize the generator so emptiness can be checked
                commits = list(commits)
            else:
                commits = [self.rhodecode_vcs_repo.get_commit(
                    commit_id=commit_id_range, pre_load=pre_load)]

            c.commit_ranges = commits
            if not c.commit_ranges:
                raise RepositoryError('The commit range returned an empty result')
        except CommitDoesNotExistError as e:
            msg = _('No such commit exists. Org exception: `{}`').format(safe_str(e))
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except Exception:
            # any other resolution failure is surfaced as a 404 after logging
            log.exception("General failure")
            raise HTTPNotFound()
        single_commit = len(c.commit_ranges) == 1

        if redirect_to_combined and not single_commit:
            # redirect a range to the compare view: first commit's parent
            # (or an empty commit when it has no parent) -> last commit
            source_ref = getattr(c.commit_ranges[0].parents[0]
                                 if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id')
            target_ref = c.commit_ranges[-1].raw_id
            next_url = h.route_path(
                'repo_compare',
                repo_name=c.repo_name,
                source_ref_type='rev',
                source_ref=source_ref,
                target_ref_type='rev',
                target_ref=target_ref)
            raise HTTPFound(next_url)

        c.changes = OrderedDict()
        c.lines_added = 0
        c.lines_deleted = 0

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        c.commit_statuses = ChangesetStatus.STATUSES
        c.inline_comments = []
        c.files = []

        c.comments = []
        c.unresolved_comments = []
        c.resolved_comments = []

        # Single commit
        if single_commit:
            commit = c.commit_ranges[0]
            c.comments = CommentsModel().get_comments(
                self.db_repo.repo_id,
                revision=commit.raw_id)

            # comments from PR
            statuses = ChangesetStatusModel().get_statuses(
                self.db_repo.repo_id, commit.raw_id,
                with_revisions=True)

            prs = set()
            reviewers = list()
            reviewers_duplicates = set()  # to not have duplicates from multiple votes
            for c_status in statuses:

                # extract associated pull-requests from votes
                if c_status.pull_request:
                    prs.add(c_status.pull_request)

                # extract reviewers
                _user_id = c_status.author.user_id
                if _user_id not in reviewers_duplicates:
                    reviewers.append(
                        StrictAttributeDict({
                            'user': c_status.author,

                            # fake attributed for commit, page that we don't have
                            # but we share the display with PR page
                            'mandatory': False,
                            'reasons': [],
                            'rule_user_group_data': lambda: None
                        })
                    )
                    reviewers_duplicates.add(_user_id)

            c.reviewers_count = len(reviewers)
            c.observers_count = 0

            # from associated statuses, check the pull requests, and
            # show comments from them
            for pr in prs:
                c.comments.extend(pr.comments)

            c.unresolved_comments = CommentsModel()\
                .get_commit_unresolved_todos(commit.raw_id)
            c.resolved_comments = CommentsModel()\
                .get_commit_resolved_todos(commit.raw_id)

            c.inline_comments_flat = CommentsModel()\
                .get_commit_inline_comments(commit.raw_id)

            review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
                statuses, reviewers)

            c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED

            c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})

            for review_obj, member, reasons, mandatory, status in review_statuses:
                member_reviewer = h.reviewer_as_json(
                    member, reasons=reasons, mandatory=mandatory, role=None,
                    user_group=None
                )

                current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
                member_reviewer['review_status'] = current_review_status
                member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
                member_reviewer['allowed_to_update'] = False
                c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)

            # serialized once all reviewer entries are collected
            c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)

            # NOTE(marcink): this uses the same voting logic as in pull-requests
            c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
            c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)

        diff = None
        # Iterate over ranges (default commit view is always one commit)
        for commit in c.commit_ranges:
            c.changes[commit.raw_id] = []

            commit2 = commit
            commit1 = commit.first_parent

            if method == 'show':
                # 'show' renders a full HTML diffset and may use the diff cache
                inline_comments = CommentsModel().get_inline_comments(
                    self.db_repo.repo_id, revision=commit.raw_id)
                c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
                    inline_comments))
                c.inline_comments = inline_comments

                cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
                    self.db_repo)
                # cache key includes whitespace/context/fulldiff flags so
                # differently-rendered diffs don't collide
                cache_file_path = diff_cache_exist(
                    cache_path, 'diff', commit.raw_id,
                    hide_whitespace_changes, diff_context, c.fulldiff)

                caching_enabled = self._is_diff_cache_enabled(self.db_repo)
                force_recache = str2bool(self.request.GET.get('force_recache'))

                cached_diff = None
                if caching_enabled:
                    cached_diff = load_cached_diff(cache_file_path)

                has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                if not force_recache and has_proper_diff_cache:
                    diffset = cached_diff['diff']
                else:
                    vcs_diff = self.rhodecode_vcs_repo.get_diff(
                        commit1, commit2,
                        ignore_whitespace=hide_whitespace_changes,
                        context=diff_context)

                    diff_processor = diffs.DiffProcessor(
                        vcs_diff, format='newdiff', diff_limit=diff_limit,
                        file_limit=file_limit, show_full_diff=c.fulldiff)

                    _parsed = diff_processor.prepare()

                    diffset = codeblocks.DiffSet(
                        repo_name=self.db_repo_name,
                        source_node_getter=codeblocks.diffset_node_getter(commit1),
                        target_node_getter=codeblocks.diffset_node_getter(commit2))

                    # apply path-based ACL filtering before caching/rendering
                    diffset = self.path_filter.render_patchset_filtered(
                        diffset, _parsed, commit1.raw_id, commit2.raw_id)

                    # save cached diff
                    if caching_enabled:
                        cache_diff(cache_file_path, diffset, None)

                c.limited_diff = diffset.limited_diff
                c.changes[commit.raw_id] = diffset
            else:
                # TODO(marcink): no cache usage here...
                _diff = self.rhodecode_vcs_repo.get_diff(
                    commit1, commit2,
                    ignore_whitespace=hide_whitespace_changes, context=diff_context)
                diff_processor = diffs.DiffProcessor(
                    _diff, format='newdiff', diff_limit=diff_limit,
                    file_limit=file_limit, show_full_diff=c.fulldiff)
                # downloads/raw we only need RAW diff nothing else
                diff = self.path_filter.get_raw_patch(diff_processor)
                c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]

        # sort comments by how they were generated
        c.comments = sorted(c.comments, key=lambda x: x.comment_id)
        c.at_version_num = None

        if len(c.commit_ranges) == 1:
            c.commit = c.commit_ranges[0]
            c.parent_tmpl = ''.join(
                '# Parent %s\n' % x.raw_id for x in c.commit.parents)

        # dispatch on the requested output format
        if method == 'download':
            response = Response(diff)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s.diff' % commit_id_range[:12])
            return response
        elif method == 'patch':
            c.diff = safe_unicode(diff)
            patch = render(
                'rhodecode:templates/changeset/patch_changeset.mako',
                self._get_template_context(c), self.request)
            response = Response(patch)
            response.content_type = 'text/plain'
            return response
        elif method == 'raw':
            response = Response(diff)
            response.content_type = 'text/plain'
            return response
        elif method == 'show':
            if len(c.commit_ranges) == 1:
                html = render(
                    'rhodecode:templates/changeset/changeset.mako',
                    self._get_template_context(c), self.request)
                return Response(html)
            else:
                c.ancestor = None
                c.target_repo = self.db_repo
                html = render(
                    'rhodecode:templates/changeset/changeset_range.mako',
                    self._get_template_context(c), self.request)
                return Response(html)

        raise HTTPBadRequest()
338
338
339 @LoginRequired()
339 @LoginRequired()
340 @HasRepoPermissionAnyDecorator(
340 @HasRepoPermissionAnyDecorator(
341 'repository.read', 'repository.write', 'repository.admin')
341 'repository.read', 'repository.write', 'repository.admin')
342 def repo_commit_show(self):
342 def repo_commit_show(self):
343 commit_id = self.request.matchdict['commit_id']
343 commit_id = self.request.matchdict['commit_id']
344 return self._commit(commit_id, method='show')
344 return self._commit(commit_id, method='show')
345
345
346 @LoginRequired()
346 @LoginRequired()
347 @HasRepoPermissionAnyDecorator(
347 @HasRepoPermissionAnyDecorator(
348 'repository.read', 'repository.write', 'repository.admin')
348 'repository.read', 'repository.write', 'repository.admin')
349 def repo_commit_raw(self):
349 def repo_commit_raw(self):
350 commit_id = self.request.matchdict['commit_id']
350 commit_id = self.request.matchdict['commit_id']
351 return self._commit(commit_id, method='raw')
351 return self._commit(commit_id, method='raw')
352
352
353 @LoginRequired()
353 @LoginRequired()
354 @HasRepoPermissionAnyDecorator(
354 @HasRepoPermissionAnyDecorator(
355 'repository.read', 'repository.write', 'repository.admin')
355 'repository.read', 'repository.write', 'repository.admin')
356 def repo_commit_patch(self):
356 def repo_commit_patch(self):
357 commit_id = self.request.matchdict['commit_id']
357 commit_id = self.request.matchdict['commit_id']
358 return self._commit(commit_id, method='patch')
358 return self._commit(commit_id, method='patch')
359
359
360 @LoginRequired()
360 @LoginRequired()
361 @HasRepoPermissionAnyDecorator(
361 @HasRepoPermissionAnyDecorator(
362 'repository.read', 'repository.write', 'repository.admin')
362 'repository.read', 'repository.write', 'repository.admin')
363 def repo_commit_download(self):
363 def repo_commit_download(self):
364 commit_id = self.request.matchdict['commit_id']
364 commit_id = self.request.matchdict['commit_id']
365 return self._commit(commit_id, method='download')
365 return self._commit(commit_id, method='download')
366
366
    def _commit_comments_create(self, commit_id, comments):
        """
        Create one or more comments (possibly with status changes) on a
        commit, then broadcast them over channelstream unless every
        comment in the batch is a draft.

        :param commit_id: raw id of the commit being commented on.
        :param comments: list of dict entries; each entry carries
            ``comment_type``, ``text``, ``status``, ``is_draft``,
            ``resolves_comment_id``, ``f_path`` and ``line`` keys.
        :returns: mapping of created ``comment_id`` -> dict with the
            comment data, its ``target_id`` DOM anchor and the rendered
            HTML (``rendered_text``); ``None`` when ``comments`` is empty.
        :raises HTTPFound: redirect back to the commit page when a status
            change is attempted on a commit tied to a closed pull request.
        """
        _ = self.request.translate
        data = {}
        if not comments:
            return

        commit = self.db_repo.get_commit(commit_id)

        # True only when every submitted comment is a draft; used below to
        # skip the channelstream broadcast entirely
        all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
        for entry in comments:
            # fresh template context per comment for the rendered block
            c = self.load_default_context()
            comment_type = entry['comment_type']
            text = entry['text']
            status = entry['status']
            is_draft = str2bool(entry['is_draft'])
            resolves_comment_id = entry['resolves_comment_id']
            f_path = entry['f_path']
            line_no = entry['line']
            # DOM anchor id of the file block this comment attaches to
            target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))

            if status:
                # a status change with no text gets an auto-generated message
                text = text or (_('Status change %(transition_icon)s %(status)s')
                                % {'transition_icon': '>',
                                   'status': ChangesetStatus.get_status_lbl(status)})

            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=commit_id,
                f_path=f_path,
                line_no=line_no,
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                is_draft=is_draft,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user,
                send_email=not is_draft,  # skip notification for draft comments
            )
            is_inline = comment.is_inline

            # get status if set !
            if status:
                # `dont_allow_on_closed_pull_request = True` means
                # if latest status was from pull request and it's closed
                # disallow changing status !

                try:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_db_user.user_id,
                        comment,
                        revision=commit_id,
                        dont_allow_on_closed_pull_request=True
                    )
                except StatusChangeOnClosedPullRequestError:
                    msg = _('Changing the status of a commit associated with '
                            'a closed pull request is not allowed')
                    log.exception(msg)
                    h.flash(msg, category='warning')
                    raise HTTPFound(h.route_path(
                        'repo_commit', repo_name=self.db_repo_name,
                        commit_id=commit_id))

            Session().flush()
            # this is somehow required to get access to some relationship
            # loaded on comment
            Session().refresh(comment)

            # skip notifications for drafts
            if not is_draft:
                CommentsModel().trigger_commit_comment_hook(
                    self.db_repo, self._rhodecode_user, 'create',
                    data={'comment': comment, 'commit': commit})

            comment_id = comment.comment_id
            data[comment_id] = {
                'target_id': target_elem_id
            }
            Session().flush()

            c.co = comment
            c.at_version_num = 0
            c.is_new = True
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data[comment_id].update(comment.get_dict())
            data[comment_id].update({'rendered_text': rendered_comment})

        # finalize, commit and redirect
        Session().commit()

        # skip channelstream for draft comments
        if not all_drafts:
            comment_broadcast_channel = channelstream.comment_channel(
                self.db_repo_name, commit_obj=commit)

            comment_data = data
            # NOTE(review): is_inline reflects only the LAST comment of the
            # batch here — presumably batches are homogeneous; verify.
            posted_comment_type = 'inline' if is_inline else 'general'
            if len(data) == 1:
                msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
            else:
                msg = _('posted {} new {} comments').format(len(data), posted_comment_type)

            channelstream.comment_channelstream_push(
                self.request, comment_broadcast_channel, self._rhodecode_user, msg,
                comment_data=comment_data)

        return data
481
481
482 @LoginRequired()
482 @LoginRequired()
483 @NotAnonymous()
483 @NotAnonymous()
484 @HasRepoPermissionAnyDecorator(
484 @HasRepoPermissionAnyDecorator(
485 'repository.read', 'repository.write', 'repository.admin')
485 'repository.read', 'repository.write', 'repository.admin')
486 @CSRFRequired()
486 @CSRFRequired()
487 def repo_commit_comment_create(self):
487 def repo_commit_comment_create(self):
488 _ = self.request.translate
488 _ = self.request.translate
489 commit_id = self.request.matchdict['commit_id']
489 commit_id = self.request.matchdict['commit_id']
490
490
491 multi_commit_ids = []
491 multi_commit_ids = []
492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
494 if _commit_id not in multi_commit_ids:
494 if _commit_id not in multi_commit_ids:
495 multi_commit_ids.append(_commit_id)
495 multi_commit_ids.append(_commit_id)
496
496
497 commit_ids = multi_commit_ids or [commit_id]
497 commit_ids = multi_commit_ids or [commit_id]
498
498
499 data = []
499 data = []
500 # Multiple comments for each passed commit id
500 # Multiple comments for each passed commit id
501 for current_id in filter(None, commit_ids):
501 for current_id in filter(None, commit_ids):
502 comment_data = {
502 comment_data = {
503 'comment_type': self.request.POST.get('comment_type'),
503 'comment_type': self.request.POST.get('comment_type'),
504 'text': self.request.POST.get('text'),
504 'text': self.request.POST.get('text'),
505 'status': self.request.POST.get('changeset_status', None),
505 'status': self.request.POST.get('changeset_status', None),
506 'is_draft': self.request.POST.get('draft'),
506 'is_draft': self.request.POST.get('draft'),
507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
508 'close_pull_request': self.request.POST.get('close_pull_request'),
508 'close_pull_request': self.request.POST.get('close_pull_request'),
509 'f_path': self.request.POST.get('f_path'),
509 'f_path': self.request.POST.get('f_path'),
510 'line': self.request.POST.get('line'),
510 'line': self.request.POST.get('line'),
511 }
511 }
512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
513 data.append(comment)
513 data.append(comment)
514
514
515 return data if len(data) > 1 else data[0]
515 return data if len(data) > 1 else data[0]
516
516
517 @LoginRequired()
517 @LoginRequired()
518 @NotAnonymous()
518 @NotAnonymous()
519 @HasRepoPermissionAnyDecorator(
519 @HasRepoPermissionAnyDecorator(
520 'repository.read', 'repository.write', 'repository.admin')
520 'repository.read', 'repository.write', 'repository.admin')
521 @CSRFRequired()
521 @CSRFRequired()
522 def repo_commit_comment_preview(self):
522 def repo_commit_comment_preview(self):
523 # Technically a CSRF token is not needed as no state changes with this
523 # Technically a CSRF token is not needed as no state changes with this
524 # call. However, as this is a POST is better to have it, so automated
524 # call. However, as this is a POST is better to have it, so automated
525 # tools don't flag it as potential CSRF.
525 # tools don't flag it as potential CSRF.
526 # Post is required because the payload could be bigger than the maximum
526 # Post is required because the payload could be bigger than the maximum
527 # allowed by GET.
527 # allowed by GET.
528
528
529 text = self.request.POST.get('text')
529 text = self.request.POST.get('text')
530 renderer = self.request.POST.get('renderer') or 'rst'
530 renderer = self.request.POST.get('renderer') or 'rst'
531 if text:
531 if text:
532 return h.render(text, renderer=renderer, mentions=True,
532 return h.render(text, renderer=renderer, mentions=True,
533 repo_name=self.db_repo_name)
533 repo_name=self.db_repo_name)
534 return ''
534 return ''
535
535
536 @LoginRequired()
536 @LoginRequired()
537 @HasRepoPermissionAnyDecorator(
537 @HasRepoPermissionAnyDecorator(
538 'repository.read', 'repository.write', 'repository.admin')
538 'repository.read', 'repository.write', 'repository.admin')
539 @CSRFRequired()
539 @CSRFRequired()
540 def repo_commit_comment_history_view(self):
540 def repo_commit_comment_history_view(self):
541 c = self.load_default_context()
541 c = self.load_default_context()
542 comment_id = self.request.matchdict['comment_id']
542 comment_history_id = self.request.matchdict['comment_history_id']
543 comment_history_id = self.request.matchdict['comment_history_id']
543
544
544 comment = ChangesetComment.get_or_404(comment_history_id)
545 comment = ChangesetComment.get_or_404(comment_id)
545 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
546 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
546 if comment.draft and not comment_owner:
547 if comment.draft and not comment_owner:
547 # if we see draft comments history, we only allow this for owner
548 # if we see draft comments history, we only allow this for owner
548 raise HTTPNotFound()
549 raise HTTPNotFound()
549
550
550 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
551 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
551 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
552 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
552
553
553 if is_repo_comment:
554 if is_repo_comment:
554 c.comment_history = comment_history
555 c.comment_history = comment_history
555
556
556 rendered_comment = render(
557 rendered_comment = render(
557 'rhodecode:templates/changeset/comment_history.mako',
558 'rhodecode:templates/changeset/comment_history.mako',
558 self._get_template_context(c), self.request)
559 self._get_template_context(c), self.request)
559 return rendered_comment
560 return rendered_comment
560 else:
561 else:
561 log.warning('No permissions for user %s to show comment_history_id: %s',
562 log.warning('No permissions for user %s to show comment_history_id: %s',
562 self._rhodecode_db_user, comment_history_id)
563 self._rhodecode_db_user, comment_history_id)
563 raise HTTPNotFound()
564 raise HTTPNotFound()
564
565
565 @LoginRequired()
566 @LoginRequired()
566 @NotAnonymous()
567 @NotAnonymous()
567 @HasRepoPermissionAnyDecorator(
568 @HasRepoPermissionAnyDecorator(
568 'repository.read', 'repository.write', 'repository.admin')
569 'repository.read', 'repository.write', 'repository.admin')
569 @CSRFRequired()
570 @CSRFRequired()
570 def repo_commit_comment_attachment_upload(self):
571 def repo_commit_comment_attachment_upload(self):
571 c = self.load_default_context()
572 c = self.load_default_context()
572 upload_key = 'attachment'
573 upload_key = 'attachment'
573
574
574 file_obj = self.request.POST.get(upload_key)
575 file_obj = self.request.POST.get(upload_key)
575
576
576 if file_obj is None:
577 if file_obj is None:
577 self.request.response.status = 400
578 self.request.response.status = 400
578 return {'store_fid': None,
579 return {'store_fid': None,
579 'access_path': None,
580 'access_path': None,
580 'error': '{} data field is missing'.format(upload_key)}
581 'error': '{} data field is missing'.format(upload_key)}
581
582
582 if not hasattr(file_obj, 'filename'):
583 if not hasattr(file_obj, 'filename'):
583 self.request.response.status = 400
584 self.request.response.status = 400
584 return {'store_fid': None,
585 return {'store_fid': None,
585 'access_path': None,
586 'access_path': None,
586 'error': 'filename cannot be read from the data field'}
587 'error': 'filename cannot be read from the data field'}
587
588
588 filename = file_obj.filename
589 filename = file_obj.filename
589 file_display_name = filename
590 file_display_name = filename
590
591
591 metadata = {
592 metadata = {
592 'user_uploaded': {'username': self._rhodecode_user.username,
593 'user_uploaded': {'username': self._rhodecode_user.username,
593 'user_id': self._rhodecode_user.user_id,
594 'user_id': self._rhodecode_user.user_id,
594 'ip': self._rhodecode_user.ip_addr}}
595 'ip': self._rhodecode_user.ip_addr}}
595
596
596 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
597 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
597 allowed_extensions = [
598 allowed_extensions = [
598 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
599 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
599 '.pptx', '.txt', '.xlsx', '.zip']
600 '.pptx', '.txt', '.xlsx', '.zip']
600 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
601 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
601
602
602 try:
603 try:
603 storage = store_utils.get_file_storage(self.request.registry.settings)
604 storage = store_utils.get_file_storage(self.request.registry.settings)
604 store_uid, metadata = storage.save_file(
605 store_uid, metadata = storage.save_file(
605 file_obj.file, filename, extra_metadata=metadata,
606 file_obj.file, filename, extra_metadata=metadata,
606 extensions=allowed_extensions, max_filesize=max_file_size)
607 extensions=allowed_extensions, max_filesize=max_file_size)
607 except FileNotAllowedException:
608 except FileNotAllowedException:
608 self.request.response.status = 400
609 self.request.response.status = 400
609 permitted_extensions = ', '.join(allowed_extensions)
610 permitted_extensions = ', '.join(allowed_extensions)
610 error_msg = 'File `{}` is not allowed. ' \
611 error_msg = 'File `{}` is not allowed. ' \
611 'Only following extensions are permitted: {}'.format(
612 'Only following extensions are permitted: {}'.format(
612 filename, permitted_extensions)
613 filename, permitted_extensions)
613 return {'store_fid': None,
614 return {'store_fid': None,
614 'access_path': None,
615 'access_path': None,
615 'error': error_msg}
616 'error': error_msg}
616 except FileOverSizeException:
617 except FileOverSizeException:
617 self.request.response.status = 400
618 self.request.response.status = 400
618 limit_mb = h.format_byte_size_binary(max_file_size)
619 limit_mb = h.format_byte_size_binary(max_file_size)
619 return {'store_fid': None,
620 return {'store_fid': None,
620 'access_path': None,
621 'access_path': None,
621 'error': 'File {} is exceeding allowed limit of {}.'.format(
622 'error': 'File {} is exceeding allowed limit of {}.'.format(
622 filename, limit_mb)}
623 filename, limit_mb)}
623
624
624 try:
625 try:
625 entry = FileStore.create(
626 entry = FileStore.create(
626 file_uid=store_uid, filename=metadata["filename"],
627 file_uid=store_uid, filename=metadata["filename"],
627 file_hash=metadata["sha256"], file_size=metadata["size"],
628 file_hash=metadata["sha256"], file_size=metadata["size"],
628 file_display_name=file_display_name,
629 file_display_name=file_display_name,
629 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
630 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
630 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
631 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
631 scope_repo_id=self.db_repo.repo_id
632 scope_repo_id=self.db_repo.repo_id
632 )
633 )
633 Session().add(entry)
634 Session().add(entry)
634 Session().commit()
635 Session().commit()
635 log.debug('Stored upload in DB as %s', entry)
636 log.debug('Stored upload in DB as %s', entry)
636 except Exception:
637 except Exception:
637 log.exception('Failed to store file %s', filename)
638 log.exception('Failed to store file %s', filename)
638 self.request.response.status = 400
639 self.request.response.status = 400
639 return {'store_fid': None,
640 return {'store_fid': None,
640 'access_path': None,
641 'access_path': None,
641 'error': 'File {} failed to store in DB.'.format(filename)}
642 'error': 'File {} failed to store in DB.'.format(filename)}
642
643
643 Session().commit()
644 Session().commit()
644
645
645 return {
646 return {
646 'store_fid': store_uid,
647 'store_fid': store_uid,
647 'access_path': h.route_path(
648 'access_path': h.route_path(
648 'download_file', fid=store_uid),
649 'download_file', fid=store_uid),
649 'fqn_access_path': h.route_url(
650 'fqn_access_path': h.route_url(
650 'download_file', fid=store_uid),
651 'download_file', fid=store_uid),
651 'repo_access_path': h.route_path(
652 'repo_access_path': h.route_path(
652 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
653 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
653 'repo_fqn_access_path': h.route_url(
654 'repo_fqn_access_path': h.route_url(
654 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
655 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
655 }
656 }
656
657
657 @LoginRequired()
658 @LoginRequired()
658 @NotAnonymous()
659 @NotAnonymous()
659 @HasRepoPermissionAnyDecorator(
660 @HasRepoPermissionAnyDecorator(
660 'repository.read', 'repository.write', 'repository.admin')
661 'repository.read', 'repository.write', 'repository.admin')
661 @CSRFRequired()
662 @CSRFRequired()
662 def repo_commit_comment_delete(self):
663 def repo_commit_comment_delete(self):
663 commit_id = self.request.matchdict['commit_id']
664 commit_id = self.request.matchdict['commit_id']
664 comment_id = self.request.matchdict['comment_id']
665 comment_id = self.request.matchdict['comment_id']
665
666
666 comment = ChangesetComment.get_or_404(comment_id)
667 comment = ChangesetComment.get_or_404(comment_id)
667 if not comment:
668 if not comment:
668 log.debug('Comment with id:%s not found, skipping', comment_id)
669 log.debug('Comment with id:%s not found, skipping', comment_id)
669 # comment already deleted in another call probably
670 # comment already deleted in another call probably
670 return True
671 return True
671
672
672 if comment.immutable:
673 if comment.immutable:
673 # don't allow deleting comments that are immutable
674 # don't allow deleting comments that are immutable
674 raise HTTPForbidden()
675 raise HTTPForbidden()
675
676
676 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
677 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
677 super_admin = h.HasPermissionAny('hg.admin')()
678 super_admin = h.HasPermissionAny('hg.admin')()
678 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
679 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
679 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
680 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
680 comment_repo_admin = is_repo_admin and is_repo_comment
681 comment_repo_admin = is_repo_admin and is_repo_comment
681
682
682 if comment.draft and not comment_owner:
683 if comment.draft and not comment_owner:
683 # We never allow to delete draft comments for other than owners
684 # We never allow to delete draft comments for other than owners
684 raise HTTPNotFound()
685 raise HTTPNotFound()
685
686
686 if super_admin or comment_owner or comment_repo_admin:
687 if super_admin or comment_owner or comment_repo_admin:
687 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
688 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
688 Session().commit()
689 Session().commit()
689 return True
690 return True
690 else:
691 else:
691 log.warning('No permissions for user %s to delete comment_id: %s',
692 log.warning('No permissions for user %s to delete comment_id: %s',
692 self._rhodecode_db_user, comment_id)
693 self._rhodecode_db_user, comment_id)
693 raise HTTPNotFound()
694 raise HTTPNotFound()
694
695
695 @LoginRequired()
696 @LoginRequired()
696 @NotAnonymous()
697 @NotAnonymous()
697 @HasRepoPermissionAnyDecorator(
698 @HasRepoPermissionAnyDecorator(
698 'repository.read', 'repository.write', 'repository.admin')
699 'repository.read', 'repository.write', 'repository.admin')
699 @CSRFRequired()
700 @CSRFRequired()
700 def repo_commit_comment_edit(self):
701 def repo_commit_comment_edit(self):
701 self.load_default_context()
702 self.load_default_context()
702
703
703 commit_id = self.request.matchdict['commit_id']
704 commit_id = self.request.matchdict['commit_id']
704 comment_id = self.request.matchdict['comment_id']
705 comment_id = self.request.matchdict['comment_id']
705 comment = ChangesetComment.get_or_404(comment_id)
706 comment = ChangesetComment.get_or_404(comment_id)
706
707
707 if comment.immutable:
708 if comment.immutable:
708 # don't allow deleting comments that are immutable
709 # don't allow deleting comments that are immutable
709 raise HTTPForbidden()
710 raise HTTPForbidden()
710
711
711 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
712 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
712 super_admin = h.HasPermissionAny('hg.admin')()
713 super_admin = h.HasPermissionAny('hg.admin')()
713 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
714 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
714 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
715 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
715 comment_repo_admin = is_repo_admin and is_repo_comment
716 comment_repo_admin = is_repo_admin and is_repo_comment
716
717
717 if super_admin or comment_owner or comment_repo_admin:
718 if super_admin or comment_owner or comment_repo_admin:
718 text = self.request.POST.get('text')
719 text = self.request.POST.get('text')
719 version = self.request.POST.get('version')
720 version = self.request.POST.get('version')
720 if text == comment.text:
721 if text == comment.text:
721 log.warning(
722 log.warning(
722 'Comment(repo): '
723 'Comment(repo): '
723 'Trying to create new version '
724 'Trying to create new version '
724 'with the same comment body {}'.format(
725 'with the same comment body {}'.format(
725 comment_id,
726 comment_id,
726 )
727 )
727 )
728 )
728 raise HTTPNotFound()
729 raise HTTPNotFound()
729
730
730 if version.isdigit():
731 if version.isdigit():
731 version = int(version)
732 version = int(version)
732 else:
733 else:
733 log.warning(
734 log.warning(
734 'Comment(repo): Wrong version type {} {} '
735 'Comment(repo): Wrong version type {} {} '
735 'for comment {}'.format(
736 'for comment {}'.format(
736 version,
737 version,
737 type(version),
738 type(version),
738 comment_id,
739 comment_id,
739 )
740 )
740 )
741 )
741 raise HTTPNotFound()
742 raise HTTPNotFound()
742
743
743 try:
744 try:
744 comment_history = CommentsModel().edit(
745 comment_history = CommentsModel().edit(
745 comment_id=comment_id,
746 comment_id=comment_id,
746 text=text,
747 text=text,
747 auth_user=self._rhodecode_user,
748 auth_user=self._rhodecode_user,
748 version=version,
749 version=version,
749 )
750 )
750 except CommentVersionMismatch:
751 except CommentVersionMismatch:
751 raise HTTPConflict()
752 raise HTTPConflict()
752
753
753 if not comment_history:
754 if not comment_history:
754 raise HTTPNotFound()
755 raise HTTPNotFound()
755
756
756 if not comment.draft:
757 if not comment.draft:
757 commit = self.db_repo.get_commit(commit_id)
758 commit = self.db_repo.get_commit(commit_id)
758 CommentsModel().trigger_commit_comment_hook(
759 CommentsModel().trigger_commit_comment_hook(
759 self.db_repo, self._rhodecode_user, 'edit',
760 self.db_repo, self._rhodecode_user, 'edit',
760 data={'comment': comment, 'commit': commit})
761 data={'comment': comment, 'commit': commit})
761
762
762 Session().commit()
763 Session().commit()
763 return {
764 return {
764 'comment_history_id': comment_history.comment_history_id,
765 'comment_history_id': comment_history.comment_history_id,
765 'comment_id': comment.comment_id,
766 'comment_id': comment.comment_id,
766 'comment_version': comment_history.version,
767 'comment_version': comment_history.version,
767 'comment_author_username': comment_history.author.username,
768 'comment_author_username': comment_history.author.username,
768 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
769 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
769 'comment_created_on': h.age_component(comment_history.created_on,
770 'comment_created_on': h.age_component(comment_history.created_on,
770 time_is_local=True),
771 time_is_local=True),
771 }
772 }
772 else:
773 else:
773 log.warning('No permissions for user %s to edit comment_id: %s',
774 log.warning('No permissions for user %s to edit comment_id: %s',
774 self._rhodecode_db_user, comment_id)
775 self._rhodecode_db_user, comment_id)
775 raise HTTPNotFound()
776 raise HTTPNotFound()
776
777
777 @LoginRequired()
778 @LoginRequired()
778 @HasRepoPermissionAnyDecorator(
779 @HasRepoPermissionAnyDecorator(
779 'repository.read', 'repository.write', 'repository.admin')
780 'repository.read', 'repository.write', 'repository.admin')
780 def repo_commit_data(self):
781 def repo_commit_data(self):
781 commit_id = self.request.matchdict['commit_id']
782 commit_id = self.request.matchdict['commit_id']
782 self.load_default_context()
783 self.load_default_context()
783
784
784 try:
785 try:
785 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
786 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
786 except CommitDoesNotExistError as e:
787 except CommitDoesNotExistError as e:
787 return EmptyCommit(message=str(e))
788 return EmptyCommit(message=str(e))
788
789
789 @LoginRequired()
790 @LoginRequired()
790 @HasRepoPermissionAnyDecorator(
791 @HasRepoPermissionAnyDecorator(
791 'repository.read', 'repository.write', 'repository.admin')
792 'repository.read', 'repository.write', 'repository.admin')
792 def repo_commit_children(self):
793 def repo_commit_children(self):
793 commit_id = self.request.matchdict['commit_id']
794 commit_id = self.request.matchdict['commit_id']
794 self.load_default_context()
795 self.load_default_context()
795
796
796 try:
797 try:
797 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
798 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
798 children = commit.children
799 children = commit.children
799 except CommitDoesNotExistError:
800 except CommitDoesNotExistError:
800 children = []
801 children = []
801
802
802 result = {"results": children}
803 result = {"results": children}
803 return result
804 return result
804
805
805 @LoginRequired()
806 @LoginRequired()
806 @HasRepoPermissionAnyDecorator(
807 @HasRepoPermissionAnyDecorator(
807 'repository.read', 'repository.write', 'repository.admin')
808 'repository.read', 'repository.write', 'repository.admin')
808 def repo_commit_parents(self):
809 def repo_commit_parents(self):
809 commit_id = self.request.matchdict['commit_id']
810 commit_id = self.request.matchdict['commit_id']
810 self.load_default_context()
811 self.load_default_context()
811
812
812 try:
813 try:
813 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
814 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
814 parents = commit.parents
815 parents = commit.parents
815 except CommitDoesNotExistError:
816 except CommitDoesNotExistError:
816 parents = []
817 parents = []
817 result = {"results": parents}
818 result = {"results": parents}
818 return result
819 return result
@@ -1,305 +1,305 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
25
25
26 from pyramid.renderers import render
26 from pyramid.renderers import render
27 from pyramid.response import Response
27 from pyramid.response import Response
28
28
29 from rhodecode.apps._base import RepoAppView
29 from rhodecode.apps._base import RepoAppView
30
30
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.lib import diffs, codeblocks
32 from rhodecode.lib import diffs, codeblocks
33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 from rhodecode.lib.utils import safe_str
34 from rhodecode.lib.utils import safe_str
35 from rhodecode.lib.utils2 import safe_unicode, str2bool
35 from rhodecode.lib.utils2 import safe_unicode, str2bool
36 from rhodecode.lib.view_utils import parse_path_ref, get_commit_from_ref_name
36 from rhodecode.lib.view_utils import parse_path_ref, get_commit_from_ref_name
37 from rhodecode.lib.vcs.exceptions import (
37 from rhodecode.lib.vcs.exceptions import (
38 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
38 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
39 NodeDoesNotExistError)
39 NodeDoesNotExistError)
40 from rhodecode.model.db import Repository, ChangesetStatus
40 from rhodecode.model.db import Repository, ChangesetStatus
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 class RepoCompareView(RepoAppView):
45 class RepoCompareView(RepoAppView):
46 def load_default_context(self):
46 def load_default_context(self):
47 c = self._get_local_tmpl_context(include_app_defaults=True)
47 c = self._get_local_tmpl_context(include_app_defaults=True)
48 c.rhodecode_repo = self.rhodecode_vcs_repo
48 c.rhodecode_repo = self.rhodecode_vcs_repo
49 return c
49 return c
50
50
51 def _get_commit_or_redirect(
51 def _get_commit_or_redirect(
52 self, ref, ref_type, repo, redirect_after=True, partial=False):
52 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 """
53 """
54 This is a safe way to get a commit. If an error occurs it
54 This is a safe way to get a commit. If an error occurs it
55 redirects to a commit with a proper message. If partial is set
55 redirects to a commit with a proper message. If partial is set
56 then it does not do redirect raise and throws an exception instead.
56 then it does not do redirect raise and throws an exception instead.
57 """
57 """
58 _ = self.request.translate
58 _ = self.request.translate
59 try:
59 try:
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
61 except EmptyRepositoryError:
61 except EmptyRepositoryError:
62 if not redirect_after:
62 if not redirect_after:
63 return repo.scm_instance().EMPTY_COMMIT
63 return repo.scm_instance().EMPTY_COMMIT
64 h.flash(h.literal(_('There are no commits yet')),
64 h.flash(h.literal(_('There are no commits yet')),
65 category='warning')
65 category='warning')
66 if not partial:
66 if not partial:
67 raise HTTPFound(
67 raise HTTPFound(
68 h.route_path('repo_summary', repo_name=repo.repo_name))
68 h.route_path('repo_summary', repo_name=repo.repo_name))
69 raise HTTPBadRequest()
69 raise HTTPBadRequest()
70
70
71 except RepositoryError as e:
71 except RepositoryError as e:
72 log.exception(safe_str(e))
72 log.exception(safe_str(e))
73 h.flash(safe_str(h.escape(e)), category='warning')
73 h.flash(h.escape(safe_str(e)), category='warning')
74 if not partial:
74 if not partial:
75 raise HTTPFound(
75 raise HTTPFound(
76 h.route_path('repo_summary', repo_name=repo.repo_name))
76 h.route_path('repo_summary', repo_name=repo.repo_name))
77 raise HTTPBadRequest()
77 raise HTTPBadRequest()
78
78
79 @LoginRequired()
79 @LoginRequired()
80 @HasRepoPermissionAnyDecorator(
80 @HasRepoPermissionAnyDecorator(
81 'repository.read', 'repository.write', 'repository.admin')
81 'repository.read', 'repository.write', 'repository.admin')
82 def compare_select(self):
82 def compare_select(self):
83 _ = self.request.translate
83 _ = self.request.translate
84 c = self.load_default_context()
84 c = self.load_default_context()
85
85
86 source_repo = self.db_repo_name
86 source_repo = self.db_repo_name
87 target_repo = self.request.GET.get('target_repo', source_repo)
87 target_repo = self.request.GET.get('target_repo', source_repo)
88 c.source_repo = Repository.get_by_repo_name(source_repo)
88 c.source_repo = Repository.get_by_repo_name(source_repo)
89 c.target_repo = Repository.get_by_repo_name(target_repo)
89 c.target_repo = Repository.get_by_repo_name(target_repo)
90
90
91 if c.source_repo is None or c.target_repo is None:
91 if c.source_repo is None or c.target_repo is None:
92 raise HTTPNotFound()
92 raise HTTPNotFound()
93
93
94 c.compare_home = True
94 c.compare_home = True
95 c.commit_ranges = []
95 c.commit_ranges = []
96 c.collapse_all_commits = False
96 c.collapse_all_commits = False
97 c.diffset = None
97 c.diffset = None
98 c.limited_diff = False
98 c.limited_diff = False
99 c.source_ref = c.target_ref = _('Select commit')
99 c.source_ref = c.target_ref = _('Select commit')
100 c.source_ref_type = ""
100 c.source_ref_type = ""
101 c.target_ref_type = ""
101 c.target_ref_type = ""
102 c.commit_statuses = ChangesetStatus.STATUSES
102 c.commit_statuses = ChangesetStatus.STATUSES
103 c.preview_mode = False
103 c.preview_mode = False
104 c.file_path = None
104 c.file_path = None
105
105
106 return self._get_template_context(c)
106 return self._get_template_context(c)
107
107
108 @LoginRequired()
108 @LoginRequired()
109 @HasRepoPermissionAnyDecorator(
109 @HasRepoPermissionAnyDecorator(
110 'repository.read', 'repository.write', 'repository.admin')
110 'repository.read', 'repository.write', 'repository.admin')
111 def compare(self):
111 def compare(self):
112 _ = self.request.translate
112 _ = self.request.translate
113 c = self.load_default_context()
113 c = self.load_default_context()
114
114
115 source_ref_type = self.request.matchdict['source_ref_type']
115 source_ref_type = self.request.matchdict['source_ref_type']
116 source_ref = self.request.matchdict['source_ref']
116 source_ref = self.request.matchdict['source_ref']
117 target_ref_type = self.request.matchdict['target_ref_type']
117 target_ref_type = self.request.matchdict['target_ref_type']
118 target_ref = self.request.matchdict['target_ref']
118 target_ref = self.request.matchdict['target_ref']
119
119
120 # source_ref will be evaluated in source_repo
120 # source_ref will be evaluated in source_repo
121 source_repo_name = self.db_repo_name
121 source_repo_name = self.db_repo_name
122 source_path, source_id = parse_path_ref(source_ref)
122 source_path, source_id = parse_path_ref(source_ref)
123
123
124 # target_ref will be evaluated in target_repo
124 # target_ref will be evaluated in target_repo
125 target_repo_name = self.request.GET.get('target_repo', source_repo_name)
125 target_repo_name = self.request.GET.get('target_repo', source_repo_name)
126 target_path, target_id = parse_path_ref(
126 target_path, target_id = parse_path_ref(
127 target_ref, default_path=self.request.GET.get('f_path', ''))
127 target_ref, default_path=self.request.GET.get('f_path', ''))
128
128
129 # if merge is True
129 # if merge is True
130 # Show what changes since the shared ancestor commit of target/source
130 # Show what changes since the shared ancestor commit of target/source
131 # the source would get if it was merged with target. Only commits
131 # the source would get if it was merged with target. Only commits
132 # which are in target but not in source will be shown.
132 # which are in target but not in source will be shown.
133 merge = str2bool(self.request.GET.get('merge'))
133 merge = str2bool(self.request.GET.get('merge'))
134 # if merge is False
134 # if merge is False
135 # Show a raw diff of source/target refs even if no ancestor exists
135 # Show a raw diff of source/target refs even if no ancestor exists
136
136
137 # c.fulldiff disables cut_off_limit
137 # c.fulldiff disables cut_off_limit
138 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
138 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
139
139
140 # fetch global flags of ignore ws or context lines
140 # fetch global flags of ignore ws or context lines
141 diff_context = diffs.get_diff_context(self.request)
141 diff_context = diffs.get_diff_context(self.request)
142 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
142 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
143
143
144 c.file_path = target_path
144 c.file_path = target_path
145 c.commit_statuses = ChangesetStatus.STATUSES
145 c.commit_statuses = ChangesetStatus.STATUSES
146
146
147 # if partial, returns just compare_commits.html (commits log)
147 # if partial, returns just compare_commits.html (commits log)
148 partial = self.request.is_xhr
148 partial = self.request.is_xhr
149
149
150 # swap url for compare_diff page
150 # swap url for compare_diff page
151 c.swap_url = h.route_path(
151 c.swap_url = h.route_path(
152 'repo_compare',
152 'repo_compare',
153 repo_name=target_repo_name,
153 repo_name=target_repo_name,
154 source_ref_type=target_ref_type,
154 source_ref_type=target_ref_type,
155 source_ref=target_ref,
155 source_ref=target_ref,
156 target_repo=source_repo_name,
156 target_repo=source_repo_name,
157 target_ref_type=source_ref_type,
157 target_ref_type=source_ref_type,
158 target_ref=source_ref,
158 target_ref=source_ref,
159 _query=dict(merge=merge and '1' or '', f_path=target_path))
159 _query=dict(merge=merge and '1' or '', f_path=target_path))
160
160
161 source_repo = Repository.get_by_repo_name(source_repo_name)
161 source_repo = Repository.get_by_repo_name(source_repo_name)
162 target_repo = Repository.get_by_repo_name(target_repo_name)
162 target_repo = Repository.get_by_repo_name(target_repo_name)
163
163
164 if source_repo is None:
164 if source_repo is None:
165 log.error('Could not find the source repo: {}'
165 log.error('Could not find the source repo: {}'
166 .format(source_repo_name))
166 .format(source_repo_name))
167 h.flash(_('Could not find the source repo: `{}`')
167 h.flash(_('Could not find the source repo: `{}`')
168 .format(h.escape(source_repo_name)), category='error')
168 .format(h.escape(source_repo_name)), category='error')
169 raise HTTPFound(
169 raise HTTPFound(
170 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
170 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
171
171
172 if target_repo is None:
172 if target_repo is None:
173 log.error('Could not find the target repo: {}'
173 log.error('Could not find the target repo: {}'
174 .format(source_repo_name))
174 .format(source_repo_name))
175 h.flash(_('Could not find the target repo: `{}`')
175 h.flash(_('Could not find the target repo: `{}`')
176 .format(h.escape(target_repo_name)), category='error')
176 .format(h.escape(target_repo_name)), category='error')
177 raise HTTPFound(
177 raise HTTPFound(
178 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
178 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
179
179
180 source_scm = source_repo.scm_instance()
180 source_scm = source_repo.scm_instance()
181 target_scm = target_repo.scm_instance()
181 target_scm = target_repo.scm_instance()
182
182
183 source_alias = source_scm.alias
183 source_alias = source_scm.alias
184 target_alias = target_scm.alias
184 target_alias = target_scm.alias
185 if source_alias != target_alias:
185 if source_alias != target_alias:
186 msg = _('The comparison of two different kinds of remote repos '
186 msg = _('The comparison of two different kinds of remote repos '
187 'is not available')
187 'is not available')
188 log.error(msg)
188 log.error(msg)
189 h.flash(msg, category='error')
189 h.flash(msg, category='error')
190 raise HTTPFound(
190 raise HTTPFound(
191 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
191 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
192
192
193 source_commit = self._get_commit_or_redirect(
193 source_commit = self._get_commit_or_redirect(
194 ref=source_id, ref_type=source_ref_type, repo=source_repo,
194 ref=source_id, ref_type=source_ref_type, repo=source_repo,
195 partial=partial)
195 partial=partial)
196 target_commit = self._get_commit_or_redirect(
196 target_commit = self._get_commit_or_redirect(
197 ref=target_id, ref_type=target_ref_type, repo=target_repo,
197 ref=target_id, ref_type=target_ref_type, repo=target_repo,
198 partial=partial)
198 partial=partial)
199
199
200 c.compare_home = False
200 c.compare_home = False
201 c.source_repo = source_repo
201 c.source_repo = source_repo
202 c.target_repo = target_repo
202 c.target_repo = target_repo
203 c.source_ref = source_ref
203 c.source_ref = source_ref
204 c.target_ref = target_ref
204 c.target_ref = target_ref
205 c.source_ref_type = source_ref_type
205 c.source_ref_type = source_ref_type
206 c.target_ref_type = target_ref_type
206 c.target_ref_type = target_ref_type
207
207
208 pre_load = ["author", "date", "message", "branch"]
208 pre_load = ["author", "date", "message", "branch"]
209 c.ancestor = None
209 c.ancestor = None
210
210
211 try:
211 try:
212 c.commit_ranges = source_scm.compare(
212 c.commit_ranges = source_scm.compare(
213 source_commit.raw_id, target_commit.raw_id,
213 source_commit.raw_id, target_commit.raw_id,
214 target_scm, merge, pre_load=pre_load) or []
214 target_scm, merge, pre_load=pre_load) or []
215 if merge:
215 if merge:
216 c.ancestor = source_scm.get_common_ancestor(
216 c.ancestor = source_scm.get_common_ancestor(
217 source_commit.raw_id, target_commit.raw_id, target_scm)
217 source_commit.raw_id, target_commit.raw_id, target_scm)
218 except RepositoryRequirementError:
218 except RepositoryRequirementError:
219 msg = _('Could not compare repos with different '
219 msg = _('Could not compare repos with different '
220 'large file settings')
220 'large file settings')
221 log.error(msg)
221 log.error(msg)
222 if partial:
222 if partial:
223 return Response(msg)
223 return Response(msg)
224 h.flash(msg, category='error')
224 h.flash(msg, category='error')
225 raise HTTPFound(
225 raise HTTPFound(
226 h.route_path('repo_compare_select',
226 h.route_path('repo_compare_select',
227 repo_name=self.db_repo_name))
227 repo_name=self.db_repo_name))
228
228
229 c.statuses = self.db_repo.statuses(
229 c.statuses = self.db_repo.statuses(
230 [x.raw_id for x in c.commit_ranges])
230 [x.raw_id for x in c.commit_ranges])
231
231
232 # auto collapse if we have more than limit
232 # auto collapse if we have more than limit
233 collapse_limit = diffs.DiffProcessor._collapse_commits_over
233 collapse_limit = diffs.DiffProcessor._collapse_commits_over
234 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
234 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
235
235
236 if partial: # for PR ajax commits loader
236 if partial: # for PR ajax commits loader
237 if not c.ancestor:
237 if not c.ancestor:
238 return Response('') # cannot merge if there is no ancestor
238 return Response('') # cannot merge if there is no ancestor
239
239
240 html = render(
240 html = render(
241 'rhodecode:templates/compare/compare_commits.mako',
241 'rhodecode:templates/compare/compare_commits.mako',
242 self._get_template_context(c), self.request)
242 self._get_template_context(c), self.request)
243 return Response(html)
243 return Response(html)
244
244
245 if c.ancestor:
245 if c.ancestor:
246 # case we want a simple diff without incoming commits,
246 # case we want a simple diff without incoming commits,
247 # previewing what will be merged.
247 # previewing what will be merged.
248 # Make the diff on target repo (which is known to have target_ref)
248 # Make the diff on target repo (which is known to have target_ref)
249 log.debug('Using ancestor %s as source_ref instead of %s',
249 log.debug('Using ancestor %s as source_ref instead of %s',
250 c.ancestor, source_ref)
250 c.ancestor, source_ref)
251 source_repo = target_repo
251 source_repo = target_repo
252 source_commit = target_repo.get_commit(commit_id=c.ancestor)
252 source_commit = target_repo.get_commit(commit_id=c.ancestor)
253
253
254 # diff_limit will cut off the whole diff if the limit is applied
254 # diff_limit will cut off the whole diff if the limit is applied
255 # otherwise it will just hide the big files from the front-end
255 # otherwise it will just hide the big files from the front-end
256 diff_limit = c.visual.cut_off_limit_diff
256 diff_limit = c.visual.cut_off_limit_diff
257 file_limit = c.visual.cut_off_limit_file
257 file_limit = c.visual.cut_off_limit_file
258
258
259 log.debug('calculating diff between '
259 log.debug('calculating diff between '
260 'source_ref:%s and target_ref:%s for repo `%s`',
260 'source_ref:%s and target_ref:%s for repo `%s`',
261 source_commit, target_commit,
261 source_commit, target_commit,
262 safe_unicode(source_repo.scm_instance().path))
262 safe_unicode(source_repo.scm_instance().path))
263
263
264 if source_commit.repository != target_commit.repository:
264 if source_commit.repository != target_commit.repository:
265 msg = _(
265 msg = _(
266 "Repositories unrelated. "
266 "Repositories unrelated. "
267 "Cannot compare commit %(commit1)s from repository %(repo1)s "
267 "Cannot compare commit %(commit1)s from repository %(repo1)s "
268 "with commit %(commit2)s from repository %(repo2)s.") % {
268 "with commit %(commit2)s from repository %(repo2)s.") % {
269 'commit1': h.show_id(source_commit),
269 'commit1': h.show_id(source_commit),
270 'repo1': source_repo.repo_name,
270 'repo1': source_repo.repo_name,
271 'commit2': h.show_id(target_commit),
271 'commit2': h.show_id(target_commit),
272 'repo2': target_repo.repo_name,
272 'repo2': target_repo.repo_name,
273 }
273 }
274 h.flash(msg, category='error')
274 h.flash(msg, category='error')
275 raise HTTPFound(
275 raise HTTPFound(
276 h.route_path('repo_compare_select',
276 h.route_path('repo_compare_select',
277 repo_name=self.db_repo_name))
277 repo_name=self.db_repo_name))
278
278
279 txt_diff = source_repo.scm_instance().get_diff(
279 txt_diff = source_repo.scm_instance().get_diff(
280 commit1=source_commit, commit2=target_commit,
280 commit1=source_commit, commit2=target_commit,
281 path=target_path, path1=source_path,
281 path=target_path, path1=source_path,
282 ignore_whitespace=hide_whitespace_changes, context=diff_context)
282 ignore_whitespace=hide_whitespace_changes, context=diff_context)
283
283
284 diff_processor = diffs.DiffProcessor(
284 diff_processor = diffs.DiffProcessor(
285 txt_diff, format='newdiff', diff_limit=diff_limit,
285 txt_diff, format='newdiff', diff_limit=diff_limit,
286 file_limit=file_limit, show_full_diff=c.fulldiff)
286 file_limit=file_limit, show_full_diff=c.fulldiff)
287 _parsed = diff_processor.prepare()
287 _parsed = diff_processor.prepare()
288
288
289 diffset = codeblocks.DiffSet(
289 diffset = codeblocks.DiffSet(
290 repo_name=source_repo.repo_name,
290 repo_name=source_repo.repo_name,
291 source_node_getter=codeblocks.diffset_node_getter(source_commit),
291 source_node_getter=codeblocks.diffset_node_getter(source_commit),
292 target_repo_name=self.db_repo_name,
292 target_repo_name=self.db_repo_name,
293 target_node_getter=codeblocks.diffset_node_getter(target_commit),
293 target_node_getter=codeblocks.diffset_node_getter(target_commit),
294 )
294 )
295 c.diffset = self.path_filter.render_patchset_filtered(
295 c.diffset = self.path_filter.render_patchset_filtered(
296 diffset, _parsed, source_ref, target_ref)
296 diffset, _parsed, source_ref, target_ref)
297
297
298 c.preview_mode = merge
298 c.preview_mode = merge
299 c.source_commit = source_commit
299 c.source_commit = source_commit
300 c.target_commit = target_commit
300 c.target_commit = target_commit
301
301
302 html = render(
302 html = render(
303 'rhodecode:templates/compare/compare_diff.mako',
303 'rhodecode:templates/compare/compare_diff.mako',
304 self._get_template_context(c), self.request)
304 self._get_template_context(c), self.request)
305 return Response(html) No newline at end of file
305 return Response(html)
@@ -1,1581 +1,1581 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import itertools
21 import itertools
22 import logging
22 import logging
23 import os
23 import os
24 import shutil
24 import shutil
25 import tempfile
25 import tempfile
26 import collections
26 import collections
27 import urllib
27 import urllib
28 import pathlib2
28 import pathlib2
29
29
30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31
31
32 from pyramid.renderers import render
32 from pyramid.renderers import render
33 from pyramid.response import Response
33 from pyramid.response import Response
34
34
35 import rhodecode
35 import rhodecode
36 from rhodecode.apps._base import RepoAppView
36 from rhodecode.apps._base import RepoAppView
37
37
38
38
39 from rhodecode.lib import diffs, helpers as h, rc_cache
39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 from rhodecode.lib import audit_logger
40 from rhodecode.lib import audit_logger
41 from rhodecode.lib.view_utils import parse_path_ref
41 from rhodecode.lib.view_utils import parse_path_ref
42 from rhodecode.lib.exceptions import NonRelativePathError
42 from rhodecode.lib.exceptions import NonRelativePathError
43 from rhodecode.lib.codeblocks import (
43 from rhodecode.lib.codeblocks import (
44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
45 from rhodecode.lib.utils2 import (
45 from rhodecode.lib.utils2 import (
46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
47 from rhodecode.lib.auth import (
47 from rhodecode.lib.auth import (
48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
49 from rhodecode.lib.vcs import path as vcspath
49 from rhodecode.lib.vcs import path as vcspath
50 from rhodecode.lib.vcs.backends.base import EmptyCommit
50 from rhodecode.lib.vcs.backends.base import EmptyCommit
51 from rhodecode.lib.vcs.conf import settings
51 from rhodecode.lib.vcs.conf import settings
52 from rhodecode.lib.vcs.nodes import FileNode
52 from rhodecode.lib.vcs.nodes import FileNode
53 from rhodecode.lib.vcs.exceptions import (
53 from rhodecode.lib.vcs.exceptions import (
54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
56 NodeDoesNotExistError, CommitError, NodeError)
56 NodeDoesNotExistError, CommitError, NodeError)
57
57
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.db import Repository
59 from rhodecode.model.db import Repository
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
64 class RepoFilesView(RepoAppView):
64 class RepoFilesView(RepoAppView):
65
65
66 @staticmethod
66 @staticmethod
67 def adjust_file_path_for_svn(f_path, repo):
67 def adjust_file_path_for_svn(f_path, repo):
68 """
68 """
69 Computes the relative path of `f_path`.
69 Computes the relative path of `f_path`.
70
70
71 This is mainly based on prefix matching of the recognized tags and
71 This is mainly based on prefix matching of the recognized tags and
72 branches in the underlying repository.
72 branches in the underlying repository.
73 """
73 """
74 tags_and_branches = itertools.chain(
74 tags_and_branches = itertools.chain(
75 repo.branches.iterkeys(),
75 repo.branches.iterkeys(),
76 repo.tags.iterkeys())
76 repo.tags.iterkeys())
77 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
77 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
78
78
79 for name in tags_and_branches:
79 for name in tags_and_branches:
80 if f_path.startswith('{}/'.format(name)):
80 if f_path.startswith('{}/'.format(name)):
81 f_path = vcspath.relpath(f_path, name)
81 f_path = vcspath.relpath(f_path, name)
82 break
82 break
83 return f_path
83 return f_path
84
84
85 def load_default_context(self):
85 def load_default_context(self):
86 c = self._get_local_tmpl_context(include_app_defaults=True)
86 c = self._get_local_tmpl_context(include_app_defaults=True)
87 c.rhodecode_repo = self.rhodecode_vcs_repo
87 c.rhodecode_repo = self.rhodecode_vcs_repo
88 c.enable_downloads = self.db_repo.enable_downloads
88 c.enable_downloads = self.db_repo.enable_downloads
89 return c
89 return c
90
90
91 def _ensure_not_locked(self, commit_id='tip'):
91 def _ensure_not_locked(self, commit_id='tip'):
92 _ = self.request.translate
92 _ = self.request.translate
93
93
94 repo = self.db_repo
94 repo = self.db_repo
95 if repo.enable_locking and repo.locked[0]:
95 if repo.enable_locking and repo.locked[0]:
96 h.flash(_('This repository has been locked by %s on %s')
96 h.flash(_('This repository has been locked by %s on %s')
97 % (h.person_by_id(repo.locked[0]),
97 % (h.person_by_id(repo.locked[0]),
98 h.format_date(h.time_to_datetime(repo.locked[1]))),
98 h.format_date(h.time_to_datetime(repo.locked[1]))),
99 'warning')
99 'warning')
100 files_url = h.route_path(
100 files_url = h.route_path(
101 'repo_files:default_path',
101 'repo_files:default_path',
102 repo_name=self.db_repo_name, commit_id=commit_id)
102 repo_name=self.db_repo_name, commit_id=commit_id)
103 raise HTTPFound(files_url)
103 raise HTTPFound(files_url)
104
104
105 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
105 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
106 _ = self.request.translate
106 _ = self.request.translate
107
107
108 if not is_head:
108 if not is_head:
109 message = _('Cannot modify file. '
109 message = _('Cannot modify file. '
110 'Given commit `{}` is not head of a branch.').format(commit_id)
110 'Given commit `{}` is not head of a branch.').format(commit_id)
111 h.flash(message, category='warning')
111 h.flash(message, category='warning')
112
112
113 if json_mode:
113 if json_mode:
114 return message
114 return message
115
115
116 files_url = h.route_path(
116 files_url = h.route_path(
117 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
117 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
118 f_path=f_path)
118 f_path=f_path)
119 raise HTTPFound(files_url)
119 raise HTTPFound(files_url)
120
120
def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
    """
    Check branch-permission rules for the current user on `branch_name`.

    If a branch rule is set and it is neither `branch.push` nor
    `branch.push_force`, the change is forbidden: a warning is flashed and
    either the message is returned (``json_mode=True``) or the client is
    redirected to the repo files view via :class:`HTTPFound`.
    Returns ``None`` silently when no forbidding rule matches.
    """
    _ = self.request.translate

    # rule is the matched branch-rule pattern, branch_perm the resolved
    # permission string for the current user on this branch
    rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
        self.db_repo_name, branch_name)
    if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
        # escape user-controlled values before flashing to avoid XSS
        message = _('Branch `{}` changes forbidden by rule {}.').format(
            h.escape(branch_name), h.escape(rule))
        h.flash(message, 'warning')

        if json_mode:
            return message

        files_url = h.route_path(
            'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)

        raise HTTPFound(files_url)
138
138
def _get_commit_and_path(self):
    """
    Resolve the (commit_id, f_path) pair from the request matchdict.

    Falls back to the repository landing ref and the root path '/' when
    the route did not supply explicit values.
    """
    matchdict = self.request.matchdict
    commit_id = matchdict.get('commit_id', self.db_repo.landing_ref_name)
    f_path = self._get_f_path(matchdict, '/')
    return commit_id, f_path
147
147
148 def _get_default_encoding(self, c):
148 def _get_default_encoding(self, c):
149 enc_list = getattr(c, 'default_encodings', [])
149 enc_list = getattr(c, 'default_encodings', [])
150 return enc_list[0] if enc_list else 'UTF-8'
150 return enc_list[0] if enc_list else 'UTF-8'
151
151
def _get_commit_or_redirect(self, commit_id, redirect_after=True):
    """
    This is a safe way to get commit. If an error occurs it redirects to
    tip with proper message.

    :param commit_id: id of commit to fetch
    :param redirect_after: toggle redirection
    :raises HTTPFound: for empty repos, redirect to summary page
    :raises HTTPNotFound: when the commit cannot be resolved
    """
    _ = self.request.translate

    try:
        return self.rhodecode_vcs_repo.get_commit(commit_id)
    except EmptyRepositoryError:
        if not redirect_after:
            return None

        _url = h.route_path(
            'repo_files_add_file',
            repo_name=self.db_repo_name, commit_id=0, f_path='')

        # only users that may write get the "add a new file" shortcut link
        if h.HasRepoPermissionAny(
                'repository.write', 'repository.admin')(self.db_repo_name):
            add_new = h.link_to(
                _('Click here to add a new file.'), _url, class_="alert-link")
        else:
            add_new = ""

        h.flash(h.literal(
            _('There are no files yet. %s') % add_new), category='warning')
        raise HTTPFound(
            h.route_path('repo_summary', repo_name=self.db_repo_name))

    except (CommitDoesNotExistError, LookupError):
        # escape the user-supplied commit_id before flashing (XSS
        # hardening, consistent with the other views in this class)
        msg = _('No such commit exists for this repository. Commit: {}').format(
            h.escape(commit_id))
        h.flash(msg, category='error')
        raise HTTPNotFound()
    except RepositoryError as e:
        h.flash(h.escape(safe_str(e)), category='error')
        raise HTTPNotFound()
191
191
def _get_filenode_or_redirect(self, commit_obj, path):
    """
    Returns file_node, if error occurs or given path is directory,
    it'll redirect to top level path
    """
    _ = self.request.translate

    try:
        file_node = commit_obj.get_node(path)
        if file_node.is_dir():
            # directories are not served by the file views; treat as error
            raise RepositoryError('The given path is a directory')
    except CommitDoesNotExistError:
        log.exception('No such commit exists for this repository')
        h.flash(_('No such commit exists for this repository'), category='error')
        raise HTTPNotFound()
    except RepositoryError as e:
        log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
        # stringify first, then escape: the error text may echo user input
        h.flash(h.escape(safe_str(e)), category='error')
        raise HTTPNotFound()

    return file_node
213
213
def _is_valid_head(self, commit_id, repo, landing_ref):
    """
    Resolve `commit_id` (a branch name or a sha) against `repo` and report
    whether it points at a branch head.

    :param commit_id: branch name or commit sha to check
    :param repo: vcs repository instance
    :param landing_ref: ref used as fallback for empty repositories
    :return: tuple of (branch_name, sha_commit_id, is_head)
    """
    branch_name = sha_commit_id = ''
    is_head = False
    log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)

    for _branch_name, branch_commit_id in repo.branches.items():
        # simple case we pass in branch name, it's a HEAD
        if commit_id == _branch_name:
            is_head = True
            branch_name = _branch_name
            sha_commit_id = branch_commit_id
            break
        # case when we pass in full sha commit_id, which is a head
        elif commit_id == branch_commit_id:
            is_head = True
            branch_name = _branch_name
            sha_commit_id = branch_commit_id
            break

    if h.is_svn(repo) and not repo.is_empty():
        # Note: Subversion only has one head.
        if commit_id == repo.get_commit(commit_idx=-1).raw_id:
            is_head = True
        return branch_name, sha_commit_id, is_head

    # checked branches, means we only need to try to get the branch/commit_sha
    if repo.is_empty():
        # empty repo: treat the landing ref as a valid head so file
        # creation on a fresh repository is allowed
        is_head = True
        branch_name = landing_ref
        sha_commit_id = EmptyCommit().raw_id
    else:
        commit = repo.get_commit(commit_id=commit_id)
        if commit:
            branch_name = commit.branch
            sha_commit_id = commit.raw_id

    return branch_name, sha_commit_id, is_head
251
251
def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
    """
    Render the file-browser tree template for `commit_id` at `f_path`,
    optionally served from the per-repo cache region.

    Caching is enabled when a positive TTL is configured and the recache
    flag is not set; `rc_cache.FILE_TREE_CACHE_VER` participates in the
    cache key so bumping it invalidates all cached trees.
    """
    repo_id = self.db_repo.repo_id
    force_recache = self.get_recache_flag()

    cache_seconds = safe_int(
        rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = not force_recache and cache_seconds > 0
    log.debug(
        'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
        'with caching: %s[TTL: %ss]' % (
            repo_id, commit_id, f_path, cache_on, cache_seconds or 0))

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    # all leading underscore args below are part of the cache key; `ver`
    # and `_name_hash` exist purely to version/scope the cached entry
    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
    def compute_file_tree(ver, _name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
        log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
                  ver, _repo_id, _commit_id, _f_path)

        c.full_load = _full_load
        return render(
            'rhodecode:templates/files/files_browser_tree.mako',
            self._get_template_context(c), self.request, _at_rev)

    return compute_file_tree(
        rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_name_hash,
        self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
281
281
def _get_archive_spec(self, fname):
    """
    Split an archive request fname such as `<commit_id>.tar.gz` into its
    components.

    :param fname: archive file name: commit id followed by an extension
    :return: tuple of (commit_id, ext, fileformat, content_type)
    :raises ValueError: when the extension matches no known archive type
    """
    log.debug('Detecting archive spec for: `%s`', fname)

    fileformat = None
    ext = None
    content_type = None
    for a_type, content_type, extension in settings.ARCHIVE_SPECS:

        if fname.endswith(extension):
            fileformat = a_type
            log.debug('archive is of type: %s', fileformat)
            ext = extension
            break

    if not fileformat:
        # include the offending name so callers can log something useful
        raise ValueError('No archive type detected in: `{}`'.format(fname))

    # left over part of whole fname is the commit
    commit_id = fname[:-len(ext)]

    return commit_id, ext, fileformat, content_type
303
303
def create_pure_path(self, *parts):
    """
    Join `parts` into a PurePath while dropping any '.' and '..'
    segments, preventing path traversal outside the intended root.
    """
    safe_segments = []
    for segment in pathlib2.PurePath(*parts).parts:
        if segment in ('.', '..'):
            continue
        safe_segments.append(segment)

    return pathlib2.PurePath(*safe_segments)
312
312
def _is_lf_enabled(self, target_repo):
    """
    Report whether large-file support (Mercurial largefiles or Git LFS)
    is enabled for `target_repo`; False for unsupported repo types.
    """
    vcs_to_setting = {
        'hg': 'extensions_largefiles',
        'git': 'vcs_git_lfs_enabled'
    }

    setting_key = vcs_to_setting.get(target_repo.repo_type)
    if not setting_key:
        return False

    return self._get_repo_setting(target_repo, setting_key)
327
327
def _get_archive_name(self, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
    """
    Build an archive file name from its components, e.g.
    vcsserver.zip / vcsserver-abcdefgh.zip / vcsserver-abcdefgh-defghijk.zip

    :param db_repo_name: repo name; '/' is flattened to '_' for backward
        compatible archive names
    :param commit_sha: short sha fragment (caller includes leading '-')
    :param ext: archive extension including the dot(s)
    :param subrepos: mark archives that include subrepositories
    :param path_sha: optional sha of a sub-path included in the archive
    :param with_hash: when False a '-plain' marker is appended instead
    """
    # original backward compat name of archive
    base = safe_str(db_repo_name.replace('/', '_'))

    pieces = [base]
    if subrepos:
        pieces.append('-sub')
    pieces.append(commit_sha)
    if not with_hash:
        pieces.append('-{}'.format('plain'))
    if path_sha:
        pieces.append('-{}'.format(path_sha))
    pieces.append(ext)

    return ''.join(pieces)
343
343
@LoginRequired()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
def repo_archivefile(self):
    """
    Serve a repository archive (zip/tar...) for the commit encoded in the
    `fname` route segment, optionally restricted to a sub-path (`at_path`)
    and optionally cached on disk via the `archive_cache_dir` config.
    Streams the archive back in 16KB chunks.
    """
    # archive cache config
    from rhodecode import CONFIG
    _ = self.request.translate
    self.load_default_context()
    default_at_path = '/'
    fname = self.request.matchdict['fname']
    subrepos = self.request.GET.get('subrepos') == 'true'
    with_hash = str2bool(self.request.GET.get('with_hash', '1'))
    at_path = self.request.GET.get('at_path') or default_at_path

    if not self.db_repo.enable_downloads:
        return Response(_('Downloads disabled'))

    try:
        commit_id, ext, fileformat, content_type = \
            self._get_archive_spec(fname)
    except ValueError:
        return Response(_('Unknown archive type for: `{}`').format(
            h.escape(fname)))

    try:
        commit = self.rhodecode_vcs_repo.get_commit(commit_id)
    except CommitDoesNotExistError:
        return Response(_('Unknown commit_id {}').format(
            h.escape(commit_id)))
    except EmptyRepositoryError:
        return Response(_('Empty repository'))

    # we used a ref, or a shorter version, lets redirect client ot use explicit hash
    if commit_id != commit.raw_id:
        fname='{}{}'.format(commit.raw_id, ext)
        raise HTTPFound(self.request.current_route_path(fname=fname))

    try:
        at_path = commit.get_node(at_path).path or default_at_path
    except Exception:
        return Response(_('No node at path {} for this repository').format(at_path))

    # path sha is part of subdir
    path_sha = ''
    if at_path != default_at_path:
        path_sha = sha1(at_path)[:8]
    short_sha = '-{}'.format(safe_str(commit.short_id))
    # used for cache etc
    archive_name = self._get_archive_name(
        self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
        path_sha=path_sha, with_hash=with_hash)

    if not with_hash:
        short_sha = ''
        path_sha = ''

    # what end client gets served
    response_archive_name = self._get_archive_name(
        self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
        path_sha=path_sha, with_hash=with_hash)
    # remove extension from our archive directory name
    archive_dir_name = response_archive_name[:-len(ext)]

    use_cached_archive = False
    archive_cache_dir = CONFIG.get('archive_cache_dir')
    archive_cache_enabled = archive_cache_dir and not self.request.GET.get('no_cache')
    cached_archive_path = None

    if archive_cache_enabled:
        # check if we it's ok to write, and re-create the archive cache
        if not os.path.isdir(CONFIG['archive_cache_dir']):
            os.makedirs(CONFIG['archive_cache_dir'])

        cached_archive_path = os.path.join(
            CONFIG['archive_cache_dir'], archive_name)
        if os.path.isfile(cached_archive_path):
            log.debug('Found cached archive in %s', cached_archive_path)
            # fd None marks "no temp file to close" for the streamer below
            fd, archive = None, cached_archive_path
            use_cached_archive = True
        else:
            log.debug('Archive %s is not yet cached', archive_name)

    # generate new archive, as previous was not found in the cache
    if not use_cached_archive:
        # build the temp file next to the cache dir (if any) so the later
        # shutil.move is a cheap same-filesystem rename
        _dir = os.path.abspath(archive_cache_dir) if archive_cache_dir else None
        fd, archive = tempfile.mkstemp(dir=_dir)
        log.debug('Creating new temp archive in %s', archive)
        try:
            commit.archive_repo(archive, archive_dir_name=archive_dir_name,
                                kind=fileformat, subrepos=subrepos,
                                archive_at_path=at_path)
        except ImproperArchiveTypeError:
            return _('Unknown archive type')
        if archive_cache_enabled:
            # if we generated the archive and we have cache enabled
            # let's use this for future
            log.debug('Storing new archive in %s', cached_archive_path)
            shutil.move(archive, cached_archive_path)
            archive = cached_archive_path

    # store download action
    audit_logger.store_web(
        'repo.archive.download', action_data={
            'user_agent': self.request.user_agent,
            'archive_name': archive_name,
            'archive_spec': fname,
            'archive_cached': use_cached_archive},
        user=self._rhodecode_user,
        repo=self.db_repo,
        commit=True
    )

    def get_chunked_archive(archive_path):
        # generator: stream the archive and clean up once fully consumed
        with open(archive_path, 'rb') as stream:
            while True:
                data = stream.read(16 * 1024)
                if not data:
                    if fd:  # fd means we used temporary file
                        os.close(fd)
                    if not archive_cache_enabled:
                        log.debug('Destroying temp archive %s', archive_path)
                        os.remove(archive_path)
                    break
                yield data

    response = Response(app_iter=get_chunked_archive(archive))
    response.content_disposition = str('attachment; filename=%s' % response_archive_name)
    response.content_type = str(content_type)

    return response
474
474
def _get_file_node(self, commit_id, f_path):
    """
    Fetch the file node at `f_path` for `commit_id`, falling back to an
    empty FileNode for unknown/null commits or missing paths (used by the
    diff views so one side of a diff may be absent).
    """
    # treat empty/null/zero ids as "no commit" and serve an empty node
    if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
        commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        try:
            node = commit.get_node(f_path)
            if node.is_dir():
                raise NodeError('%s path is a %s not a file'
                                % (node, type(node)))
        except NodeDoesNotExistError:
            # path absent at this commit: synthesize an EmptyCommit that
            # mirrors the real commit's metadata and attach an empty node
            commit = EmptyCommit(
                commit_id=commit_id,
                idx=commit.idx,
                repo=commit.repository,
                alias=commit.repository.alias,
                message=commit.message,
                author=commit.author,
                date=commit.date)
            node = FileNode(f_path, '', commit=commit)
    else:
        commit = EmptyCommit(
            repo=self.rhodecode_vcs_repo,
            alias=self.rhodecode_vcs_repo.alias)
        node = FileNode(f_path, '', commit=commit)
    return node
499
499
@LoginRequired()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
def repo_files_diff(self):
    """
    Diff a single file between two commits given by the `diff1`/`diff2`
    GET parameters. Only 'download' and 'raw' actions are served here;
    any other action redirects to the full compare view.
    """
    c = self.load_default_context()
    f_path = self._get_f_path(self.request.matchdict)
    diff1 = self.request.GET.get('diff1', '')
    diff2 = self.request.GET.get('diff2', '')

    # diff1 may carry its own path spec, e.g. `<path>@<ref>`
    path1, diff1 = parse_path_ref(diff1, default_path=f_path)

    ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
    line_context = self.request.GET.get('context', 3)

    if not any((diff1, diff2)):
        h.flash(
            'Need query parameter "diff1" or "diff2" to generate a diff.',
            category='error')
        raise HTTPBadRequest()

    c.action = self.request.GET.get('diff')
    if c.action not in ['download', 'raw']:
        compare_url = h.route_path(
            'repo_compare',
            repo_name=self.db_repo_name,
            source_ref_type='rev',
            source_ref=diff1,
            target_repo=self.db_repo_name,
            target_ref_type='rev',
            target_ref=diff2,
            _query=dict(f_path=f_path))
        # redirect to new view if we render diff
        raise HTTPFound(compare_url)

    try:
        node1 = self._get_file_node(diff1, path1)
        node2 = self._get_file_node(diff2, f_path)
    except (RepositoryError, NodeError):
        log.exception("Exception while trying to get node from repository")
        raise HTTPFound(
            h.route_path('repo_files', repo_name=self.db_repo_name,
                         commit_id='tip', f_path=f_path))

    # neither side resolved to a real commit -> nothing to diff
    if all(isinstance(node.commit, EmptyCommit)
           for node in (node1, node2)):
        raise HTTPNotFound()

    c.commit_1 = node1.commit
    c.commit_2 = node2.commit

    if c.action == 'download':
        _diff = diffs.get_gitdiff(node1, node2,
                                  ignore_whitespace=ignore_whitespace,
                                  context=line_context)
        diff = diffs.DiffProcessor(_diff, format='gitdiff')

        response = Response(self.path_filter.get_raw_patch(diff))
        response.content_type = 'text/plain'
        response.content_disposition = (
            'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
        )
        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset
        return response

    elif c.action == 'raw':
        _diff = diffs.get_gitdiff(node1, node2,
                                  ignore_whitespace=ignore_whitespace,
                                  context=line_context)
        diff = diffs.DiffProcessor(_diff, format='gitdiff')

        response = Response(self.path_filter.get_raw_patch(diff))
        response.content_type = 'text/plain'
        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset
        return response

    # in case we ever end up here
    raise HTTPNotFound()
581
581
@LoginRequired()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
def repo_files_diff_2way_redirect(self):
    """
    Kept only to make OLD links work
    """
    params = self.request.GET
    diff1 = params.get('diff1', '')
    diff2 = params.get('diff2', '')
    f_path = self._get_f_path_unchecked(self.request.matchdict)

    if not diff1 and not diff2:
        h.flash(
            'Need query parameter "diff1" or "diff2" to generate a diff.',
            category='error')
        raise HTTPBadRequest()

    # forward old 2-way diff links to the modern side-by-side compare view
    query = dict(f_path=f_path, diffmode='sideside',
                 target_repo=self.db_repo_name,)
    compare_url = h.route_path(
        'repo_compare',
        repo_name=self.db_repo_name,
        source_ref_type='rev',
        source_ref=diff1,
        target_ref_type='rev',
        target_ref=diff2,
        _query=query)
    raise HTTPFound(compare_url)
609
609
@LoginRequired()
def repo_files_default_commit_redirect(self):
    """
    Special page that redirects to the landing page of files based on the default
    commit for repository
    """
    c = self.load_default_context()
    db_repo = c.rhodecode_db_repo
    landing_ref = db_repo.landing_ref_name

    raise HTTPFound(h.repo_files_by_ref_url(
        db_repo.repo_name,
        db_repo.repo_type,
        f_path='',
        ref_name=landing_ref,
        commit_id='tip',
        query=dict(at=landing_ref)
    ))
628
628
629 @LoginRequired()
629 @LoginRequired()
630 @HasRepoPermissionAnyDecorator(
630 @HasRepoPermissionAnyDecorator(
631 'repository.read', 'repository.write', 'repository.admin')
631 'repository.read', 'repository.write', 'repository.admin')
632 def repo_files(self):
632 def repo_files(self):
633 c = self.load_default_context()
633 c = self.load_default_context()
634
634
635 view_name = getattr(self.request.matched_route, 'name', None)
635 view_name = getattr(self.request.matched_route, 'name', None)
636
636
637 c.annotate = view_name == 'repo_files:annotated'
637 c.annotate = view_name == 'repo_files:annotated'
638 # default is false, but .rst/.md files later are auto rendered, we can
638 # default is false, but .rst/.md files later are auto rendered, we can
639 # overwrite auto rendering by setting this GET flag
639 # overwrite auto rendering by setting this GET flag
640 c.renderer = view_name == 'repo_files:rendered' or \
640 c.renderer = view_name == 'repo_files:rendered' or \
641 not self.request.GET.get('no-render', False)
641 not self.request.GET.get('no-render', False)
642
642
643 commit_id, f_path = self._get_commit_and_path()
643 commit_id, f_path = self._get_commit_and_path()
644
644
645 c.commit = self._get_commit_or_redirect(commit_id)
645 c.commit = self._get_commit_or_redirect(commit_id)
646 c.branch = self.request.GET.get('branch', None)
646 c.branch = self.request.GET.get('branch', None)
647 c.f_path = f_path
647 c.f_path = f_path
648 at_rev = self.request.GET.get('at')
648 at_rev = self.request.GET.get('at')
649
649
650 # prev link
650 # prev link
651 try:
651 try:
652 prev_commit = c.commit.prev(c.branch)
652 prev_commit = c.commit.prev(c.branch)
653 c.prev_commit = prev_commit
653 c.prev_commit = prev_commit
654 c.url_prev = h.route_path(
654 c.url_prev = h.route_path(
655 'repo_files', repo_name=self.db_repo_name,
655 'repo_files', repo_name=self.db_repo_name,
656 commit_id=prev_commit.raw_id, f_path=f_path)
656 commit_id=prev_commit.raw_id, f_path=f_path)
657 if c.branch:
657 if c.branch:
658 c.url_prev += '?branch=%s' % c.branch
658 c.url_prev += '?branch=%s' % c.branch
659 except (CommitDoesNotExistError, VCSError):
659 except (CommitDoesNotExistError, VCSError):
660 c.url_prev = '#'
660 c.url_prev = '#'
661 c.prev_commit = EmptyCommit()
661 c.prev_commit = EmptyCommit()
662
662
663 # next link
663 # next link
664 try:
664 try:
665 next_commit = c.commit.next(c.branch)
665 next_commit = c.commit.next(c.branch)
666 c.next_commit = next_commit
666 c.next_commit = next_commit
667 c.url_next = h.route_path(
667 c.url_next = h.route_path(
668 'repo_files', repo_name=self.db_repo_name,
668 'repo_files', repo_name=self.db_repo_name,
669 commit_id=next_commit.raw_id, f_path=f_path)
669 commit_id=next_commit.raw_id, f_path=f_path)
670 if c.branch:
670 if c.branch:
671 c.url_next += '?branch=%s' % c.branch
671 c.url_next += '?branch=%s' % c.branch
672 except (CommitDoesNotExistError, VCSError):
672 except (CommitDoesNotExistError, VCSError):
673 c.url_next = '#'
673 c.url_next = '#'
674 c.next_commit = EmptyCommit()
674 c.next_commit = EmptyCommit()
675
675
676 # files or dirs
676 # files or dirs
677 try:
677 try:
678 c.file = c.commit.get_node(f_path)
678 c.file = c.commit.get_node(f_path)
679 c.file_author = True
679 c.file_author = True
680 c.file_tree = ''
680 c.file_tree = ''
681
681
682 # load file content
682 # load file content
683 if c.file.is_file():
683 if c.file.is_file():
684 c.lf_node = {}
684 c.lf_node = {}
685
685
686 has_lf_enabled = self._is_lf_enabled(self.db_repo)
686 has_lf_enabled = self._is_lf_enabled(self.db_repo)
687 if has_lf_enabled:
687 if has_lf_enabled:
688 c.lf_node = c.file.get_largefile_node()
688 c.lf_node = c.file.get_largefile_node()
689
689
690 c.file_source_page = 'true'
690 c.file_source_page = 'true'
691 c.file_last_commit = c.file.last_commit
691 c.file_last_commit = c.file.last_commit
692
692
693 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
693 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
694
694
695 if not (c.file_size_too_big or c.file.is_binary):
695 if not (c.file_size_too_big or c.file.is_binary):
696 if c.annotate: # annotation has precedence over renderer
696 if c.annotate: # annotation has precedence over renderer
697 c.annotated_lines = filenode_as_annotated_lines_tokens(
697 c.annotated_lines = filenode_as_annotated_lines_tokens(
698 c.file
698 c.file
699 )
699 )
700 else:
700 else:
701 c.renderer = (
701 c.renderer = (
702 c.renderer and h.renderer_from_filename(c.file.path)
702 c.renderer and h.renderer_from_filename(c.file.path)
703 )
703 )
704 if not c.renderer:
704 if not c.renderer:
705 c.lines = filenode_as_lines_tokens(c.file)
705 c.lines = filenode_as_lines_tokens(c.file)
706
706
707 _branch_name, _sha_commit_id, is_head = \
707 _branch_name, _sha_commit_id, is_head = \
708 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
708 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
709 landing_ref=self.db_repo.landing_ref_name)
709 landing_ref=self.db_repo.landing_ref_name)
710 c.on_branch_head = is_head
710 c.on_branch_head = is_head
711
711
712 branch = c.commit.branch if (
712 branch = c.commit.branch if (
713 c.commit.branch and '/' not in c.commit.branch) else None
713 c.commit.branch and '/' not in c.commit.branch) else None
714 c.branch_or_raw_id = branch or c.commit.raw_id
714 c.branch_or_raw_id = branch or c.commit.raw_id
715 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
715 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
716
716
717 author = c.file_last_commit.author
717 author = c.file_last_commit.author
718 c.authors = [[
718 c.authors = [[
719 h.email(author),
719 h.email(author),
720 h.person(author, 'username_or_name_or_email'),
720 h.person(author, 'username_or_name_or_email'),
721 1
721 1
722 ]]
722 ]]
723
723
724 else: # load tree content at path
724 else: # load tree content at path
725 c.file_source_page = 'false'
725 c.file_source_page = 'false'
726 c.authors = []
726 c.authors = []
727 # this loads a simple tree without metadata to speed things up
727 # this loads a simple tree without metadata to speed things up
728 # later via ajax we call repo_nodetree_full and fetch whole
728 # later via ajax we call repo_nodetree_full and fetch whole
729 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
729 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
730
730
731 c.readme_data, c.readme_file = \
731 c.readme_data, c.readme_file = \
732 self._get_readme_data(self.db_repo, c.visual.default_renderer,
732 self._get_readme_data(self.db_repo, c.visual.default_renderer,
733 c.commit.raw_id, f_path)
733 c.commit.raw_id, f_path)
734
734
735 except RepositoryError as e:
735 except RepositoryError as e:
736 h.flash(safe_str(h.escape(e)), category='error')
736 h.flash(h.escape(safe_str(e)), category='error')
737 raise HTTPNotFound()
737 raise HTTPNotFound()
738
738
739 if self.request.environ.get('HTTP_X_PJAX'):
739 if self.request.environ.get('HTTP_X_PJAX'):
740 html = render('rhodecode:templates/files/files_pjax.mako',
740 html = render('rhodecode:templates/files/files_pjax.mako',
741 self._get_template_context(c), self.request)
741 self._get_template_context(c), self.request)
742 else:
742 else:
743 html = render('rhodecode:templates/files/files.mako',
743 html = render('rhodecode:templates/files/files.mako',
744 self._get_template_context(c), self.request)
744 self._get_template_context(c), self.request)
745 return Response(html)
745 return Response(html)
746
746
747 @HasRepoPermissionAnyDecorator(
747 @HasRepoPermissionAnyDecorator(
748 'repository.read', 'repository.write', 'repository.admin')
748 'repository.read', 'repository.write', 'repository.admin')
749 def repo_files_annotated_previous(self):
749 def repo_files_annotated_previous(self):
750 self.load_default_context()
750 self.load_default_context()
751
751
752 commit_id, f_path = self._get_commit_and_path()
752 commit_id, f_path = self._get_commit_and_path()
753 commit = self._get_commit_or_redirect(commit_id)
753 commit = self._get_commit_or_redirect(commit_id)
754 prev_commit_id = commit.raw_id
754 prev_commit_id = commit.raw_id
755 line_anchor = self.request.GET.get('line_anchor')
755 line_anchor = self.request.GET.get('line_anchor')
756 is_file = False
756 is_file = False
757 try:
757 try:
758 _file = commit.get_node(f_path)
758 _file = commit.get_node(f_path)
759 is_file = _file.is_file()
759 is_file = _file.is_file()
760 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
760 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
761 pass
761 pass
762
762
763 if is_file:
763 if is_file:
764 history = commit.get_path_history(f_path)
764 history = commit.get_path_history(f_path)
765 prev_commit_id = history[1].raw_id \
765 prev_commit_id = history[1].raw_id \
766 if len(history) > 1 else prev_commit_id
766 if len(history) > 1 else prev_commit_id
767 prev_url = h.route_path(
767 prev_url = h.route_path(
768 'repo_files:annotated', repo_name=self.db_repo_name,
768 'repo_files:annotated', repo_name=self.db_repo_name,
769 commit_id=prev_commit_id, f_path=f_path,
769 commit_id=prev_commit_id, f_path=f_path,
770 _anchor='L{}'.format(line_anchor))
770 _anchor='L{}'.format(line_anchor))
771
771
772 raise HTTPFound(prev_url)
772 raise HTTPFound(prev_url)
773
773
774 @LoginRequired()
774 @LoginRequired()
775 @HasRepoPermissionAnyDecorator(
775 @HasRepoPermissionAnyDecorator(
776 'repository.read', 'repository.write', 'repository.admin')
776 'repository.read', 'repository.write', 'repository.admin')
777 def repo_nodetree_full(self):
777 def repo_nodetree_full(self):
778 """
778 """
779 Returns rendered html of file tree that contains commit date,
779 Returns rendered html of file tree that contains commit date,
780 author, commit_id for the specified combination of
780 author, commit_id for the specified combination of
781 repo, commit_id and file path
781 repo, commit_id and file path
782 """
782 """
783 c = self.load_default_context()
783 c = self.load_default_context()
784
784
785 commit_id, f_path = self._get_commit_and_path()
785 commit_id, f_path = self._get_commit_and_path()
786 commit = self._get_commit_or_redirect(commit_id)
786 commit = self._get_commit_or_redirect(commit_id)
787 try:
787 try:
788 dir_node = commit.get_node(f_path)
788 dir_node = commit.get_node(f_path)
789 except RepositoryError as e:
789 except RepositoryError as e:
790 return Response('error: {}'.format(h.escape(safe_str(e))))
790 return Response('error: {}'.format(h.escape(safe_str(e))))
791
791
792 if dir_node.is_file():
792 if dir_node.is_file():
793 return Response('')
793 return Response('')
794
794
795 c.file = dir_node
795 c.file = dir_node
796 c.commit = commit
796 c.commit = commit
797 at_rev = self.request.GET.get('at')
797 at_rev = self.request.GET.get('at')
798
798
799 html = self._get_tree_at_commit(
799 html = self._get_tree_at_commit(
800 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
800 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
801
801
802 return Response(html)
802 return Response(html)
803
803
804 def _get_attachement_headers(self, f_path):
804 def _get_attachement_headers(self, f_path):
805 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
805 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
806 safe_path = f_name.replace('"', '\\"')
806 safe_path = f_name.replace('"', '\\"')
807 encoded_path = urllib.quote(f_name)
807 encoded_path = urllib.quote(f_name)
808
808
809 return "attachment; " \
809 return "attachment; " \
810 "filename=\"{}\"; " \
810 "filename=\"{}\"; " \
811 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
811 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
812
812
813 @LoginRequired()
813 @LoginRequired()
814 @HasRepoPermissionAnyDecorator(
814 @HasRepoPermissionAnyDecorator(
815 'repository.read', 'repository.write', 'repository.admin')
815 'repository.read', 'repository.write', 'repository.admin')
816 def repo_file_raw(self):
816 def repo_file_raw(self):
817 """
817 """
818 Action for show as raw, some mimetypes are "rendered",
818 Action for show as raw, some mimetypes are "rendered",
819 those include images, icons.
819 those include images, icons.
820 """
820 """
821 c = self.load_default_context()
821 c = self.load_default_context()
822
822
823 commit_id, f_path = self._get_commit_and_path()
823 commit_id, f_path = self._get_commit_and_path()
824 commit = self._get_commit_or_redirect(commit_id)
824 commit = self._get_commit_or_redirect(commit_id)
825 file_node = self._get_filenode_or_redirect(commit, f_path)
825 file_node = self._get_filenode_or_redirect(commit, f_path)
826
826
827 raw_mimetype_mapping = {
827 raw_mimetype_mapping = {
828 # map original mimetype to a mimetype used for "show as raw"
828 # map original mimetype to a mimetype used for "show as raw"
829 # you can also provide a content-disposition to override the
829 # you can also provide a content-disposition to override the
830 # default "attachment" disposition.
830 # default "attachment" disposition.
831 # orig_type: (new_type, new_dispo)
831 # orig_type: (new_type, new_dispo)
832
832
833 # show images inline:
833 # show images inline:
834 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
834 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
835 # for example render an SVG with javascript inside or even render
835 # for example render an SVG with javascript inside or even render
836 # HTML.
836 # HTML.
837 'image/x-icon': ('image/x-icon', 'inline'),
837 'image/x-icon': ('image/x-icon', 'inline'),
838 'image/png': ('image/png', 'inline'),
838 'image/png': ('image/png', 'inline'),
839 'image/gif': ('image/gif', 'inline'),
839 'image/gif': ('image/gif', 'inline'),
840 'image/jpeg': ('image/jpeg', 'inline'),
840 'image/jpeg': ('image/jpeg', 'inline'),
841 'application/pdf': ('application/pdf', 'inline'),
841 'application/pdf': ('application/pdf', 'inline'),
842 }
842 }
843
843
844 mimetype = file_node.mimetype
844 mimetype = file_node.mimetype
845 try:
845 try:
846 mimetype, disposition = raw_mimetype_mapping[mimetype]
846 mimetype, disposition = raw_mimetype_mapping[mimetype]
847 except KeyError:
847 except KeyError:
848 # we don't know anything special about this, handle it safely
848 # we don't know anything special about this, handle it safely
849 if file_node.is_binary:
849 if file_node.is_binary:
850 # do same as download raw for binary files
850 # do same as download raw for binary files
851 mimetype, disposition = 'application/octet-stream', 'attachment'
851 mimetype, disposition = 'application/octet-stream', 'attachment'
852 else:
852 else:
853 # do not just use the original mimetype, but force text/plain,
853 # do not just use the original mimetype, but force text/plain,
854 # otherwise it would serve text/html and that might be unsafe.
854 # otherwise it would serve text/html and that might be unsafe.
855 # Note: underlying vcs library fakes text/plain mimetype if the
855 # Note: underlying vcs library fakes text/plain mimetype if the
856 # mimetype can not be determined and it thinks it is not
856 # mimetype can not be determined and it thinks it is not
857 # binary.This might lead to erroneous text display in some
857 # binary.This might lead to erroneous text display in some
858 # cases, but helps in other cases, like with text files
858 # cases, but helps in other cases, like with text files
859 # without extension.
859 # without extension.
860 mimetype, disposition = 'text/plain', 'inline'
860 mimetype, disposition = 'text/plain', 'inline'
861
861
862 if disposition == 'attachment':
862 if disposition == 'attachment':
863 disposition = self._get_attachement_headers(f_path)
863 disposition = self._get_attachement_headers(f_path)
864
864
865 stream_content = file_node.stream_bytes()
865 stream_content = file_node.stream_bytes()
866
866
867 response = Response(app_iter=stream_content)
867 response = Response(app_iter=stream_content)
868 response.content_disposition = disposition
868 response.content_disposition = disposition
869 response.content_type = mimetype
869 response.content_type = mimetype
870
870
871 charset = self._get_default_encoding(c)
871 charset = self._get_default_encoding(c)
872 if charset:
872 if charset:
873 response.charset = charset
873 response.charset = charset
874
874
875 return response
875 return response
876
876
877 @LoginRequired()
877 @LoginRequired()
878 @HasRepoPermissionAnyDecorator(
878 @HasRepoPermissionAnyDecorator(
879 'repository.read', 'repository.write', 'repository.admin')
879 'repository.read', 'repository.write', 'repository.admin')
880 def repo_file_download(self):
880 def repo_file_download(self):
881 c = self.load_default_context()
881 c = self.load_default_context()
882
882
883 commit_id, f_path = self._get_commit_and_path()
883 commit_id, f_path = self._get_commit_and_path()
884 commit = self._get_commit_or_redirect(commit_id)
884 commit = self._get_commit_or_redirect(commit_id)
885 file_node = self._get_filenode_or_redirect(commit, f_path)
885 file_node = self._get_filenode_or_redirect(commit, f_path)
886
886
887 if self.request.GET.get('lf'):
887 if self.request.GET.get('lf'):
888 # only if lf get flag is passed, we download this file
888 # only if lf get flag is passed, we download this file
889 # as LFS/Largefile
889 # as LFS/Largefile
890 lf_node = file_node.get_largefile_node()
890 lf_node = file_node.get_largefile_node()
891 if lf_node:
891 if lf_node:
892 # overwrite our pointer with the REAL large-file
892 # overwrite our pointer with the REAL large-file
893 file_node = lf_node
893 file_node = lf_node
894
894
895 disposition = self._get_attachement_headers(f_path)
895 disposition = self._get_attachement_headers(f_path)
896
896
897 stream_content = file_node.stream_bytes()
897 stream_content = file_node.stream_bytes()
898
898
899 response = Response(app_iter=stream_content)
899 response = Response(app_iter=stream_content)
900 response.content_disposition = disposition
900 response.content_disposition = disposition
901 response.content_type = file_node.mimetype
901 response.content_type = file_node.mimetype
902
902
903 charset = self._get_default_encoding(c)
903 charset = self._get_default_encoding(c)
904 if charset:
904 if charset:
905 response.charset = charset
905 response.charset = charset
906
906
907 return response
907 return response
908
908
909 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
909 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
910
910
911 cache_seconds = safe_int(
911 cache_seconds = safe_int(
912 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
912 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
913 cache_on = cache_seconds > 0
913 cache_on = cache_seconds > 0
914 log.debug(
914 log.debug(
915 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
915 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
916 'with caching: %s[TTL: %ss]' % (
916 'with caching: %s[TTL: %ss]' % (
917 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
917 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
918
918
919 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
919 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
920 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
920 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
921
921
922 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
922 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
923 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
923 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
924 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
924 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
925 _repo_id, commit_id, f_path)
925 _repo_id, commit_id, f_path)
926 try:
926 try:
927 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
927 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
928 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
928 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
929 log.exception(safe_str(e))
929 log.exception(safe_str(e))
930 h.flash(safe_str(h.escape(e)), category='error')
930 h.flash(h.escape(safe_str(e)), category='error')
931 raise HTTPFound(h.route_path(
931 raise HTTPFound(h.route_path(
932 'repo_files', repo_name=self.db_repo_name,
932 'repo_files', repo_name=self.db_repo_name,
933 commit_id='tip', f_path='/'))
933 commit_id='tip', f_path='/'))
934
934
935 return _d + _f
935 return _d + _f
936
936
937 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
937 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
938 commit_id, f_path)
938 commit_id, f_path)
939 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
939 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
940
940
941 @LoginRequired()
941 @LoginRequired()
942 @HasRepoPermissionAnyDecorator(
942 @HasRepoPermissionAnyDecorator(
943 'repository.read', 'repository.write', 'repository.admin')
943 'repository.read', 'repository.write', 'repository.admin')
944 def repo_nodelist(self):
944 def repo_nodelist(self):
945 self.load_default_context()
945 self.load_default_context()
946
946
947 commit_id, f_path = self._get_commit_and_path()
947 commit_id, f_path = self._get_commit_and_path()
948 commit = self._get_commit_or_redirect(commit_id)
948 commit = self._get_commit_or_redirect(commit_id)
949
949
950 metadata = self._get_nodelist_at_commit(
950 metadata = self._get_nodelist_at_commit(
951 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
951 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
952 return {'nodes': metadata}
952 return {'nodes': metadata}
953
953
954 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
954 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
955 items = []
955 items = []
956 for name, commit_id in branches_or_tags.items():
956 for name, commit_id in branches_or_tags.items():
957 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
957 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
958 items.append((sym_ref, name, ref_type))
958 items.append((sym_ref, name, ref_type))
959 return items
959 return items
960
960
961 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
961 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
962 return commit_id
962 return commit_id
963
963
964 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
964 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
965 return commit_id
965 return commit_id
966
966
967 # NOTE(dan): old code we used in "diff" mode compare
967 # NOTE(dan): old code we used in "diff" mode compare
968 new_f_path = vcspath.join(name, f_path)
968 new_f_path = vcspath.join(name, f_path)
969 return u'%s@%s' % (new_f_path, commit_id)
969 return u'%s@%s' % (new_f_path, commit_id)
970
970
971 def _get_node_history(self, commit_obj, f_path, commits=None):
971 def _get_node_history(self, commit_obj, f_path, commits=None):
972 """
972 """
973 get commit history for given node
973 get commit history for given node
974
974
975 :param commit_obj: commit to calculate history
975 :param commit_obj: commit to calculate history
976 :param f_path: path for node to calculate history for
976 :param f_path: path for node to calculate history for
977 :param commits: if passed don't calculate history and take
977 :param commits: if passed don't calculate history and take
978 commits defined in this list
978 commits defined in this list
979 """
979 """
980 _ = self.request.translate
980 _ = self.request.translate
981
981
982 # calculate history based on tip
982 # calculate history based on tip
983 tip = self.rhodecode_vcs_repo.get_commit()
983 tip = self.rhodecode_vcs_repo.get_commit()
984 if commits is None:
984 if commits is None:
985 pre_load = ["author", "branch"]
985 pre_load = ["author", "branch"]
986 try:
986 try:
987 commits = tip.get_path_history(f_path, pre_load=pre_load)
987 commits = tip.get_path_history(f_path, pre_load=pre_load)
988 except (NodeDoesNotExistError, CommitError):
988 except (NodeDoesNotExistError, CommitError):
989 # this node is not present at tip!
989 # this node is not present at tip!
990 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
990 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
991
991
992 history = []
992 history = []
993 commits_group = ([], _("Changesets"))
993 commits_group = ([], _("Changesets"))
994 for commit in commits:
994 for commit in commits:
995 branch = ' (%s)' % commit.branch if commit.branch else ''
995 branch = ' (%s)' % commit.branch if commit.branch else ''
996 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
996 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
997 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
997 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
998 history.append(commits_group)
998 history.append(commits_group)
999
999
1000 symbolic_reference = self._symbolic_reference
1000 symbolic_reference = self._symbolic_reference
1001
1001
1002 if self.rhodecode_vcs_repo.alias == 'svn':
1002 if self.rhodecode_vcs_repo.alias == 'svn':
1003 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
1003 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
1004 f_path, self.rhodecode_vcs_repo)
1004 f_path, self.rhodecode_vcs_repo)
1005 if adjusted_f_path != f_path:
1005 if adjusted_f_path != f_path:
1006 log.debug(
1006 log.debug(
1007 'Recognized svn tag or branch in file "%s", using svn '
1007 'Recognized svn tag or branch in file "%s", using svn '
1008 'specific symbolic references', f_path)
1008 'specific symbolic references', f_path)
1009 f_path = adjusted_f_path
1009 f_path = adjusted_f_path
1010 symbolic_reference = self._symbolic_reference_svn
1010 symbolic_reference = self._symbolic_reference_svn
1011
1011
1012 branches = self._create_references(
1012 branches = self._create_references(
1013 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1013 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1014 branches_group = (branches, _("Branches"))
1014 branches_group = (branches, _("Branches"))
1015
1015
1016 tags = self._create_references(
1016 tags = self._create_references(
1017 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1017 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1018 tags_group = (tags, _("Tags"))
1018 tags_group = (tags, _("Tags"))
1019
1019
1020 history.append(branches_group)
1020 history.append(branches_group)
1021 history.append(tags_group)
1021 history.append(tags_group)
1022
1022
1023 return history, commits
1023 return history, commits
1024
1024
1025 @LoginRequired()
1025 @LoginRequired()
1026 @HasRepoPermissionAnyDecorator(
1026 @HasRepoPermissionAnyDecorator(
1027 'repository.read', 'repository.write', 'repository.admin')
1027 'repository.read', 'repository.write', 'repository.admin')
1028 def repo_file_history(self):
1028 def repo_file_history(self):
1029 self.load_default_context()
1029 self.load_default_context()
1030
1030
1031 commit_id, f_path = self._get_commit_and_path()
1031 commit_id, f_path = self._get_commit_and_path()
1032 commit = self._get_commit_or_redirect(commit_id)
1032 commit = self._get_commit_or_redirect(commit_id)
1033 file_node = self._get_filenode_or_redirect(commit, f_path)
1033 file_node = self._get_filenode_or_redirect(commit, f_path)
1034
1034
1035 if file_node.is_file():
1035 if file_node.is_file():
1036 file_history, _hist = self._get_node_history(commit, f_path)
1036 file_history, _hist = self._get_node_history(commit, f_path)
1037
1037
1038 res = []
1038 res = []
1039 for section_items, section in file_history:
1039 for section_items, section in file_history:
1040 items = []
1040 items = []
1041 for obj_id, obj_text, obj_type in section_items:
1041 for obj_id, obj_text, obj_type in section_items:
1042 at_rev = ''
1042 at_rev = ''
1043 if obj_type in ['branch', 'bookmark', 'tag']:
1043 if obj_type in ['branch', 'bookmark', 'tag']:
1044 at_rev = obj_text
1044 at_rev = obj_text
1045 entry = {
1045 entry = {
1046 'id': obj_id,
1046 'id': obj_id,
1047 'text': obj_text,
1047 'text': obj_text,
1048 'type': obj_type,
1048 'type': obj_type,
1049 'at_rev': at_rev
1049 'at_rev': at_rev
1050 }
1050 }
1051
1051
1052 items.append(entry)
1052 items.append(entry)
1053
1053
1054 res.append({
1054 res.append({
1055 'text': section,
1055 'text': section,
1056 'children': items
1056 'children': items
1057 })
1057 })
1058
1058
1059 data = {
1059 data = {
1060 'more': False,
1060 'more': False,
1061 'results': res
1061 'results': res
1062 }
1062 }
1063 return data
1063 return data
1064
1064
1065 log.warning('Cannot fetch history for directory')
1065 log.warning('Cannot fetch history for directory')
1066 raise HTTPBadRequest()
1066 raise HTTPBadRequest()
1067
1067
1068 @LoginRequired()
1068 @LoginRequired()
1069 @HasRepoPermissionAnyDecorator(
1069 @HasRepoPermissionAnyDecorator(
1070 'repository.read', 'repository.write', 'repository.admin')
1070 'repository.read', 'repository.write', 'repository.admin')
1071 def repo_file_authors(self):
1071 def repo_file_authors(self):
1072 c = self.load_default_context()
1072 c = self.load_default_context()
1073
1073
1074 commit_id, f_path = self._get_commit_and_path()
1074 commit_id, f_path = self._get_commit_and_path()
1075 commit = self._get_commit_or_redirect(commit_id)
1075 commit = self._get_commit_or_redirect(commit_id)
1076 file_node = self._get_filenode_or_redirect(commit, f_path)
1076 file_node = self._get_filenode_or_redirect(commit, f_path)
1077
1077
1078 if not file_node.is_file():
1078 if not file_node.is_file():
1079 raise HTTPBadRequest()
1079 raise HTTPBadRequest()
1080
1080
1081 c.file_last_commit = file_node.last_commit
1081 c.file_last_commit = file_node.last_commit
1082 if self.request.GET.get('annotate') == '1':
1082 if self.request.GET.get('annotate') == '1':
1083 # use _hist from annotation if annotation mode is on
1083 # use _hist from annotation if annotation mode is on
1084 commit_ids = set(x[1] for x in file_node.annotate)
1084 commit_ids = set(x[1] for x in file_node.annotate)
1085 _hist = (
1085 _hist = (
1086 self.rhodecode_vcs_repo.get_commit(commit_id)
1086 self.rhodecode_vcs_repo.get_commit(commit_id)
1087 for commit_id in commit_ids)
1087 for commit_id in commit_ids)
1088 else:
1088 else:
1089 _f_history, _hist = self._get_node_history(commit, f_path)
1089 _f_history, _hist = self._get_node_history(commit, f_path)
1090 c.file_author = False
1090 c.file_author = False
1091
1091
1092 unique = collections.OrderedDict()
1092 unique = collections.OrderedDict()
1093 for commit in _hist:
1093 for commit in _hist:
1094 author = commit.author
1094 author = commit.author
1095 if author not in unique:
1095 if author not in unique:
1096 unique[commit.author] = [
1096 unique[commit.author] = [
1097 h.email(author),
1097 h.email(author),
1098 h.person(author, 'username_or_name_or_email'),
1098 h.person(author, 'username_or_name_or_email'),
1099 1 # counter
1099 1 # counter
1100 ]
1100 ]
1101
1101
1102 else:
1102 else:
1103 # increase counter
1103 # increase counter
1104 unique[commit.author][2] += 1
1104 unique[commit.author][2] += 1
1105
1105
1106 c.authors = [val for val in unique.values()]
1106 c.authors = [val for val in unique.values()]
1107
1107
1108 return self._get_template_context(c)
1108 return self._get_template_context(c)
1109
1109
1110 @LoginRequired()
1110 @LoginRequired()
1111 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1111 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1112 def repo_files_check_head(self):
1112 def repo_files_check_head(self):
1113 self.load_default_context()
1113 self.load_default_context()
1114
1114
1115 commit_id, f_path = self._get_commit_and_path()
1115 commit_id, f_path = self._get_commit_and_path()
1116 _branch_name, _sha_commit_id, is_head = \
1116 _branch_name, _sha_commit_id, is_head = \
1117 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1117 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1118 landing_ref=self.db_repo.landing_ref_name)
1118 landing_ref=self.db_repo.landing_ref_name)
1119
1119
1120 new_path = self.request.POST.get('path')
1120 new_path = self.request.POST.get('path')
1121 operation = self.request.POST.get('operation')
1121 operation = self.request.POST.get('operation')
1122 path_exist = ''
1122 path_exist = ''
1123
1123
1124 if new_path and operation in ['create', 'upload']:
1124 if new_path and operation in ['create', 'upload']:
1125 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1125 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1126 try:
1126 try:
1127 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1127 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1128 # NOTE(dan): construct whole path without leading /
1128 # NOTE(dan): construct whole path without leading /
1129 file_node = commit_obj.get_node(new_f_path)
1129 file_node = commit_obj.get_node(new_f_path)
1130 if file_node is not None:
1130 if file_node is not None:
1131 path_exist = new_f_path
1131 path_exist = new_f_path
1132 except EmptyRepositoryError:
1132 except EmptyRepositoryError:
1133 pass
1133 pass
1134 except Exception:
1134 except Exception:
1135 pass
1135 pass
1136
1136
1137 return {
1137 return {
1138 'branch': _branch_name,
1138 'branch': _branch_name,
1139 'sha': _sha_commit_id,
1139 'sha': _sha_commit_id,
1140 'is_head': is_head,
1140 'is_head': is_head,
1141 'path_exists': path_exist
1141 'path_exists': path_exist
1142 }
1142 }
1143
1143
1144 @LoginRequired()
1144 @LoginRequired()
1145 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1145 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1146 def repo_files_remove_file(self):
1146 def repo_files_remove_file(self):
1147 _ = self.request.translate
1147 _ = self.request.translate
1148 c = self.load_default_context()
1148 c = self.load_default_context()
1149 commit_id, f_path = self._get_commit_and_path()
1149 commit_id, f_path = self._get_commit_and_path()
1150
1150
1151 self._ensure_not_locked()
1151 self._ensure_not_locked()
1152 _branch_name, _sha_commit_id, is_head = \
1152 _branch_name, _sha_commit_id, is_head = \
1153 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1153 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1154 landing_ref=self.db_repo.landing_ref_name)
1154 landing_ref=self.db_repo.landing_ref_name)
1155
1155
1156 self.forbid_non_head(is_head, f_path)
1156 self.forbid_non_head(is_head, f_path)
1157 self.check_branch_permission(_branch_name)
1157 self.check_branch_permission(_branch_name)
1158
1158
1159 c.commit = self._get_commit_or_redirect(commit_id)
1159 c.commit = self._get_commit_or_redirect(commit_id)
1160 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1160 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1161
1161
1162 c.default_message = _(
1162 c.default_message = _(
1163 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1163 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1164 c.f_path = f_path
1164 c.f_path = f_path
1165
1165
1166 return self._get_template_context(c)
1166 return self._get_template_context(c)
1167
1167
1168 @LoginRequired()
1168 @LoginRequired()
1169 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1169 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1170 @CSRFRequired()
1170 @CSRFRequired()
1171 def repo_files_delete_file(self):
1171 def repo_files_delete_file(self):
1172 _ = self.request.translate
1172 _ = self.request.translate
1173
1173
1174 c = self.load_default_context()
1174 c = self.load_default_context()
1175 commit_id, f_path = self._get_commit_and_path()
1175 commit_id, f_path = self._get_commit_and_path()
1176
1176
1177 self._ensure_not_locked()
1177 self._ensure_not_locked()
1178 _branch_name, _sha_commit_id, is_head = \
1178 _branch_name, _sha_commit_id, is_head = \
1179 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1179 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1180 landing_ref=self.db_repo.landing_ref_name)
1180 landing_ref=self.db_repo.landing_ref_name)
1181
1181
1182 self.forbid_non_head(is_head, f_path)
1182 self.forbid_non_head(is_head, f_path)
1183 self.check_branch_permission(_branch_name)
1183 self.check_branch_permission(_branch_name)
1184
1184
1185 c.commit = self._get_commit_or_redirect(commit_id)
1185 c.commit = self._get_commit_or_redirect(commit_id)
1186 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1186 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1187
1187
1188 c.default_message = _(
1188 c.default_message = _(
1189 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1189 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1190 c.f_path = f_path
1190 c.f_path = f_path
1191 node_path = f_path
1191 node_path = f_path
1192 author = self._rhodecode_db_user.full_contact
1192 author = self._rhodecode_db_user.full_contact
1193 message = self.request.POST.get('message') or c.default_message
1193 message = self.request.POST.get('message') or c.default_message
1194 try:
1194 try:
1195 nodes = {
1195 nodes = {
1196 node_path: {
1196 node_path: {
1197 'content': ''
1197 'content': ''
1198 }
1198 }
1199 }
1199 }
1200 ScmModel().delete_nodes(
1200 ScmModel().delete_nodes(
1201 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1201 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1202 message=message,
1202 message=message,
1203 nodes=nodes,
1203 nodes=nodes,
1204 parent_commit=c.commit,
1204 parent_commit=c.commit,
1205 author=author,
1205 author=author,
1206 )
1206 )
1207
1207
1208 h.flash(
1208 h.flash(
1209 _('Successfully deleted file `{}`').format(
1209 _('Successfully deleted file `{}`').format(
1210 h.escape(f_path)), category='success')
1210 h.escape(f_path)), category='success')
1211 except Exception:
1211 except Exception:
1212 log.exception('Error during commit operation')
1212 log.exception('Error during commit operation')
1213 h.flash(_('Error occurred during commit'), category='error')
1213 h.flash(_('Error occurred during commit'), category='error')
1214 raise HTTPFound(
1214 raise HTTPFound(
1215 h.route_path('repo_commit', repo_name=self.db_repo_name,
1215 h.route_path('repo_commit', repo_name=self.db_repo_name,
1216 commit_id='tip'))
1216 commit_id='tip'))
1217
1217
1218 @LoginRequired()
1218 @LoginRequired()
1219 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1219 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1220 def repo_files_edit_file(self):
1220 def repo_files_edit_file(self):
1221 _ = self.request.translate
1221 _ = self.request.translate
1222 c = self.load_default_context()
1222 c = self.load_default_context()
1223 commit_id, f_path = self._get_commit_and_path()
1223 commit_id, f_path = self._get_commit_and_path()
1224
1224
1225 self._ensure_not_locked()
1225 self._ensure_not_locked()
1226 _branch_name, _sha_commit_id, is_head = \
1226 _branch_name, _sha_commit_id, is_head = \
1227 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1227 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1228 landing_ref=self.db_repo.landing_ref_name)
1228 landing_ref=self.db_repo.landing_ref_name)
1229
1229
1230 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1230 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1231 self.check_branch_permission(_branch_name, commit_id=commit_id)
1231 self.check_branch_permission(_branch_name, commit_id=commit_id)
1232
1232
1233 c.commit = self._get_commit_or_redirect(commit_id)
1233 c.commit = self._get_commit_or_redirect(commit_id)
1234 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1234 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1235
1235
1236 if c.file.is_binary:
1236 if c.file.is_binary:
1237 files_url = h.route_path(
1237 files_url = h.route_path(
1238 'repo_files',
1238 'repo_files',
1239 repo_name=self.db_repo_name,
1239 repo_name=self.db_repo_name,
1240 commit_id=c.commit.raw_id, f_path=f_path)
1240 commit_id=c.commit.raw_id, f_path=f_path)
1241 raise HTTPFound(files_url)
1241 raise HTTPFound(files_url)
1242
1242
1243 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1243 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1244 c.f_path = f_path
1244 c.f_path = f_path
1245
1245
1246 return self._get_template_context(c)
1246 return self._get_template_context(c)
1247
1247
1248 @LoginRequired()
1248 @LoginRequired()
1249 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1249 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1250 @CSRFRequired()
1250 @CSRFRequired()
1251 def repo_files_update_file(self):
1251 def repo_files_update_file(self):
1252 _ = self.request.translate
1252 _ = self.request.translate
1253 c = self.load_default_context()
1253 c = self.load_default_context()
1254 commit_id, f_path = self._get_commit_and_path()
1254 commit_id, f_path = self._get_commit_and_path()
1255
1255
1256 self._ensure_not_locked()
1256 self._ensure_not_locked()
1257
1257
1258 c.commit = self._get_commit_or_redirect(commit_id)
1258 c.commit = self._get_commit_or_redirect(commit_id)
1259 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1259 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1260
1260
1261 if c.file.is_binary:
1261 if c.file.is_binary:
1262 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1262 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1263 commit_id=c.commit.raw_id, f_path=f_path))
1263 commit_id=c.commit.raw_id, f_path=f_path))
1264
1264
1265 _branch_name, _sha_commit_id, is_head = \
1265 _branch_name, _sha_commit_id, is_head = \
1266 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1266 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1267 landing_ref=self.db_repo.landing_ref_name)
1267 landing_ref=self.db_repo.landing_ref_name)
1268
1268
1269 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1269 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1270 self.check_branch_permission(_branch_name, commit_id=commit_id)
1270 self.check_branch_permission(_branch_name, commit_id=commit_id)
1271
1271
1272 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1272 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1273 c.f_path = f_path
1273 c.f_path = f_path
1274
1274
1275 old_content = c.file.content
1275 old_content = c.file.content
1276 sl = old_content.splitlines(1)
1276 sl = old_content.splitlines(1)
1277 first_line = sl[0] if sl else ''
1277 first_line = sl[0] if sl else ''
1278
1278
1279 r_post = self.request.POST
1279 r_post = self.request.POST
1280 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1280 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1281 line_ending_mode = detect_mode(first_line, 0)
1281 line_ending_mode = detect_mode(first_line, 0)
1282 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1282 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1283
1283
1284 message = r_post.get('message') or c.default_message
1284 message = r_post.get('message') or c.default_message
1285 org_node_path = c.file.unicode_path
1285 org_node_path = c.file.unicode_path
1286 filename = r_post['filename']
1286 filename = r_post['filename']
1287
1287
1288 root_path = c.file.dir_path
1288 root_path = c.file.dir_path
1289 pure_path = self.create_pure_path(root_path, filename)
1289 pure_path = self.create_pure_path(root_path, filename)
1290 node_path = safe_unicode(bytes(pure_path))
1290 node_path = safe_unicode(bytes(pure_path))
1291
1291
1292 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1292 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1293 commit_id=commit_id)
1293 commit_id=commit_id)
1294 if content == old_content and node_path == org_node_path:
1294 if content == old_content and node_path == org_node_path:
1295 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1295 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1296 category='warning')
1296 category='warning')
1297 raise HTTPFound(default_redirect_url)
1297 raise HTTPFound(default_redirect_url)
1298
1298
1299 try:
1299 try:
1300 mapping = {
1300 mapping = {
1301 org_node_path: {
1301 org_node_path: {
1302 'org_filename': org_node_path,
1302 'org_filename': org_node_path,
1303 'filename': node_path,
1303 'filename': node_path,
1304 'content': content,
1304 'content': content,
1305 'lexer': '',
1305 'lexer': '',
1306 'op': 'mod',
1306 'op': 'mod',
1307 'mode': c.file.mode
1307 'mode': c.file.mode
1308 }
1308 }
1309 }
1309 }
1310
1310
1311 commit = ScmModel().update_nodes(
1311 commit = ScmModel().update_nodes(
1312 user=self._rhodecode_db_user.user_id,
1312 user=self._rhodecode_db_user.user_id,
1313 repo=self.db_repo,
1313 repo=self.db_repo,
1314 message=message,
1314 message=message,
1315 nodes=mapping,
1315 nodes=mapping,
1316 parent_commit=c.commit,
1316 parent_commit=c.commit,
1317 )
1317 )
1318
1318
1319 h.flash(_('Successfully committed changes to file `{}`').format(
1319 h.flash(_('Successfully committed changes to file `{}`').format(
1320 h.escape(f_path)), category='success')
1320 h.escape(f_path)), category='success')
1321 default_redirect_url = h.route_path(
1321 default_redirect_url = h.route_path(
1322 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1322 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1323
1323
1324 except Exception:
1324 except Exception:
1325 log.exception('Error occurred during commit')
1325 log.exception('Error occurred during commit')
1326 h.flash(_('Error occurred during commit'), category='error')
1326 h.flash(_('Error occurred during commit'), category='error')
1327
1327
1328 raise HTTPFound(default_redirect_url)
1328 raise HTTPFound(default_redirect_url)
1329
1329
1330 @LoginRequired()
1330 @LoginRequired()
1331 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1331 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1332 def repo_files_add_file(self):
1332 def repo_files_add_file(self):
1333 _ = self.request.translate
1333 _ = self.request.translate
1334 c = self.load_default_context()
1334 c = self.load_default_context()
1335 commit_id, f_path = self._get_commit_and_path()
1335 commit_id, f_path = self._get_commit_and_path()
1336
1336
1337 self._ensure_not_locked()
1337 self._ensure_not_locked()
1338
1338
1339 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1339 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1340 if c.commit is None:
1340 if c.commit is None:
1341 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1341 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1342
1342
1343 if self.rhodecode_vcs_repo.is_empty():
1343 if self.rhodecode_vcs_repo.is_empty():
1344 # for empty repository we cannot check for current branch, we rely on
1344 # for empty repository we cannot check for current branch, we rely on
1345 # c.commit.branch instead
1345 # c.commit.branch instead
1346 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1346 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1347 else:
1347 else:
1348 _branch_name, _sha_commit_id, is_head = \
1348 _branch_name, _sha_commit_id, is_head = \
1349 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1349 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1350 landing_ref=self.db_repo.landing_ref_name)
1350 landing_ref=self.db_repo.landing_ref_name)
1351
1351
1352 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1352 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1353 self.check_branch_permission(_branch_name, commit_id=commit_id)
1353 self.check_branch_permission(_branch_name, commit_id=commit_id)
1354
1354
1355 c.default_message = (_('Added file via RhodeCode Enterprise'))
1355 c.default_message = (_('Added file via RhodeCode Enterprise'))
1356 c.f_path = f_path.lstrip('/') # ensure not relative path
1356 c.f_path = f_path.lstrip('/') # ensure not relative path
1357
1357
1358 return self._get_template_context(c)
1358 return self._get_template_context(c)
1359
1359
1360 @LoginRequired()
1360 @LoginRequired()
1361 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1361 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1362 @CSRFRequired()
1362 @CSRFRequired()
1363 def repo_files_create_file(self):
1363 def repo_files_create_file(self):
1364 _ = self.request.translate
1364 _ = self.request.translate
1365 c = self.load_default_context()
1365 c = self.load_default_context()
1366 commit_id, f_path = self._get_commit_and_path()
1366 commit_id, f_path = self._get_commit_and_path()
1367
1367
1368 self._ensure_not_locked()
1368 self._ensure_not_locked()
1369
1369
1370 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1370 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1371 if c.commit is None:
1371 if c.commit is None:
1372 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1372 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1373
1373
1374 # calculate redirect URL
1374 # calculate redirect URL
1375 if self.rhodecode_vcs_repo.is_empty():
1375 if self.rhodecode_vcs_repo.is_empty():
1376 default_redirect_url = h.route_path(
1376 default_redirect_url = h.route_path(
1377 'repo_summary', repo_name=self.db_repo_name)
1377 'repo_summary', repo_name=self.db_repo_name)
1378 else:
1378 else:
1379 default_redirect_url = h.route_path(
1379 default_redirect_url = h.route_path(
1380 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1380 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1381
1381
1382 if self.rhodecode_vcs_repo.is_empty():
1382 if self.rhodecode_vcs_repo.is_empty():
1383 # for empty repository we cannot check for current branch, we rely on
1383 # for empty repository we cannot check for current branch, we rely on
1384 # c.commit.branch instead
1384 # c.commit.branch instead
1385 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1385 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1386 else:
1386 else:
1387 _branch_name, _sha_commit_id, is_head = \
1387 _branch_name, _sha_commit_id, is_head = \
1388 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1388 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1389 landing_ref=self.db_repo.landing_ref_name)
1389 landing_ref=self.db_repo.landing_ref_name)
1390
1390
1391 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1391 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1392 self.check_branch_permission(_branch_name, commit_id=commit_id)
1392 self.check_branch_permission(_branch_name, commit_id=commit_id)
1393
1393
1394 c.default_message = (_('Added file via RhodeCode Enterprise'))
1394 c.default_message = (_('Added file via RhodeCode Enterprise'))
1395 c.f_path = f_path
1395 c.f_path = f_path
1396
1396
1397 r_post = self.request.POST
1397 r_post = self.request.POST
1398 message = r_post.get('message') or c.default_message
1398 message = r_post.get('message') or c.default_message
1399 filename = r_post.get('filename')
1399 filename = r_post.get('filename')
1400 unix_mode = 0
1400 unix_mode = 0
1401 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1401 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1402
1402
1403 if not filename:
1403 if not filename:
1404 # If there's no commit, redirect to repo summary
1404 # If there's no commit, redirect to repo summary
1405 if type(c.commit) is EmptyCommit:
1405 if type(c.commit) is EmptyCommit:
1406 redirect_url = h.route_path(
1406 redirect_url = h.route_path(
1407 'repo_summary', repo_name=self.db_repo_name)
1407 'repo_summary', repo_name=self.db_repo_name)
1408 else:
1408 else:
1409 redirect_url = default_redirect_url
1409 redirect_url = default_redirect_url
1410 h.flash(_('No filename specified'), category='warning')
1410 h.flash(_('No filename specified'), category='warning')
1411 raise HTTPFound(redirect_url)
1411 raise HTTPFound(redirect_url)
1412
1412
1413 root_path = f_path
1413 root_path = f_path
1414 pure_path = self.create_pure_path(root_path, filename)
1414 pure_path = self.create_pure_path(root_path, filename)
1415 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1415 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1416
1416
1417 author = self._rhodecode_db_user.full_contact
1417 author = self._rhodecode_db_user.full_contact
1418 nodes = {
1418 nodes = {
1419 node_path: {
1419 node_path: {
1420 'content': content
1420 'content': content
1421 }
1421 }
1422 }
1422 }
1423
1423
1424 try:
1424 try:
1425
1425
1426 commit = ScmModel().create_nodes(
1426 commit = ScmModel().create_nodes(
1427 user=self._rhodecode_db_user.user_id,
1427 user=self._rhodecode_db_user.user_id,
1428 repo=self.db_repo,
1428 repo=self.db_repo,
1429 message=message,
1429 message=message,
1430 nodes=nodes,
1430 nodes=nodes,
1431 parent_commit=c.commit,
1431 parent_commit=c.commit,
1432 author=author,
1432 author=author,
1433 )
1433 )
1434
1434
1435 h.flash(_('Successfully committed new file `{}`').format(
1435 h.flash(_('Successfully committed new file `{}`').format(
1436 h.escape(node_path)), category='success')
1436 h.escape(node_path)), category='success')
1437
1437
1438 default_redirect_url = h.route_path(
1438 default_redirect_url = h.route_path(
1439 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1439 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1440
1440
1441 except NonRelativePathError:
1441 except NonRelativePathError:
1442 log.exception('Non Relative path found')
1442 log.exception('Non Relative path found')
1443 h.flash(_('The location specified must be a relative path and must not '
1443 h.flash(_('The location specified must be a relative path and must not '
1444 'contain .. in the path'), category='warning')
1444 'contain .. in the path'), category='warning')
1445 raise HTTPFound(default_redirect_url)
1445 raise HTTPFound(default_redirect_url)
1446 except (NodeError, NodeAlreadyExistsError) as e:
1446 except (NodeError, NodeAlreadyExistsError) as e:
1447 h.flash(_(h.escape(e)), category='error')
1447 h.flash(h.escape(safe_str(e)), category='error')
1448 except Exception:
1448 except Exception:
1449 log.exception('Error occurred during commit')
1449 log.exception('Error occurred during commit')
1450 h.flash(_('Error occurred during commit'), category='error')
1450 h.flash(_('Error occurred during commit'), category='error')
1451
1451
1452 raise HTTPFound(default_redirect_url)
1452 raise HTTPFound(default_redirect_url)
1453
1453
1454 @LoginRequired()
1454 @LoginRequired()
1455 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1455 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1456 @CSRFRequired()
1456 @CSRFRequired()
1457 def repo_files_upload_file(self):
1457 def repo_files_upload_file(self):
1458 _ = self.request.translate
1458 _ = self.request.translate
1459 c = self.load_default_context()
1459 c = self.load_default_context()
1460 commit_id, f_path = self._get_commit_and_path()
1460 commit_id, f_path = self._get_commit_and_path()
1461
1461
1462 self._ensure_not_locked()
1462 self._ensure_not_locked()
1463
1463
1464 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1464 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1465 if c.commit is None:
1465 if c.commit is None:
1466 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1466 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1467
1467
1468 # calculate redirect URL
1468 # calculate redirect URL
1469 if self.rhodecode_vcs_repo.is_empty():
1469 if self.rhodecode_vcs_repo.is_empty():
1470 default_redirect_url = h.route_path(
1470 default_redirect_url = h.route_path(
1471 'repo_summary', repo_name=self.db_repo_name)
1471 'repo_summary', repo_name=self.db_repo_name)
1472 else:
1472 else:
1473 default_redirect_url = h.route_path(
1473 default_redirect_url = h.route_path(
1474 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1474 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1475
1475
1476 if self.rhodecode_vcs_repo.is_empty():
1476 if self.rhodecode_vcs_repo.is_empty():
1477 # for empty repository we cannot check for current branch, we rely on
1477 # for empty repository we cannot check for current branch, we rely on
1478 # c.commit.branch instead
1478 # c.commit.branch instead
1479 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1479 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1480 else:
1480 else:
1481 _branch_name, _sha_commit_id, is_head = \
1481 _branch_name, _sha_commit_id, is_head = \
1482 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1482 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1483 landing_ref=self.db_repo.landing_ref_name)
1483 landing_ref=self.db_repo.landing_ref_name)
1484
1484
1485 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1485 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1486 if error:
1486 if error:
1487 return {
1487 return {
1488 'error': error,
1488 'error': error,
1489 'redirect_url': default_redirect_url
1489 'redirect_url': default_redirect_url
1490 }
1490 }
1491 error = self.check_branch_permission(_branch_name, json_mode=True)
1491 error = self.check_branch_permission(_branch_name, json_mode=True)
1492 if error:
1492 if error:
1493 return {
1493 return {
1494 'error': error,
1494 'error': error,
1495 'redirect_url': default_redirect_url
1495 'redirect_url': default_redirect_url
1496 }
1496 }
1497
1497
1498 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1498 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1499 c.f_path = f_path
1499 c.f_path = f_path
1500
1500
1501 r_post = self.request.POST
1501 r_post = self.request.POST
1502
1502
1503 message = c.default_message
1503 message = c.default_message
1504 user_message = r_post.getall('message')
1504 user_message = r_post.getall('message')
1505 if isinstance(user_message, list) and user_message:
1505 if isinstance(user_message, list) and user_message:
1506 # we take the first from duplicated results if it's not empty
1506 # we take the first from duplicated results if it's not empty
1507 message = user_message[0] if user_message[0] else message
1507 message = user_message[0] if user_message[0] else message
1508
1508
1509 nodes = {}
1509 nodes = {}
1510
1510
1511 for file_obj in r_post.getall('files_upload') or []:
1511 for file_obj in r_post.getall('files_upload') or []:
1512 content = file_obj.file
1512 content = file_obj.file
1513 filename = file_obj.filename
1513 filename = file_obj.filename
1514
1514
1515 root_path = f_path
1515 root_path = f_path
1516 pure_path = self.create_pure_path(root_path, filename)
1516 pure_path = self.create_pure_path(root_path, filename)
1517 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1517 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1518
1518
1519 nodes[node_path] = {
1519 nodes[node_path] = {
1520 'content': content
1520 'content': content
1521 }
1521 }
1522
1522
1523 if not nodes:
1523 if not nodes:
1524 error = 'missing files'
1524 error = 'missing files'
1525 return {
1525 return {
1526 'error': error,
1526 'error': error,
1527 'redirect_url': default_redirect_url
1527 'redirect_url': default_redirect_url
1528 }
1528 }
1529
1529
1530 author = self._rhodecode_db_user.full_contact
1530 author = self._rhodecode_db_user.full_contact
1531
1531
1532 try:
1532 try:
1533 commit = ScmModel().create_nodes(
1533 commit = ScmModel().create_nodes(
1534 user=self._rhodecode_db_user.user_id,
1534 user=self._rhodecode_db_user.user_id,
1535 repo=self.db_repo,
1535 repo=self.db_repo,
1536 message=message,
1536 message=message,
1537 nodes=nodes,
1537 nodes=nodes,
1538 parent_commit=c.commit,
1538 parent_commit=c.commit,
1539 author=author,
1539 author=author,
1540 )
1540 )
1541 if len(nodes) == 1:
1541 if len(nodes) == 1:
1542 flash_message = _('Successfully committed {} new files').format(len(nodes))
1542 flash_message = _('Successfully committed {} new files').format(len(nodes))
1543 else:
1543 else:
1544 flash_message = _('Successfully committed 1 new file')
1544 flash_message = _('Successfully committed 1 new file')
1545
1545
1546 h.flash(flash_message, category='success')
1546 h.flash(flash_message, category='success')
1547
1547
1548 default_redirect_url = h.route_path(
1548 default_redirect_url = h.route_path(
1549 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1549 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1550
1550
1551 except NonRelativePathError:
1551 except NonRelativePathError:
1552 log.exception('Non Relative path found')
1552 log.exception('Non Relative path found')
1553 error = _('The location specified must be a relative path and must not '
1553 error = _('The location specified must be a relative path and must not '
1554 'contain .. in the path')
1554 'contain .. in the path')
1555 h.flash(error, category='warning')
1555 h.flash(error, category='warning')
1556
1556
1557 return {
1557 return {
1558 'error': error,
1558 'error': error,
1559 'redirect_url': default_redirect_url
1559 'redirect_url': default_redirect_url
1560 }
1560 }
1561 except (NodeError, NodeAlreadyExistsError) as e:
1561 except (NodeError, NodeAlreadyExistsError) as e:
1562 error = h.escape(e)
1562 error = h.escape(e)
1563 h.flash(error, category='error')
1563 h.flash(error, category='error')
1564
1564
1565 return {
1565 return {
1566 'error': error,
1566 'error': error,
1567 'redirect_url': default_redirect_url
1567 'redirect_url': default_redirect_url
1568 }
1568 }
1569 except Exception:
1569 except Exception:
1570 log.exception('Error occurred during commit')
1570 log.exception('Error occurred during commit')
1571 error = _('Error occurred during commit')
1571 error = _('Error occurred during commit')
1572 h.flash(error, category='error')
1572 h.flash(error, category='error')
1573 return {
1573 return {
1574 'error': error,
1574 'error': error,
1575 'redirect_url': default_redirect_url
1575 'redirect_url': default_redirect_url
1576 }
1576 }
1577
1577
1578 return {
1578 return {
1579 'error': None,
1579 'error': None,
1580 'redirect_url': default_redirect_url
1580 'redirect_url': default_redirect_url
1581 }
1581 }
@@ -1,389 +1,390 b''
1 import sys
1 import sys
2 import threading
2 import threading
3 import weakref
3 import weakref
4 from base64 import b64encode
4 from base64 import b64encode
5 from logging import getLogger
5 from logging import getLogger
6 from os import urandom
6 from os import urandom
7
7
8 from redis import StrictRedis
8 from redis import StrictRedis
9
9
10 __version__ = '3.7.0'
10 __version__ = '3.7.0'
11
11
12 loggers = {
12 loggers = {
13 k: getLogger("rhodecode" + ".".join((__name__, k)))
13 k: getLogger("rhodecode." + ".".join((__name__, k)))
14 for k in [
14 for k in [
15 "acquire",
15 "acquire",
16 "refresh.thread.start",
16 "refresh.thread.start",
17 "refresh.thread.stop",
17 "refresh.thread.stop",
18 "refresh.thread.exit",
18 "refresh.thread.exit",
19 "refresh.start",
19 "refresh.start",
20 "refresh.shutdown",
20 "refresh.shutdown",
21 "refresh.exit",
21 "refresh.exit",
22 "release",
22 "release",
23 ]
23 ]
24 }
24 }
25
25
26 PY3 = sys.version_info[0] == 3
26 PY3 = sys.version_info[0] == 3
27
27
28 if PY3:
28 if PY3:
29 text_type = str
29 text_type = str
30 binary_type = bytes
30 binary_type = bytes
31 else:
31 else:
32 text_type = unicode # noqa
32 text_type = unicode # noqa
33 binary_type = str
33 binary_type = str
34
34
35
35
36 # Check if the id match. If not, return an error code.
36 # Check if the id match. If not, return an error code.
37 UNLOCK_SCRIPT = b"""
37 UNLOCK_SCRIPT = b"""
38 if redis.call("get", KEYS[1]) ~= ARGV[1] then
38 if redis.call("get", KEYS[1]) ~= ARGV[1] then
39 return 1
39 return 1
40 else
40 else
41 redis.call("del", KEYS[2])
41 redis.call("del", KEYS[2])
42 redis.call("lpush", KEYS[2], 1)
42 redis.call("lpush", KEYS[2], 1)
43 redis.call("pexpire", KEYS[2], ARGV[2])
43 redis.call("pexpire", KEYS[2], ARGV[2])
44 redis.call("del", KEYS[1])
44 redis.call("del", KEYS[1])
45 return 0
45 return 0
46 end
46 end
47 """
47 """
48
48
49 # Covers both cases when key doesn't exist and doesn't equal to lock's id
49 # Covers both cases when key doesn't exist and doesn't equal to lock's id
50 EXTEND_SCRIPT = b"""
50 EXTEND_SCRIPT = b"""
51 if redis.call("get", KEYS[1]) ~= ARGV[1] then
51 if redis.call("get", KEYS[1]) ~= ARGV[1] then
52 return 1
52 return 1
53 elseif redis.call("ttl", KEYS[1]) < 0 then
53 elseif redis.call("ttl", KEYS[1]) < 0 then
54 return 2
54 return 2
55 else
55 else
56 redis.call("expire", KEYS[1], ARGV[2])
56 redis.call("expire", KEYS[1], ARGV[2])
57 return 0
57 return 0
58 end
58 end
59 """
59 """
60
60
61 RESET_SCRIPT = b"""
61 RESET_SCRIPT = b"""
62 redis.call('del', KEYS[2])
62 redis.call('del', KEYS[2])
63 redis.call('lpush', KEYS[2], 1)
63 redis.call('lpush', KEYS[2], 1)
64 redis.call('pexpire', KEYS[2], ARGV[2])
64 redis.call('pexpire', KEYS[2], ARGV[2])
65 return redis.call('del', KEYS[1])
65 return redis.call('del', KEYS[1])
66 """
66 """
67
67
68 RESET_ALL_SCRIPT = b"""
68 RESET_ALL_SCRIPT = b"""
69 local locks = redis.call('keys', 'lock:*')
69 local locks = redis.call('keys', 'lock:*')
70 local signal
70 local signal
71 for _, lock in pairs(locks) do
71 for _, lock in pairs(locks) do
72 signal = 'lock-signal:' .. string.sub(lock, 6)
72 signal = 'lock-signal:' .. string.sub(lock, 6)
73 redis.call('del', signal)
73 redis.call('del', signal)
74 redis.call('lpush', signal, 1)
74 redis.call('lpush', signal, 1)
75 redis.call('expire', signal, 1)
75 redis.call('expire', signal, 1)
76 redis.call('del', lock)
76 redis.call('del', lock)
77 end
77 end
78 return #locks
78 return #locks
79 """
79 """
80
80
81
81
82 class AlreadyAcquired(RuntimeError):
82 class AlreadyAcquired(RuntimeError):
83 pass
83 pass
84
84
85
85
86 class NotAcquired(RuntimeError):
86 class NotAcquired(RuntimeError):
87 pass
87 pass
88
88
89
89
90 class AlreadyStarted(RuntimeError):
90 class AlreadyStarted(RuntimeError):
91 pass
91 pass
92
92
93
93
94 class TimeoutNotUsable(RuntimeError):
94 class TimeoutNotUsable(RuntimeError):
95 pass
95 pass
96
96
97
97
98 class InvalidTimeout(RuntimeError):
98 class InvalidTimeout(RuntimeError):
99 pass
99 pass
100
100
101
101
102 class TimeoutTooLarge(RuntimeError):
102 class TimeoutTooLarge(RuntimeError):
103 pass
103 pass
104
104
105
105
106 class NotExpirable(RuntimeError):
106 class NotExpirable(RuntimeError):
107 pass
107 pass
108
108
109
109
110 class Lock(object):
110 class Lock(object):
111 """
111 """
112 A Lock context manager implemented via redis SETNX/BLPOP.
112 A Lock context manager implemented via redis SETNX/BLPOP.
113 """
113 """
114 unlock_script = None
114 unlock_script = None
115 extend_script = None
115 extend_script = None
116 reset_script = None
116 reset_script = None
117 reset_all_script = None
117 reset_all_script = None
118
118
119 def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
119 def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
120 """
120 """
121 :param redis_client:
121 :param redis_client:
122 An instance of :class:`~StrictRedis`.
122 An instance of :class:`~StrictRedis`.
123 :param name:
123 :param name:
124 The name (redis key) the lock should have.
124 The name (redis key) the lock should have.
125 :param expire:
125 :param expire:
126 The lock expiry time in seconds. If left at the default (None)
126 The lock expiry time in seconds. If left at the default (None)
127 the lock will not expire.
127 the lock will not expire.
128 :param id:
128 :param id:
129 The ID (redis value) the lock should have. A random value is
129 The ID (redis value) the lock should have. A random value is
130 generated when left at the default.
130 generated when left at the default.
131
131
132 Note that if you specify this then the lock is marked as "held". Acquires
132 Note that if you specify this then the lock is marked as "held". Acquires
133 won't be possible.
133 won't be possible.
134 :param auto_renewal:
134 :param auto_renewal:
135 If set to ``True``, Lock will automatically renew the lock so that it
135 If set to ``True``, Lock will automatically renew the lock so that it
136 doesn't expire for as long as the lock is held (acquire() called
136 doesn't expire for as long as the lock is held (acquire() called
137 or running in a context manager).
137 or running in a context manager).
138
138
139 Implementation note: Renewal will happen using a daemon thread with
139 Implementation note: Renewal will happen using a daemon thread with
140 an interval of ``expire*2/3``. If wishing to use a different renewal
140 an interval of ``expire*2/3``. If wishing to use a different renewal
141 time, subclass Lock, call ``super().__init__()`` then set
141 time, subclass Lock, call ``super().__init__()`` then set
142 ``self._lock_renewal_interval`` to your desired interval.
142 ``self._lock_renewal_interval`` to your desired interval.
143 :param strict:
143 :param strict:
144 If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
144 If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
145 :param signal_expire:
145 :param signal_expire:
146 Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
146 Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
147 """
147 """
148 if strict and not isinstance(redis_client, StrictRedis):
148 if strict and not isinstance(redis_client, StrictRedis):
149 raise ValueError("redis_client must be instance of StrictRedis. "
149 raise ValueError("redis_client must be instance of StrictRedis. "
150 "Use strict=False if you know what you're doing.")
150 "Use strict=False if you know what you're doing.")
151 if auto_renewal and expire is None:
151 if auto_renewal and expire is None:
152 raise ValueError("Expire may not be None when auto_renewal is set")
152 raise ValueError("Expire may not be None when auto_renewal is set")
153
153
154 self._client = redis_client
154 self._client = redis_client
155
155
156 if expire:
156 if expire:
157 expire = int(expire)
157 expire = int(expire)
158 if expire < 0:
158 if expire < 0:
159 raise ValueError("A negative expire is not acceptable.")
159 raise ValueError("A negative expire is not acceptable.")
160 else:
160 else:
161 expire = None
161 expire = None
162 self._expire = expire
162 self._expire = expire
163
163
164 self._signal_expire = signal_expire
164 self._signal_expire = signal_expire
165 if id is None:
165 if id is None:
166 self._id = b64encode(urandom(18)).decode('ascii')
166 self._id = b64encode(urandom(18)).decode('ascii')
167 elif isinstance(id, binary_type):
167 elif isinstance(id, binary_type):
168 try:
168 try:
169 self._id = id.decode('ascii')
169 self._id = id.decode('ascii')
170 except UnicodeDecodeError:
170 except UnicodeDecodeError:
171 self._id = b64encode(id).decode('ascii')
171 self._id = b64encode(id).decode('ascii')
172 elif isinstance(id, text_type):
172 elif isinstance(id, text_type):
173 self._id = id
173 self._id = id
174 else:
174 else:
175 raise TypeError("Incorrect type for `id`. Must be bytes/str not %s." % type(id))
175 raise TypeError("Incorrect type for `id`. Must be bytes/str not %s." % type(id))
176 self._name = 'lock:' + name
176 self._name = 'lock:' + name
177 self._signal = 'lock-signal:' + name
177 self._signal = 'lock-signal:' + name
178 self._lock_renewal_interval = (float(expire) * 2 / 3
178 self._lock_renewal_interval = (float(expire) * 2 / 3
179 if auto_renewal
179 if auto_renewal
180 else None)
180 else None)
181 self._lock_renewal_thread = None
181 self._lock_renewal_thread = None
182
182
183 self.register_scripts(redis_client)
183 self.register_scripts(redis_client)
184
184
185 @classmethod
185 @classmethod
186 def register_scripts(cls, redis_client):
186 def register_scripts(cls, redis_client):
187 global reset_all_script
187 global reset_all_script
188 if reset_all_script is None:
188 if reset_all_script is None:
189 reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
189 reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
190 cls.unlock_script = redis_client.register_script(UNLOCK_SCRIPT)
190 cls.unlock_script = redis_client.register_script(UNLOCK_SCRIPT)
191 cls.extend_script = redis_client.register_script(EXTEND_SCRIPT)
191 cls.extend_script = redis_client.register_script(EXTEND_SCRIPT)
192 cls.reset_script = redis_client.register_script(RESET_SCRIPT)
192 cls.reset_script = redis_client.register_script(RESET_SCRIPT)
193 cls.reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
193 cls.reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
194
194
195 @property
195 @property
196 def _held(self):
196 def _held(self):
197 return self.id == self.get_owner_id()
197 return self.id == self.get_owner_id()
198
198
199 def reset(self):
199 def reset(self):
200 """
200 """
201 Forcibly deletes the lock. Use this with care.
201 Forcibly deletes the lock. Use this with care.
202 """
202 """
203 self.reset_script(client=self._client, keys=(self._name, self._signal), args=(self.id, self._signal_expire))
203 self.reset_script(client=self._client, keys=(self._name, self._signal), args=(self.id, self._signal_expire))
204
204
205 @property
205 @property
206 def id(self):
206 def id(self):
207 return self._id
207 return self._id
208
208
209 def get_owner_id(self):
209 def get_owner_id(self):
210 owner_id = self._client.get(self._name)
210 owner_id = self._client.get(self._name)
211 if isinstance(owner_id, binary_type):
211 if isinstance(owner_id, binary_type):
212 owner_id = owner_id.decode('ascii', 'replace')
212 owner_id = owner_id.decode('ascii', 'replace')
213 return owner_id
213 return owner_id
214
214
215 def acquire(self, blocking=True, timeout=None):
215 def acquire(self, blocking=True, timeout=None):
216 """
216 """
217 :param blocking:
217 :param blocking:
218 Boolean value specifying whether lock should be blocking or not.
218 Boolean value specifying whether lock should be blocking or not.
219 :param timeout:
219 :param timeout:
220 An integer value specifying the maximum number of seconds to block.
220 An integer value specifying the maximum number of seconds to block.
221 """
221 """
222 logger = loggers["acquire"]
222 logger = loggers["acquire"]
223
223
224 logger.debug("Getting %r ...", self._name)
224 logger.debug("Getting acquire on %r ...", self._name)
225
225
226 if self._held:
226 if self._held:
227 raise AlreadyAcquired("Already acquired from this Lock instance.")
227 owner_id = self.get_owner_id()
228 raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))
228
229
229 if not blocking and timeout is not None:
230 if not blocking and timeout is not None:
230 raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
231 raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
231
232
232 if timeout:
233 if timeout:
233 timeout = int(timeout)
234 timeout = int(timeout)
234 if timeout < 0:
235 if timeout < 0:
235 raise InvalidTimeout("Timeout (%d) cannot be less than or equal to 0" % timeout)
236 raise InvalidTimeout("Timeout (%d) cannot be less than or equal to 0" % timeout)
236
237
237 if self._expire and not self._lock_renewal_interval and timeout > self._expire:
238 if self._expire and not self._lock_renewal_interval and timeout > self._expire:
238 raise TimeoutTooLarge("Timeout (%d) cannot be greater than expire (%d)" % (timeout, self._expire))
239 raise TimeoutTooLarge("Timeout (%d) cannot be greater than expire (%d)" % (timeout, self._expire))
239
240
240 busy = True
241 busy = True
241 blpop_timeout = timeout or self._expire or 0
242 blpop_timeout = timeout or self._expire or 0
242 timed_out = False
243 timed_out = False
243 while busy:
244 while busy:
244 busy = not self._client.set(self._name, self._id, nx=True, ex=self._expire)
245 busy = not self._client.set(self._name, self._id, nx=True, ex=self._expire)
245 if busy:
246 if busy:
246 if timed_out:
247 if timed_out:
247 return False
248 return False
248 elif blocking:
249 elif blocking:
249 timed_out = not self._client.blpop(self._signal, blpop_timeout) and timeout
250 timed_out = not self._client.blpop(self._signal, blpop_timeout) and timeout
250 else:
251 else:
251 logger.warning("Failed to get %r.", self._name)
252 logger.warning("Failed to get %r.", self._name)
252 return False
253 return False
253
254
254 logger.info("Got lock for %r.", self._name)
255 logger.info("Got lock for %r.", self._name)
255 if self._lock_renewal_interval is not None:
256 if self._lock_renewal_interval is not None:
256 self._start_lock_renewer()
257 self._start_lock_renewer()
257 return True
258 return True
258
259
259 def extend(self, expire=None):
260 def extend(self, expire=None):
260 """Extends expiration time of the lock.
261 """Extends expiration time of the lock.
261
262
262 :param expire:
263 :param expire:
263 New expiration time. If ``None`` - `expire` provided during
264 New expiration time. If ``None`` - `expire` provided during
264 lock initialization will be taken.
265 lock initialization will be taken.
265 """
266 """
266 if expire:
267 if expire:
267 expire = int(expire)
268 expire = int(expire)
268 if expire < 0:
269 if expire < 0:
269 raise ValueError("A negative expire is not acceptable.")
270 raise ValueError("A negative expire is not acceptable.")
270 elif self._expire is not None:
271 elif self._expire is not None:
271 expire = self._expire
272 expire = self._expire
272 else:
273 else:
273 raise TypeError(
274 raise TypeError(
274 "To extend a lock 'expire' must be provided as an "
275 "To extend a lock 'expire' must be provided as an "
275 "argument to extend() method or at initialization time."
276 "argument to extend() method or at initialization time."
276 )
277 )
277
278
278 error = self.extend_script(client=self._client, keys=(self._name, self._signal), args=(self._id, expire))
279 error = self.extend_script(client=self._client, keys=(self._name, self._signal), args=(self._id, expire))
279 if error == 1:
280 if error == 1:
280 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
281 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
281 elif error == 2:
282 elif error == 2:
282 raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
283 raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
283 elif error:
284 elif error:
284 raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
285 raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
285
286
286 @staticmethod
287 @staticmethod
287 def _lock_renewer(lockref, interval, stop):
288 def _lock_renewer(lockref, interval, stop):
288 """
289 """
289 Renew the lock key in redis every `interval` seconds for as long
290 Renew the lock key in redis every `interval` seconds for as long
290 as `self._lock_renewal_thread.should_exit` is False.
291 as `self._lock_renewal_thread.should_exit` is False.
291 """
292 """
292 while not stop.wait(timeout=interval):
293 while not stop.wait(timeout=interval):
293 loggers["refresh.thread.start"].debug("Refreshing lock")
294 loggers["refresh.thread.start"].debug("Refreshing lock")
294 lock = lockref()
295 lock = lockref()
295 if lock is None:
296 if lock is None:
296 loggers["refresh.thread.stop"].debug(
297 loggers["refresh.thread.stop"].debug(
297 "The lock no longer exists, stopping lock refreshing"
298 "The lock no longer exists, stopping lock refreshing"
298 )
299 )
299 break
300 break
300 lock.extend(expire=lock._expire)
301 lock.extend(expire=lock._expire)
301 del lock
302 del lock
302 loggers["refresh.thread.exit"].debug("Exit requested, stopping lock refreshing")
303 loggers["refresh.thread.exit"].debug("Exit requested, stopping lock refreshing")
303
304
304 def _start_lock_renewer(self):
305 def _start_lock_renewer(self):
305 """
306 """
306 Starts the lock refresher thread.
307 Starts the lock refresher thread.
307 """
308 """
308 if self._lock_renewal_thread is not None:
309 if self._lock_renewal_thread is not None:
309 raise AlreadyStarted("Lock refresh thread already started")
310 raise AlreadyStarted("Lock refresh thread already started")
310
311
311 loggers["refresh.start"].debug(
312 loggers["refresh.start"].debug(
312 "Starting thread to refresh lock every %s seconds",
313 "Starting thread to refresh lock every %s seconds",
313 self._lock_renewal_interval
314 self._lock_renewal_interval
314 )
315 )
315 self._lock_renewal_stop = threading.Event()
316 self._lock_renewal_stop = threading.Event()
316 self._lock_renewal_thread = threading.Thread(
317 self._lock_renewal_thread = threading.Thread(
317 group=None,
318 group=None,
318 target=self._lock_renewer,
319 target=self._lock_renewer,
319 kwargs={'lockref': weakref.ref(self),
320 kwargs={'lockref': weakref.ref(self),
320 'interval': self._lock_renewal_interval,
321 'interval': self._lock_renewal_interval,
321 'stop': self._lock_renewal_stop}
322 'stop': self._lock_renewal_stop}
322 )
323 )
323 self._lock_renewal_thread.setDaemon(True)
324 self._lock_renewal_thread.setDaemon(True)
324 self._lock_renewal_thread.start()
325 self._lock_renewal_thread.start()
325
326
326 def _stop_lock_renewer(self):
327 def _stop_lock_renewer(self):
327 """
328 """
328 Stop the lock renewer.
329 Stop the lock renewer.
329
330
330 This signals the renewal thread and waits for its exit.
331 This signals the renewal thread and waits for its exit.
331 """
332 """
332 if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
333 if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
333 return
334 return
334 loggers["refresh.shutdown"].debug("Signalling the lock refresher to stop")
335 loggers["refresh.shutdown"].debug("Signalling the lock refresher to stop")
335 self._lock_renewal_stop.set()
336 self._lock_renewal_stop.set()
336 self._lock_renewal_thread.join()
337 self._lock_renewal_thread.join()
337 self._lock_renewal_thread = None
338 self._lock_renewal_thread = None
338 loggers["refresh.exit"].debug("Lock refresher has stopped")
339 loggers["refresh.exit"].debug("Lock refresher has stopped")
339
340
340 def __enter__(self):
341 def __enter__(self):
341 acquired = self.acquire(blocking=True)
342 acquired = self.acquire(blocking=True)
342 assert acquired, "Lock wasn't acquired, but blocking=True"
343 assert acquired, "Lock wasn't acquired, but blocking=True"
343 return self
344 return self
344
345
345 def __exit__(self, exc_type=None, exc_value=None, traceback=None):
346 def __exit__(self, exc_type=None, exc_value=None, traceback=None):
346 self.release()
347 self.release()
347
348
348 def release(self):
349 def release(self):
349 """Releases the lock, that was acquired with the same object.
350 """Releases the lock, that was acquired with the same object.
350
351
351 .. note::
352 .. note::
352
353
353 If you want to release a lock that you acquired in a different place you have two choices:
354 If you want to release a lock that you acquired in a different place you have two choices:
354
355
355 * Use ``Lock("name", id=id_from_other_place).release()``
356 * Use ``Lock("name", id=id_from_other_place).release()``
356 * Use ``Lock("name").reset()``
357 * Use ``Lock("name").reset()``
357 """
358 """
358 if self._lock_renewal_thread is not None:
359 if self._lock_renewal_thread is not None:
359 self._stop_lock_renewer()
360 self._stop_lock_renewer()
360 loggers["release"].debug("Releasing %r.", self._name)
361 loggers["release"].debug("Releasing %r.", self._name)
361 error = self.unlock_script(client=self._client, keys=(self._name, self._signal), args=(self._id, self._signal_expire))
362 error = self.unlock_script(client=self._client, keys=(self._name, self._signal), args=(self._id, self._signal_expire))
362 if error == 1:
363 if error == 1:
363 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
364 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
364 elif error:
365 elif error:
365 raise RuntimeError("Unsupported error code %s from EXTEND script." % error)
366 raise RuntimeError("Unsupported error code %s from EXTEND script." % error)
366
367
367 def locked(self):
368 def locked(self):
368 """
369 """
369 Return true if the lock is acquired.
370 Return true if the lock is acquired.
370
371
371 Checks that lock with same name already exists. This method returns true, even if
372 Checks that lock with same name already exists. This method returns true, even if
372 lock have another id.
373 lock have another id.
373 """
374 """
374 return self._client.exists(self._name) == 1
375 return self._client.exists(self._name) == 1
375
376
376
377
377 reset_all_script = None
378 reset_all_script = None
378
379
379
380
380 def reset_all(redis_client):
381 def reset_all(redis_client):
381 """
382 """
382 Forcibly deletes all locks if its remains (like a crash reason). Use this with care.
383 Forcibly deletes all locks if its remains (like a crash reason). Use this with care.
383
384
384 :param redis_client:
385 :param redis_client:
385 An instance of :class:`~StrictRedis`.
386 An instance of :class:`~StrictRedis`.
386 """
387 """
387 Lock.register_scripts(redis_client)
388 Lock.register_scripts(redis_client)
388
389
389 reset_all_script(client=redis_client) # noqa
390 reset_all_script(client=redis_client) # noqa
@@ -1,280 +1,283 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import gzip
21 import gzip
22 import shutil
22 import shutil
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import urlparse
25 import urlparse
26
26
27 from webob.exc import HTTPNotFound
27 from webob.exc import HTTPNotFound
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
32 from rhodecode.lib.middleware.simplehg import SimpleHg
32 from rhodecode.lib.middleware.simplehg import SimpleHg
33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
34 from rhodecode.model.settings import VcsSettingsModel
34 from rhodecode.model.settings import VcsSettingsModel
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38 VCS_TYPE_KEY = '_rc_vcs_type'
38 VCS_TYPE_KEY = '_rc_vcs_type'
39 VCS_TYPE_SKIP = '_rc_vcs_skip'
39 VCS_TYPE_SKIP = '_rc_vcs_skip'
40
40
41
41
42 def is_git(environ):
42 def is_git(environ):
43 """
43 """
44 Returns True if requests should be handled by GIT wsgi middleware
44 Returns True if requests should be handled by GIT wsgi middleware
45 """
45 """
46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
47 log.debug(
47 log.debug(
48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
49 is_git_path is not None)
49 is_git_path is not None)
50
50
51 return is_git_path
51 return is_git_path
52
52
53
53
54 def is_hg(environ):
54 def is_hg(environ):
55 """
55 """
56 Returns True if requests target is mercurial server - header
56 Returns True if requests target is mercurial server - header
57 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
57 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
58 """
58 """
59 is_hg_path = False
59 is_hg_path = False
60
60
61 http_accept = environ.get('HTTP_ACCEPT')
61 http_accept = environ.get('HTTP_ACCEPT')
62
62
63 if http_accept and http_accept.startswith('application/mercurial'):
63 if http_accept and http_accept.startswith('application/mercurial'):
64 query = urlparse.parse_qs(environ['QUERY_STRING'])
64 query = urlparse.parse_qs(environ['QUERY_STRING'])
65 if 'cmd' in query:
65 if 'cmd' in query:
66 is_hg_path = True
66 is_hg_path = True
67
67
68 log.debug(
68 log.debug(
69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
70 is_hg_path)
70 is_hg_path)
71
71
72 return is_hg_path
72 return is_hg_path
73
73
74
74
75 def is_svn(environ):
75 def is_svn(environ):
76 """
76 """
77 Returns True if requests target is Subversion server
77 Returns True if requests target is Subversion server
78 """
78 """
79
79
80 http_dav = environ.get('HTTP_DAV', '')
80 http_dav = environ.get('HTTP_DAV', '')
81 magic_path_segment = rhodecode.CONFIG.get(
81 magic_path_segment = rhodecode.CONFIG.get(
82 'rhodecode_subversion_magic_path', '/!svn')
82 'rhodecode_subversion_magic_path', '/!svn')
83 is_svn_path = (
83 is_svn_path = (
84 'subversion' in http_dav or
84 'subversion' in http_dav or
85 magic_path_segment in environ['PATH_INFO']
85 magic_path_segment in environ['PATH_INFO']
86 or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH']
86 or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH']
87 )
87 )
88 log.debug(
88 log.debug(
89 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
89 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
90 is_svn_path)
90 is_svn_path)
91
91
92 return is_svn_path
92 return is_svn_path
93
93
94
94
95 class GunzipMiddleware(object):
95 class GunzipMiddleware(object):
96 """
96 """
97 WSGI middleware that unzips gzip-encoded requests before
97 WSGI middleware that unzips gzip-encoded requests before
98 passing on to the underlying application.
98 passing on to the underlying application.
99 """
99 """
100
100
101 def __init__(self, application):
101 def __init__(self, application):
102 self.app = application
102 self.app = application
103
103
104 def __call__(self, environ, start_response):
104 def __call__(self, environ, start_response):
105 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
105 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
106
106
107 if b'gzip' in accepts_encoding_header:
107 if b'gzip' in accepts_encoding_header:
108 log.debug('gzip detected, now running gunzip wrapper')
108 log.debug('gzip detected, now running gunzip wrapper')
109 wsgi_input = environ['wsgi.input']
109 wsgi_input = environ['wsgi.input']
110
110
111 if not hasattr(environ['wsgi.input'], 'seek'):
111 if not hasattr(environ['wsgi.input'], 'seek'):
112 # The gzip implementation in the standard library of Python 2.x
112 # The gzip implementation in the standard library of Python 2.x
113 # requires the '.seek()' and '.tell()' methods to be available
113 # requires the '.seek()' and '.tell()' methods to be available
114 # on the input stream. Read the data into a temporary file to
114 # on the input stream. Read the data into a temporary file to
115 # work around this limitation.
115 # work around this limitation.
116
116
117 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
117 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
118 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
118 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
119 wsgi_input.seek(0)
119 wsgi_input.seek(0)
120
120
121 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
121 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
122 # since we "Ungzipped" the content we say now it's no longer gzip
122 # since we "Ungzipped" the content we say now it's no longer gzip
123 # content encoding
123 # content encoding
124 del environ['HTTP_CONTENT_ENCODING']
124 del environ['HTTP_CONTENT_ENCODING']
125
125
126 # content length has changes ? or i'm not sure
126 # content length has changes ? or i'm not sure
127 if 'CONTENT_LENGTH' in environ:
127 if 'CONTENT_LENGTH' in environ:
128 del environ['CONTENT_LENGTH']
128 del environ['CONTENT_LENGTH']
129 else:
129 else:
130 log.debug('content not gzipped, gzipMiddleware passing '
130 log.debug('content not gzipped, gzipMiddleware passing '
131 'request further')
131 'request further')
132 return self.app(environ, start_response)
132 return self.app(environ, start_response)
133
133
134
134
135 def is_vcs_call(environ):
135 def is_vcs_call(environ):
136 if VCS_TYPE_KEY in environ:
136 if VCS_TYPE_KEY in environ:
137 raw_type = environ[VCS_TYPE_KEY]
137 raw_type = environ[VCS_TYPE_KEY]
138 return raw_type and raw_type != VCS_TYPE_SKIP
138 return raw_type and raw_type != VCS_TYPE_SKIP
139 return False
139 return False
140
140
141
141
142 def get_path_elem(route_path):
142 def get_path_elem(route_path):
143 if not route_path:
143 if not route_path:
144 return None
144 return None
145
145
146 cleaned_route_path = route_path.lstrip('/')
146 cleaned_route_path = route_path.lstrip('/')
147 if cleaned_route_path:
147 if cleaned_route_path:
148 cleaned_route_path_elems = cleaned_route_path.split('/')
148 cleaned_route_path_elems = cleaned_route_path.split('/')
149 if cleaned_route_path_elems:
149 if cleaned_route_path_elems:
150 return cleaned_route_path_elems[0]
150 return cleaned_route_path_elems[0]
151 return None
151 return None
152
152
153
153
154 def detect_vcs_request(environ, backends):
154 def detect_vcs_request(environ, backends):
155 checks = {
155 checks = {
156 'hg': (is_hg, SimpleHg),
156 'hg': (is_hg, SimpleHg),
157 'git': (is_git, SimpleGit),
157 'git': (is_git, SimpleGit),
158 'svn': (is_svn, SimpleSvn),
158 'svn': (is_svn, SimpleSvn),
159 }
159 }
160 handler = None
160 handler = None
161 # List of path views first chunk we don't do any checks
161 # List of path views first chunk we don't do any checks
162 white_list = [
162 white_list = [
163 # e.g /_file_store/download
163 # e.g /_file_store/download
164 '_file_store',
164 '_file_store',
165
165
166 # static files no detection
166 # static files no detection
167 '_static',
167 '_static',
168
168
169 # skip ops ping
170 '_admin/ops/ping',
171
169 # full channelstream connect should be VCS skipped
172 # full channelstream connect should be VCS skipped
170 '_admin/channelstream/connect',
173 '_admin/channelstream/connect',
171 ]
174 ]
172
175
173 path_info = environ['PATH_INFO']
176 path_info = environ['PATH_INFO']
174
177
175 path_elem = get_path_elem(path_info)
178 path_elem = get_path_elem(path_info)
176
179
177 if path_elem in white_list:
180 if path_elem in white_list:
178 log.debug('path `%s` in whitelist, skipping...', path_info)
181 log.debug('path `%s` in whitelist, skipping...', path_info)
179 return handler
182 return handler
180
183
181 path_url = path_info.lstrip('/')
184 path_url = path_info.lstrip('/')
182 if path_url in white_list:
185 if path_url in white_list:
183 log.debug('full url path `%s` in whitelist, skipping...', path_url)
186 log.debug('full url path `%s` in whitelist, skipping...', path_url)
184 return handler
187 return handler
185
188
186 if VCS_TYPE_KEY in environ:
189 if VCS_TYPE_KEY in environ:
187 raw_type = environ[VCS_TYPE_KEY]
190 raw_type = environ[VCS_TYPE_KEY]
188 if raw_type == VCS_TYPE_SKIP:
191 if raw_type == VCS_TYPE_SKIP:
189 log.debug('got `skip` marker for vcs detection, skipping...')
192 log.debug('got `skip` marker for vcs detection, skipping...')
190 return handler
193 return handler
191
194
192 _check, handler = checks.get(raw_type) or [None, None]
195 _check, handler = checks.get(raw_type) or [None, None]
193 if handler:
196 if handler:
194 log.debug('got handler:%s from environ', handler)
197 log.debug('got handler:%s from environ', handler)
195
198
196 if not handler:
199 if not handler:
197 log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends)
200 log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends)
198 for vcs_type in backends:
201 for vcs_type in backends:
199 vcs_check, _handler = checks[vcs_type]
202 vcs_check, _handler = checks[vcs_type]
200 if vcs_check(environ):
203 if vcs_check(environ):
201 log.debug('vcs handler found %s', _handler)
204 log.debug('vcs handler found %s', _handler)
202 handler = _handler
205 handler = _handler
203 break
206 break
204
207
205 return handler
208 return handler
206
209
207
210
208 class VCSMiddleware(object):
211 class VCSMiddleware(object):
209
212
210 def __init__(self, app, registry, config, appenlight_client):
213 def __init__(self, app, registry, config, appenlight_client):
211 self.application = app
214 self.application = app
212 self.registry = registry
215 self.registry = registry
213 self.config = config
216 self.config = config
214 self.appenlight_client = appenlight_client
217 self.appenlight_client = appenlight_client
215 self.use_gzip = True
218 self.use_gzip = True
216 # order in which we check the middlewares, based on vcs.backends config
219 # order in which we check the middlewares, based on vcs.backends config
217 self.check_middlewares = config['vcs.backends']
220 self.check_middlewares = config['vcs.backends']
218
221
219 def vcs_config(self, repo_name=None):
222 def vcs_config(self, repo_name=None):
220 """
223 """
221 returns serialized VcsSettings
224 returns serialized VcsSettings
222 """
225 """
223 try:
226 try:
224 return VcsSettingsModel(
227 return VcsSettingsModel(
225 repo=repo_name).get_ui_settings_as_config_obj()
228 repo=repo_name).get_ui_settings_as_config_obj()
226 except Exception:
229 except Exception:
227 pass
230 pass
228
231
229 def wrap_in_gzip_if_enabled(self, app, config):
232 def wrap_in_gzip_if_enabled(self, app, config):
230 if self.use_gzip:
233 if self.use_gzip:
231 app = GunzipMiddleware(app)
234 app = GunzipMiddleware(app)
232 return app
235 return app
233
236
234 def _get_handler_app(self, environ):
237 def _get_handler_app(self, environ):
235 app = None
238 app = None
236 log.debug('VCSMiddleware: detecting vcs type.')
239 log.debug('VCSMiddleware: detecting vcs type.')
237 handler = detect_vcs_request(environ, self.check_middlewares)
240 handler = detect_vcs_request(environ, self.check_middlewares)
238 if handler:
241 if handler:
239 app = handler(self.config, self.registry)
242 app = handler(self.config, self.registry)
240
243
241 return app
244 return app
242
245
243 def __call__(self, environ, start_response):
246 def __call__(self, environ, start_response):
244 # check if we handle one of interesting protocols, optionally extract
247 # check if we handle one of interesting protocols, optionally extract
245 # specific vcsSettings and allow changes of how things are wrapped
248 # specific vcsSettings and allow changes of how things are wrapped
246 vcs_handler = self._get_handler_app(environ)
249 vcs_handler = self._get_handler_app(environ)
247 if vcs_handler:
250 if vcs_handler:
248 # translate the _REPO_ID into real repo NAME for usage
251 # translate the _REPO_ID into real repo NAME for usage
249 # in middleware
252 # in middleware
250 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
253 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
251
254
252 # Set acl, url and vcs repo names.
255 # Set acl, url and vcs repo names.
253 vcs_handler.set_repo_names(environ)
256 vcs_handler.set_repo_names(environ)
254
257
255 # register repo config back to the handler
258 # register repo config back to the handler
256 vcs_conf = self.vcs_config(vcs_handler.acl_repo_name)
259 vcs_conf = self.vcs_config(vcs_handler.acl_repo_name)
257 # maybe damaged/non existent settings. We still want to
260 # maybe damaged/non existent settings. We still want to
258 # pass that point to validate on is_valid_and_existing_repo
261 # pass that point to validate on is_valid_and_existing_repo
259 # and return proper HTTP Code back to client
262 # and return proper HTTP Code back to client
260 if vcs_conf:
263 if vcs_conf:
261 vcs_handler.repo_vcs_config = vcs_conf
264 vcs_handler.repo_vcs_config = vcs_conf
262
265
263 # check for type, presence in database and on filesystem
266 # check for type, presence in database and on filesystem
264 if not vcs_handler.is_valid_and_existing_repo(
267 if not vcs_handler.is_valid_and_existing_repo(
265 vcs_handler.acl_repo_name,
268 vcs_handler.acl_repo_name,
266 vcs_handler.base_path,
269 vcs_handler.base_path,
267 vcs_handler.SCM):
270 vcs_handler.SCM):
268 return HTTPNotFound()(environ, start_response)
271 return HTTPNotFound()(environ, start_response)
269
272
270 environ['REPO_NAME'] = vcs_handler.url_repo_name
273 environ['REPO_NAME'] = vcs_handler.url_repo_name
271
274
272 # Wrap handler in middlewares if they are enabled.
275 # Wrap handler in middlewares if they are enabled.
273 vcs_handler = self.wrap_in_gzip_if_enabled(
276 vcs_handler = self.wrap_in_gzip_if_enabled(
274 vcs_handler, self.config)
277 vcs_handler, self.config)
275 vcs_handler, _ = wrap_in_appenlight_if_enabled(
278 vcs_handler, _ = wrap_in_appenlight_if_enabled(
276 vcs_handler, self.config, self.appenlight_client)
279 vcs_handler, self.config, self.appenlight_client)
277
280
278 return vcs_handler(environ, start_response)
281 return vcs_handler(environ, start_response)
279
282
280 return self.application(environ, start_response)
283 return self.application(environ, start_response)
@@ -1,312 +1,354 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2020 RhodeCode GmbH
3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import errno
22 import errno
23 import logging
23 import logging
24
24
25 import msgpack
25 import msgpack
26 import gevent
26 import gevent
27 import redis
27 import redis
28
28
29 from dogpile.cache.api import CachedValue
29 from dogpile.cache.api import CachedValue
30 from dogpile.cache.backends import memory as memory_backend
30 from dogpile.cache.backends import memory as memory_backend
31 from dogpile.cache.backends import file as file_backend
31 from dogpile.cache.backends import file as file_backend
32 from dogpile.cache.backends import redis as redis_backend
32 from dogpile.cache.backends import redis as redis_backend
33 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
33 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
34 from dogpile.cache.util import memoized_property
34 from dogpile.cache.util import memoized_property
35
35
36 from pyramid.settings import asbool
37
36 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
38 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
37
39
38
40
39 _default_max_size = 1024
41 _default_max_size = 1024
40
42
41 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
42
44
43
45
44 class LRUMemoryBackend(memory_backend.MemoryBackend):
46 class LRUMemoryBackend(memory_backend.MemoryBackend):
45 key_prefix = 'lru_mem_backend'
47 key_prefix = 'lru_mem_backend'
46 pickle_values = False
48 pickle_values = False
47
49
48 def __init__(self, arguments):
50 def __init__(self, arguments):
49 max_size = arguments.pop('max_size', _default_max_size)
51 max_size = arguments.pop('max_size', _default_max_size)
50
52
51 LRUDictClass = LRUDict
53 LRUDictClass = LRUDict
52 if arguments.pop('log_key_count', None):
54 if arguments.pop('log_key_count', None):
53 LRUDictClass = LRUDictDebug
55 LRUDictClass = LRUDictDebug
54
56
55 arguments['cache_dict'] = LRUDictClass(max_size)
57 arguments['cache_dict'] = LRUDictClass(max_size)
56 super(LRUMemoryBackend, self).__init__(arguments)
58 super(LRUMemoryBackend, self).__init__(arguments)
57
59
58 def delete(self, key):
60 def delete(self, key):
59 try:
61 try:
60 del self._cache[key]
62 del self._cache[key]
61 except KeyError:
63 except KeyError:
62 # we don't care if key isn't there at deletion
64 # we don't care if key isn't there at deletion
63 pass
65 pass
64
66
65 def delete_multi(self, keys):
67 def delete_multi(self, keys):
66 for key in keys:
68 for key in keys:
67 self.delete(key)
69 self.delete(key)
68
70
69
71
70 class PickleSerializer(object):
72 class PickleSerializer(object):
71
73
72 def _dumps(self, value, safe=False):
74 def _dumps(self, value, safe=False):
73 try:
75 try:
74 return compat.pickle.dumps(value)
76 return compat.pickle.dumps(value)
75 except Exception:
77 except Exception:
76 if safe:
78 if safe:
77 return NO_VALUE
79 return NO_VALUE
78 else:
80 else:
79 raise
81 raise
80
82
81 def _loads(self, value, safe=True):
83 def _loads(self, value, safe=True):
82 try:
84 try:
83 return compat.pickle.loads(value)
85 return compat.pickle.loads(value)
84 except Exception:
86 except Exception:
85 if safe:
87 if safe:
86 return NO_VALUE
88 return NO_VALUE
87 else:
89 else:
88 raise
90 raise
89
91
90
92
91 class MsgPackSerializer(object):
93 class MsgPackSerializer(object):
92
94
93 def _dumps(self, value, safe=False):
95 def _dumps(self, value, safe=False):
94 try:
96 try:
95 return msgpack.packb(value)
97 return msgpack.packb(value)
96 except Exception:
98 except Exception:
97 if safe:
99 if safe:
98 return NO_VALUE
100 return NO_VALUE
99 else:
101 else:
100 raise
102 raise
101
103
102 def _loads(self, value, safe=True):
104 def _loads(self, value, safe=True):
103 """
105 """
104 pickle maintained the `CachedValue` wrapper of the tuple
106 pickle maintained the `CachedValue` wrapper of the tuple
105 msgpack does not, so it must be added back in.
107 msgpack does not, so it must be added back in.
106 """
108 """
107 try:
109 try:
108 value = msgpack.unpackb(value, use_list=False)
110 value = msgpack.unpackb(value, use_list=False)
109 return CachedValue(*value)
111 return CachedValue(*value)
110 except Exception:
112 except Exception:
111 if safe:
113 if safe:
112 return NO_VALUE
114 return NO_VALUE
113 else:
115 else:
114 raise
116 raise
115
117
116
118
117 import fcntl
119 import fcntl
118 flock_org = fcntl.flock
120 flock_org = fcntl.flock
119
121
120
122
121 class CustomLockFactory(FileLock):
123 class CustomLockFactory(FileLock):
122
124
123 @memoized_property
125 @memoized_property
124 def _module(self):
126 def _module(self):
125
127
126 def gevent_flock(fd, operation):
128 def gevent_flock(fd, operation):
127 """
129 """
128 Gevent compatible flock
130 Gevent compatible flock
129 """
131 """
130 # set non-blocking, this will cause an exception if we cannot acquire a lock
132 # set non-blocking, this will cause an exception if we cannot acquire a lock
131 operation |= fcntl.LOCK_NB
133 operation |= fcntl.LOCK_NB
132 start_lock_time = time.time()
134 start_lock_time = time.time()
133 timeout = 60 * 15 # 15min
135 timeout = 60 * 15 # 15min
134 while True:
136 while True:
135 try:
137 try:
136 flock_org(fd, operation)
138 flock_org(fd, operation)
137 # lock has been acquired
139 # lock has been acquired
138 break
140 break
139 except (OSError, IOError) as e:
141 except (OSError, IOError) as e:
140 # raise on other errors than Resource temporarily unavailable
142 # raise on other errors than Resource temporarily unavailable
141 if e.errno != errno.EAGAIN:
143 if e.errno != errno.EAGAIN:
142 raise
144 raise
143 elif (time.time() - start_lock_time) > timeout:
145 elif (time.time() - start_lock_time) > timeout:
144 # waited to much time on a lock, better fail than loop for ever
146 # waited to much time on a lock, better fail than loop for ever
145 log.error('Failed to acquire lock on `%s` after waiting %ss',
147 log.error('Failed to acquire lock on `%s` after waiting %ss',
146 self.filename, timeout)
148 self.filename, timeout)
147 raise
149 raise
148 wait_timeout = 0.03
150 wait_timeout = 0.03
149 log.debug('Failed to acquire lock on `%s`, retry in %ss',
151 log.debug('Failed to acquire lock on `%s`, retry in %ss',
150 self.filename, wait_timeout)
152 self.filename, wait_timeout)
151 gevent.sleep(wait_timeout)
153 gevent.sleep(wait_timeout)
152
154
153 fcntl.flock = gevent_flock
155 fcntl.flock = gevent_flock
154 return fcntl
156 return fcntl
155
157
156
158
157 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
159 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
158 key_prefix = 'file_backend'
160 key_prefix = 'file_backend'
159
161
160 def __init__(self, arguments):
162 def __init__(self, arguments):
161 arguments['lock_factory'] = CustomLockFactory
163 arguments['lock_factory'] = CustomLockFactory
162 db_file = arguments.get('filename')
164 db_file = arguments.get('filename')
163
165
164 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
166 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
165 try:
167 try:
166 super(FileNamespaceBackend, self).__init__(arguments)
168 super(FileNamespaceBackend, self).__init__(arguments)
167 except Exception:
169 except Exception:
168 log.error('Failed to initialize db at: %s', db_file)
170 log.error('Failed to initialize db at: %s', db_file)
169 raise
171 raise
170
172
171 def __repr__(self):
173 def __repr__(self):
172 return '{} `{}`'.format(self.__class__, self.filename)
174 return '{} `{}`'.format(self.__class__, self.filename)
173
175
174 def list_keys(self, prefix=''):
176 def list_keys(self, prefix=''):
175 prefix = '{}:{}'.format(self.key_prefix, prefix)
177 prefix = '{}:{}'.format(self.key_prefix, prefix)
176
178
177 def cond(v):
179 def cond(v):
178 if not prefix:
180 if not prefix:
179 return True
181 return True
180
182
181 if v.startswith(prefix):
183 if v.startswith(prefix):
182 return True
184 return True
183 return False
185 return False
184
186
185 with self._dbm_file(True) as dbm:
187 with self._dbm_file(True) as dbm:
186 try:
188 try:
187 return filter(cond, dbm.keys())
189 return filter(cond, dbm.keys())
188 except Exception:
190 except Exception:
189 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
191 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
190 raise
192 raise
191
193
192 def get_store(self):
194 def get_store(self):
193 return self.filename
195 return self.filename
194
196
195 def _dbm_get(self, key):
197 def _dbm_get(self, key):
196 with self._dbm_file(False) as dbm:
198 with self._dbm_file(False) as dbm:
197 if hasattr(dbm, 'get'):
199 if hasattr(dbm, 'get'):
198 value = dbm.get(key, NO_VALUE)
200 value = dbm.get(key, NO_VALUE)
199 else:
201 else:
200 # gdbm objects lack a .get method
202 # gdbm objects lack a .get method
201 try:
203 try:
202 value = dbm[key]
204 value = dbm[key]
203 except KeyError:
205 except KeyError:
204 value = NO_VALUE
206 value = NO_VALUE
205 if value is not NO_VALUE:
207 if value is not NO_VALUE:
206 value = self._loads(value)
208 value = self._loads(value)
207 return value
209 return value
208
210
209 def get(self, key):
211 def get(self, key):
210 try:
212 try:
211 return self._dbm_get(key)
213 return self._dbm_get(key)
212 except Exception:
214 except Exception:
213 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
215 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
214 raise
216 raise
215
217
216 def set(self, key, value):
218 def set(self, key, value):
217 with self._dbm_file(True) as dbm:
219 with self._dbm_file(True) as dbm:
218 dbm[key] = self._dumps(value)
220 dbm[key] = self._dumps(value)
219
221
220 def set_multi(self, mapping):
222 def set_multi(self, mapping):
221 with self._dbm_file(True) as dbm:
223 with self._dbm_file(True) as dbm:
222 for key, value in mapping.items():
224 for key, value in mapping.items():
223 dbm[key] = self._dumps(value)
225 dbm[key] = self._dumps(value)
224
226
225
227
226 class BaseRedisBackend(redis_backend.RedisBackend):
228 class BaseRedisBackend(redis_backend.RedisBackend):
229 key_prefix = ''
230
231 def __init__(self, arguments):
232 super(BaseRedisBackend, self).__init__(arguments)
233 self._lock_timeout = self.lock_timeout
234 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
235
236 if self._lock_auto_renewal and not self._lock_timeout:
237 # set default timeout for auto_renewal
238 self._lock_timeout = 30
227
239
228 def _create_client(self):
240 def _create_client(self):
229 args = {}
241 args = {}
230
242
231 if self.url is not None:
243 if self.url is not None:
232 args.update(url=self.url)
244 args.update(url=self.url)
233
245
234 else:
246 else:
235 args.update(
247 args.update(
236 host=self.host, password=self.password,
248 host=self.host, password=self.password,
237 port=self.port, db=self.db
249 port=self.port, db=self.db
238 )
250 )
239
251
240 connection_pool = redis.ConnectionPool(**args)
252 connection_pool = redis.ConnectionPool(**args)
241
253
242 return redis.StrictRedis(connection_pool=connection_pool)
254 return redis.StrictRedis(connection_pool=connection_pool)
243
255
244 def list_keys(self, prefix=''):
256 def list_keys(self, prefix=''):
245 prefix = '{}:{}*'.format(self.key_prefix, prefix)
257 prefix = '{}:{}*'.format(self.key_prefix, prefix)
246 return self.client.keys(prefix)
258 return self.client.keys(prefix)
247
259
248 def get_store(self):
260 def get_store(self):
249 return self.client.connection_pool
261 return self.client.connection_pool
250
262
251 def get(self, key):
263 def get(self, key):
252 value = self.client.get(key)
264 value = self.client.get(key)
253 if value is None:
265 if value is None:
254 return NO_VALUE
266 return NO_VALUE
255 return self._loads(value)
267 return self._loads(value)
256
268
257 def get_multi(self, keys):
269 def get_multi(self, keys):
258 if not keys:
270 if not keys:
259 return []
271 return []
260 values = self.client.mget(keys)
272 values = self.client.mget(keys)
261 loads = self._loads
273 loads = self._loads
262 return [
274 return [
263 loads(v) if v is not None else NO_VALUE
275 loads(v) if v is not None else NO_VALUE
264 for v in values]
276 for v in values]
265
277
266 def set(self, key, value):
278 def set(self, key, value):
267 if self.redis_expiration_time:
279 if self.redis_expiration_time:
268 self.client.setex(key, self.redis_expiration_time,
280 self.client.setex(key, self.redis_expiration_time,
269 self._dumps(value))
281 self._dumps(value))
270 else:
282 else:
271 self.client.set(key, self._dumps(value))
283 self.client.set(key, self._dumps(value))
272
284
273 def set_multi(self, mapping):
285 def set_multi(self, mapping):
274 dumps = self._dumps
286 dumps = self._dumps
275 mapping = dict(
287 mapping = dict(
276 (k, dumps(v))
288 (k, dumps(v))
277 for k, v in mapping.items()
289 for k, v in mapping.items()
278 )
290 )
279
291
280 if not self.redis_expiration_time:
292 if not self.redis_expiration_time:
281 self.client.mset(mapping)
293 self.client.mset(mapping)
282 else:
294 else:
283 pipe = self.client.pipeline()
295 pipe = self.client.pipeline()
284 for key, value in mapping.items():
296 for key, value in mapping.items():
285 pipe.setex(key, self.redis_expiration_time, value)
297 pipe.setex(key, self.redis_expiration_time, value)
286 pipe.execute()
298 pipe.execute()
287
299
288 def get_mutex(self, key):
300 def get_mutex(self, key):
289 if self.distributed_lock:
301 if self.distributed_lock:
290 import redis_lock
291 lock_key = redis_backend.u('_lock_{0}').format(key)
302 lock_key = redis_backend.u('_lock_{0}').format(key)
292 log.debug('Trying to acquire Redis lock for key %s', lock_key)
303 log.debug('Trying to acquire Redis lock for key %s', lock_key)
293 lock = redis_lock.Lock(
304 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
294 redis_client=self.client,
305 auto_renewal=self._lock_auto_renewal)
295 name=lock_key,
296 expire=self.lock_timeout,
297 auto_renewal=False,
298 strict=True,
299 )
300 return lock
301 else:
306 else:
302 return None
307 return None
303
308
304
309
305 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
310 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
306 key_prefix = 'redis_pickle_backend'
311 key_prefix = 'redis_pickle_backend'
307 pass
312 pass
308
313
309
314
310 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
315 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
311 key_prefix = 'redis_msgpack_backend'
316 key_prefix = 'redis_msgpack_backend'
312 pass
317 pass
318
319
320 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
321 import redis_lock
322
323 class _RedisLockWrapper(object):
324 """LockWrapper for redis_lock"""
325
326 @classmethod
327 def get_lock(cls):
328 return redis_lock.Lock(
329 redis_client=client,
330 name=lock_key,
331 expire=lock_timeout,
332 auto_renewal=auto_renewal,
333 strict=True,
334 )
335
336 def __init__(self):
337 self.lock = self.get_lock()
338
339 def acquire(self, wait=True):
340 try:
341 return self.lock.acquire(wait)
342 except redis_lock.AlreadyAcquired:
343 return False
344 except redis_lock.AlreadyStarted:
345 # refresh thread exists, but it also means we acquired the lock
346 return True
347
348 def release(self):
349 try:
350 self.lock.release()
351 except redis_lock.NotAcquired:
352 pass
353
354 return _RedisLockWrapper()
@@ -1,415 +1,418 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2020 RhodeCode GmbH
3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import os
20 import os
21 import time
21 import time
22 import logging
22 import logging
23 import functools
23 import functools
24 import threading
24 import threading
25
25
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27 from dogpile.cache.util import compat
27 from dogpile.cache.util import compat
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.utils import safe_str, sha1
30 from rhodecode.lib.utils import safe_str, sha1
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 from rhodecode.model.db import Session, CacheKey, IntegrityError
32 from rhodecode.model.db import Session, CacheKey, IntegrityError
33
33
34 from rhodecode.lib.rc_cache import cache_key_meta
34 from rhodecode.lib.rc_cache import cache_key_meta
35 from rhodecode.lib.rc_cache import region_meta
35 from rhodecode.lib.rc_cache import region_meta
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 def isCython(func):
40 def isCython(func):
41 """
41 """
42 Private helper that checks if a function is a cython function.
42 Private helper that checks if a function is a cython function.
43 """
43 """
44 return func.__class__.__name__ == 'cython_function_or_method'
44 return func.__class__.__name__ == 'cython_function_or_method'
45
45
46
46
47 class RhodeCodeCacheRegion(CacheRegion):
47 class RhodeCodeCacheRegion(CacheRegion):
48
48
49 def conditional_cache_on_arguments(
49 def conditional_cache_on_arguments(
50 self, namespace=None,
50 self, namespace=None,
51 expiration_time=None,
51 expiration_time=None,
52 should_cache_fn=None,
52 should_cache_fn=None,
53 to_str=compat.string_type,
53 to_str=compat.string_type,
54 function_key_generator=None,
54 function_key_generator=None,
55 condition=True):
55 condition=True):
56 """
56 """
57 Custom conditional decorator, that will not touch any dogpile internals if
57 Custom conditional decorator, that will not touch any dogpile internals if
58 condition isn't meet. This works a bit different than should_cache_fn
58 condition isn't meet. This works a bit different than should_cache_fn
59 And it's faster in cases we don't ever want to compute cached values
59 And it's faster in cases we don't ever want to compute cached values
60 """
60 """
61 expiration_time_is_callable = compat.callable(expiration_time)
61 expiration_time_is_callable = compat.callable(expiration_time)
62
62
63 if function_key_generator is None:
63 if function_key_generator is None:
64 function_key_generator = self.function_key_generator
64 function_key_generator = self.function_key_generator
65
65
66 # workaround for py2 and cython problems, this block should be removed
66 # workaround for py2 and cython problems, this block should be removed
67 # once we've migrated to py3
67 # once we've migrated to py3
68 if 'cython' == 'cython':
68 if 'cython' == 'cython':
69 def decorator(fn):
69 def decorator(fn):
70 if to_str is compat.string_type:
70 if to_str is compat.string_type:
71 # backwards compatible
71 # backwards compatible
72 key_generator = function_key_generator(namespace, fn)
72 key_generator = function_key_generator(namespace, fn)
73 else:
73 else:
74 key_generator = function_key_generator(namespace, fn, to_str=to_str)
74 key_generator = function_key_generator(namespace, fn, to_str=to_str)
75
75
76 @functools.wraps(fn)
76 @functools.wraps(fn)
77 def decorate(*arg, **kw):
77 def decorate(*arg, **kw):
78 key = key_generator(*arg, **kw)
78 key = key_generator(*arg, **kw)
79
79
80 @functools.wraps(fn)
80 @functools.wraps(fn)
81 def creator():
81 def creator():
82 return fn(*arg, **kw)
82 return fn(*arg, **kw)
83
83
84 if not condition:
84 if not condition:
85 return creator()
85 return creator()
86
86
87 timeout = expiration_time() if expiration_time_is_callable \
87 timeout = expiration_time() if expiration_time_is_callable \
88 else expiration_time
88 else expiration_time
89
89
90 return self.get_or_create(key, creator, timeout, should_cache_fn)
90 return self.get_or_create(key, creator, timeout, should_cache_fn)
91
91
92 def invalidate(*arg, **kw):
92 def invalidate(*arg, **kw):
93 key = key_generator(*arg, **kw)
93 key = key_generator(*arg, **kw)
94 self.delete(key)
94 self.delete(key)
95
95
96 def set_(value, *arg, **kw):
96 def set_(value, *arg, **kw):
97 key = key_generator(*arg, **kw)
97 key = key_generator(*arg, **kw)
98 self.set(key, value)
98 self.set(key, value)
99
99
100 def get(*arg, **kw):
100 def get(*arg, **kw):
101 key = key_generator(*arg, **kw)
101 key = key_generator(*arg, **kw)
102 return self.get(key)
102 return self.get(key)
103
103
104 def refresh(*arg, **kw):
104 def refresh(*arg, **kw):
105 key = key_generator(*arg, **kw)
105 key = key_generator(*arg, **kw)
106 value = fn(*arg, **kw)
106 value = fn(*arg, **kw)
107 self.set(key, value)
107 self.set(key, value)
108 return value
108 return value
109
109
110 decorate.set = set_
110 decorate.set = set_
111 decorate.invalidate = invalidate
111 decorate.invalidate = invalidate
112 decorate.refresh = refresh
112 decorate.refresh = refresh
113 decorate.get = get
113 decorate.get = get
114 decorate.original = fn
114 decorate.original = fn
115 decorate.key_generator = key_generator
115 decorate.key_generator = key_generator
116 decorate.__wrapped__ = fn
116 decorate.__wrapped__ = fn
117
117
118 return decorate
118 return decorate
119 return decorator
119 return decorator
120
120
121 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
121 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
122
122
123 if not condition:
123 if not condition:
124 log.debug('Calling un-cached func:%s', user_func.func_name)
124 log.debug('Calling un-cached func:%s', user_func.func_name)
125 return user_func(*arg, **kw)
125 return user_func(*arg, **kw)
126
126
127 key = key_generator(*arg, **kw)
127 key = key_generator(*arg, **kw)
128
128
129 timeout = expiration_time() if expiration_time_is_callable \
129 timeout = expiration_time() if expiration_time_is_callable \
130 else expiration_time
130 else expiration_time
131
131
132 log.debug('Calling cached fn:%s', user_func.func_name)
132 log.debug('Calling cached fn:%s', user_func.func_name)
133 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
133 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
134
134
135 def cache_decorator(user_func):
135 def cache_decorator(user_func):
136 if to_str is compat.string_type:
136 if to_str is compat.string_type:
137 # backwards compatible
137 # backwards compatible
138 key_generator = function_key_generator(namespace, user_func)
138 key_generator = function_key_generator(namespace, user_func)
139 else:
139 else:
140 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
140 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
141
141
142 def refresh(*arg, **kw):
142 def refresh(*arg, **kw):
143 """
143 """
144 Like invalidate, but regenerates the value instead
144 Like invalidate, but regenerates the value instead
145 """
145 """
146 key = key_generator(*arg, **kw)
146 key = key_generator(*arg, **kw)
147 value = user_func(*arg, **kw)
147 value = user_func(*arg, **kw)
148 self.set(key, value)
148 self.set(key, value)
149 return value
149 return value
150
150
151 def invalidate(*arg, **kw):
151 def invalidate(*arg, **kw):
152 key = key_generator(*arg, **kw)
152 key = key_generator(*arg, **kw)
153 self.delete(key)
153 self.delete(key)
154
154
155 def set_(value, *arg, **kw):
155 def set_(value, *arg, **kw):
156 key = key_generator(*arg, **kw)
156 key = key_generator(*arg, **kw)
157 self.set(key, value)
157 self.set(key, value)
158
158
159 def get(*arg, **kw):
159 def get(*arg, **kw):
160 key = key_generator(*arg, **kw)
160 key = key_generator(*arg, **kw)
161 return self.get(key)
161 return self.get(key)
162
162
163 user_func.set = set_
163 user_func.set = set_
164 user_func.invalidate = invalidate
164 user_func.invalidate = invalidate
165 user_func.get = get
165 user_func.get = get
166 user_func.refresh = refresh
166 user_func.refresh = refresh
167 user_func.key_generator = key_generator
167 user_func.key_generator = key_generator
168 user_func.original = user_func
168 user_func.original = user_func
169
169
170 # Use `decorate` to preserve the signature of :param:`user_func`.
170 # Use `decorate` to preserve the signature of :param:`user_func`.
171 return decorator.decorate(user_func, functools.partial(
171 return decorator.decorate(user_func, functools.partial(
172 get_or_create_for_user_func, key_generator))
172 get_or_create_for_user_func, key_generator))
173
173
174 return cache_decorator
174 return cache_decorator
175
175
176
176
177 def make_region(*arg, **kw):
177 def make_region(*arg, **kw):
178 return RhodeCodeCacheRegion(*arg, **kw)
178 return RhodeCodeCacheRegion(*arg, **kw)
179
179
180
180
181 def get_default_cache_settings(settings, prefixes=None):
181 def get_default_cache_settings(settings, prefixes=None):
182 prefixes = prefixes or []
182 prefixes = prefixes or []
183 cache_settings = {}
183 cache_settings = {}
184 for key in settings.keys():
184 for key in settings.keys():
185 for prefix in prefixes:
185 for prefix in prefixes:
186 if key.startswith(prefix):
186 if key.startswith(prefix):
187 name = key.split(prefix)[1].strip()
187 name = key.split(prefix)[1].strip()
188 val = settings[key]
188 val = settings[key]
189 if isinstance(val, compat.string_types):
189 if isinstance(val, compat.string_types):
190 val = val.strip()
190 val = val.strip()
191 cache_settings[name] = val
191 cache_settings[name] = val
192 return cache_settings
192 return cache_settings
193
193
194
194
195 def compute_key_from_params(*args):
195 def compute_key_from_params(*args):
196 """
196 """
197 Helper to compute key from given params to be used in cache manager
197 Helper to compute key from given params to be used in cache manager
198 """
198 """
199 return sha1("_".join(map(safe_str, args)))
199 return sha1("_".join(map(safe_str, args)))
200
200
201
201
202 def backend_key_generator(backend):
202 def backend_key_generator(backend):
203 """
203 """
204 Special wrapper that also sends over the backend to the key generator
204 Special wrapper that also sends over the backend to the key generator
205 """
205 """
206 def wrapper(namespace, fn):
206 def wrapper(namespace, fn):
207 return key_generator(backend, namespace, fn)
207 return key_generator(backend, namespace, fn)
208 return wrapper
208 return wrapper
209
209
210
210
211 def key_generator(backend, namespace, fn):
211 def key_generator(backend, namespace, fn):
212 fname = fn.__name__
212 fname = fn.__name__
213
213
214 def generate_key(*args):
214 def generate_key(*args):
215 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
215 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
216 namespace_pref = namespace or 'default_namespace'
216 namespace_pref = namespace or 'default_namespace'
217 arg_key = compute_key_from_params(*args)
217 arg_key = compute_key_from_params(*args)
218 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
218 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
219
219
220 return final_key
220 return final_key
221
221
222 return generate_key
222 return generate_key
223
223
224
224
225 def get_or_create_region(region_name, region_namespace=None):
225 def get_or_create_region(region_name, region_namespace=None):
226 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
226 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
227 region_obj = region_meta.dogpile_cache_regions.get(region_name)
227 region_obj = region_meta.dogpile_cache_regions.get(region_name)
228 if not region_obj:
228 if not region_obj:
229 raise EnvironmentError(
229 raise EnvironmentError(
230 'Region `{}` not in configured: {}.'.format(
230 'Region `{}` not in configured: {}.'.format(
231 region_name, region_meta.dogpile_cache_regions.keys()))
231 region_name, region_meta.dogpile_cache_regions.keys()))
232
232
233 region_uid_name = '{}:{}'.format(region_name, region_namespace)
233 region_uid_name = '{}:{}'.format(region_name, region_namespace)
234 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
234 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
235 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
235 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
236 if region_exist:
236 if region_exist:
237 log.debug('Using already configured region: %s', region_namespace)
237 log.debug('Using already configured region: %s', region_namespace)
238 return region_exist
238 return region_exist
239 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
239 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
240 expiration_time = region_obj.expiration_time
240 expiration_time = region_obj.expiration_time
241
241
242 if not os.path.isdir(cache_dir):
242 if not os.path.isdir(cache_dir):
243 os.makedirs(cache_dir)
243 os.makedirs(cache_dir)
244 new_region = make_region(
244 new_region = make_region(
245 name=region_uid_name,
245 name=region_uid_name,
246 function_key_generator=backend_key_generator(region_obj.actual_backend)
246 function_key_generator=backend_key_generator(region_obj.actual_backend)
247 )
247 )
248 namespace_filename = os.path.join(
248 namespace_filename = os.path.join(
249 cache_dir, "{}.cache.dbm".format(region_namespace))
249 cache_dir, "{}.cache.dbm".format(region_namespace))
250 # special type that allows 1db per namespace
250 # special type that allows 1db per namespace
251 new_region.configure(
251 new_region.configure(
252 backend='dogpile.cache.rc.file_namespace',
252 backend='dogpile.cache.rc.file_namespace',
253 expiration_time=expiration_time,
253 expiration_time=expiration_time,
254 arguments={"filename": namespace_filename}
254 arguments={"filename": namespace_filename}
255 )
255 )
256
256
257 # create and save in region caches
257 # create and save in region caches
258 log.debug('configuring new region: %s', region_uid_name)
258 log.debug('configuring new region: %s', region_uid_name)
259 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
259 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
260
260
261 return region_obj
261 return region_obj
262
262
263
263
264 def clear_cache_namespace(cache_region, cache_namespace_uid):
264 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
265 region = get_or_create_region(cache_region, cache_namespace_uid)
265 region = get_or_create_region(cache_region, cache_namespace_uid)
266 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
266 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
267 num_delete_keys = len(cache_keys)
267 num_delete_keys = len(cache_keys)
268 if num_delete_keys:
268 if invalidate:
269 region.delete_multi(cache_keys)
269 region.invalidate(hard=False)
270 else:
271 if num_delete_keys:
272 region.delete_multi(cache_keys)
270 return num_delete_keys
273 return num_delete_keys
271
274
272
275
273 class ActiveRegionCache(object):
276 class ActiveRegionCache(object):
274 def __init__(self, context, cache_data):
277 def __init__(self, context, cache_data):
275 self.context = context
278 self.context = context
276 self.cache_data = cache_data
279 self.cache_data = cache_data
277
280
278 def should_invalidate(self):
281 def should_invalidate(self):
279 return False
282 return False
280
283
281
284
282 class FreshRegionCache(object):
285 class FreshRegionCache(object):
283 def __init__(self, context, cache_data):
286 def __init__(self, context, cache_data):
284 self.context = context
287 self.context = context
285 self.cache_data = cache_data
288 self.cache_data = cache_data
286
289
287 def should_invalidate(self):
290 def should_invalidate(self):
288 return True
291 return True
289
292
290
293
291 class InvalidationContext(object):
294 class InvalidationContext(object):
292 """
295 """
293 usage::
296 usage::
294
297
295 from rhodecode.lib import rc_cache
298 from rhodecode.lib import rc_cache
296
299
297 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
300 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
298 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
301 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
299
302
300 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
303 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
301 def heavy_compute(cache_name, param1, param2):
304 def heavy_compute(cache_name, param1, param2):
302 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
305 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
303
306
304 # invalidation namespace is shared namespace key for all process caches
307 # invalidation namespace is shared namespace key for all process caches
305 # we use it to send a global signal
308 # we use it to send a global signal
306 invalidation_namespace = 'repo_cache:1'
309 invalidation_namespace = 'repo_cache:1'
307
310
308 inv_context_manager = rc_cache.InvalidationContext(
311 inv_context_manager = rc_cache.InvalidationContext(
309 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
312 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
310 with inv_context_manager as invalidation_context:
313 with inv_context_manager as invalidation_context:
311 args = ('one', 'two')
314 args = ('one', 'two')
312 # re-compute and store cache if we get invalidate signal
315 # re-compute and store cache if we get invalidate signal
313 if invalidation_context.should_invalidate():
316 if invalidation_context.should_invalidate():
314 result = heavy_compute.refresh(*args)
317 result = heavy_compute.refresh(*args)
315 else:
318 else:
316 result = heavy_compute(*args)
319 result = heavy_compute(*args)
317
320
318 compute_time = inv_context_manager.compute_time
321 compute_time = inv_context_manager.compute_time
319 log.debug('result computed in %.4fs', compute_time)
322 log.debug('result computed in %.4fs', compute_time)
320
323
321 # To send global invalidation signal, simply run
324 # To send global invalidation signal, simply run
322 CacheKey.set_invalidate(invalidation_namespace)
325 CacheKey.set_invalidate(invalidation_namespace)
323
326
324 """
327 """
325
328
326 def __repr__(self):
329 def __repr__(self):
327 return '<InvalidationContext:{}[{}]>'.format(
330 return '<InvalidationContext:{}[{}]>'.format(
328 safe_str(self.cache_key), safe_str(self.uid))
331 safe_str(self.cache_key), safe_str(self.uid))
329
332
330 def __init__(self, uid, invalidation_namespace='',
333 def __init__(self, uid, invalidation_namespace='',
331 raise_exception=False, thread_scoped=None):
334 raise_exception=False, thread_scoped=None):
332 self.uid = uid
335 self.uid = uid
333 self.invalidation_namespace = invalidation_namespace
336 self.invalidation_namespace = invalidation_namespace
334 self.raise_exception = raise_exception
337 self.raise_exception = raise_exception
335 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
338 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
336 self.thread_id = 'global'
339 self.thread_id = 'global'
337
340
338 if thread_scoped is None:
341 if thread_scoped is None:
339 # if we set "default" we can override this via .ini settings
342 # if we set "default" we can override this via .ini settings
340 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
343 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
341
344
342 # Append the thread id to the cache key if this invalidation context
345 # Append the thread id to the cache key if this invalidation context
343 # should be scoped to the current thread.
346 # should be scoped to the current thread.
344 if thread_scoped is True:
347 if thread_scoped is True:
345 self.thread_id = threading.current_thread().ident
348 self.thread_id = threading.current_thread().ident
346
349
347 self.cache_key = compute_key_from_params(uid)
350 self.cache_key = compute_key_from_params(uid)
348 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
351 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
349 self.proc_id, self.thread_id, self.cache_key)
352 self.proc_id, self.thread_id, self.cache_key)
350 self.compute_time = 0
353 self.compute_time = 0
351
354
352 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
355 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
353 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
356 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
354 # fetch all cache keys for this namespace and convert them to a map to find if we
357 # fetch all cache keys for this namespace and convert them to a map to find if we
355 # have specific cache_key object registered. We do this because we want to have
358 # have specific cache_key object registered. We do this because we want to have
356 # all consistent cache_state_uid for newly registered objects
359 # all consistent cache_state_uid for newly registered objects
357 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
360 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
358 cache_obj = cache_obj_map.get(self.cache_key)
361 cache_obj = cache_obj_map.get(self.cache_key)
359 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
362 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
360 if not cache_obj:
363 if not cache_obj:
361 new_cache_args = invalidation_namespace
364 new_cache_args = invalidation_namespace
362 first_cache_obj = next(cache_obj_map.itervalues()) if cache_obj_map else None
365 first_cache_obj = next(cache_obj_map.itervalues()) if cache_obj_map else None
363 cache_state_uid = None
366 cache_state_uid = None
364 if first_cache_obj:
367 if first_cache_obj:
365 cache_state_uid = first_cache_obj.cache_state_uid
368 cache_state_uid = first_cache_obj.cache_state_uid
366 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
369 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
367 cache_state_uid=cache_state_uid)
370 cache_state_uid=cache_state_uid)
368 cache_key_meta.cache_keys_by_pid.append(self.cache_key)
371 cache_key_meta.cache_keys_by_pid.append(self.cache_key)
369
372
370 return cache_obj
373 return cache_obj
371
374
372 def __enter__(self):
375 def __enter__(self):
373 """
376 """
374 Test if current object is valid, and return CacheRegion function
377 Test if current object is valid, and return CacheRegion function
375 that does invalidation and calculation
378 that does invalidation and calculation
376 """
379 """
377 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
380 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
378 # register or get a new key based on uid
381 # register or get a new key based on uid
379 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
382 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
380 cache_data = self.cache_obj.get_dict()
383 cache_data = self.cache_obj.get_dict()
381 self._start_time = time.time()
384 self._start_time = time.time()
382 if self.cache_obj.cache_active:
385 if self.cache_obj.cache_active:
383 # means our cache obj is existing and marked as it's
386 # means our cache obj is existing and marked as it's
384 # cache is not outdated, we return ActiveRegionCache
387 # cache is not outdated, we return ActiveRegionCache
385 self.skip_cache_active_change = True
388 self.skip_cache_active_change = True
386
389
387 return ActiveRegionCache(context=self, cache_data=cache_data)
390 return ActiveRegionCache(context=self, cache_data=cache_data)
388
391
389 # the key is either not existing or set to False, we return
392 # the key is either not existing or set to False, we return
390 # the real invalidator which re-computes value. We additionally set
393 # the real invalidator which re-computes value. We additionally set
391 # the flag to actually update the Database objects
394 # the flag to actually update the Database objects
392 self.skip_cache_active_change = False
395 self.skip_cache_active_change = False
393 return FreshRegionCache(context=self, cache_data=cache_data)
396 return FreshRegionCache(context=self, cache_data=cache_data)
394
397
395 def __exit__(self, exc_type, exc_val, exc_tb):
398 def __exit__(self, exc_type, exc_val, exc_tb):
396 # save compute time
399 # save compute time
397 self.compute_time = time.time() - self._start_time
400 self.compute_time = time.time() - self._start_time
398
401
399 if self.skip_cache_active_change:
402 if self.skip_cache_active_change:
400 return
403 return
401
404
402 try:
405 try:
403 self.cache_obj.cache_active = True
406 self.cache_obj.cache_active = True
404 Session().add(self.cache_obj)
407 Session().add(self.cache_obj)
405 Session().commit()
408 Session().commit()
406 except IntegrityError:
409 except IntegrityError:
407 # if we catch integrity error, it means we inserted this object
410 # if we catch integrity error, it means we inserted this object
408 # assumption is that's really an edge race-condition case and
411 # assumption is that's really an edge race-condition case and
409 # it's safe is to skip it
412 # it's safe is to skip it
410 Session().rollback()
413 Session().rollback()
411 except Exception:
414 except Exception:
412 log.exception('Failed to commit on cache key update')
415 log.exception('Failed to commit on cache key update')
413 Session().rollback()
416 Session().rollback()
414 if self.raise_exception:
417 if self.raise_exception:
415 raise
418 raise
@@ -1,2378 +1,2379 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib
32 import urllib
33 import collections
33 import collections
34
34
35 from pyramid import compat
35 from pyramid import compat
36 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
37
37
38 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 from rhodecode.lib import audit_logger
41 from rhodecode.lib import audit_logger
42 from rhodecode.lib.compat import OrderedDict
42 from rhodecode.lib.compat import OrderedDict
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.markup_renderer import (
44 from rhodecode.lib.markup_renderer import (
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 from rhodecode.lib.utils2 import (
46 from rhodecode.lib.utils2 import (
47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 get_current_rhodecode_user)
48 get_current_rhodecode_user)
49 from rhodecode.lib.vcs.backends.base import (
49 from rhodecode.lib.vcs.backends.base import (
50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 TargetRefMissing, SourceRefMissing)
51 TargetRefMissing, SourceRefMissing)
52 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 from rhodecode.lib.vcs.exceptions import (
53 from rhodecode.lib.vcs.exceptions import (
54 CommitDoesNotExistError, EmptyRepositoryError)
54 CommitDoesNotExistError, EmptyRepositoryError)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.comment import CommentsModel
58 from rhodecode.model.db import (
58 from rhodecode.model.db import (
59 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 from rhodecode.model.meta import Session
61 from rhodecode.model.meta import Session
62 from rhodecode.model.notification import NotificationModel, \
62 from rhodecode.model.notification import NotificationModel, \
63 EmailNotificationModel
63 EmailNotificationModel
64 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.scm import ScmModel
65 from rhodecode.model.settings import VcsSettingsModel
65 from rhodecode.model.settings import VcsSettingsModel
66
66
67
67
68 log = logging.getLogger(__name__)
68 log = logging.getLogger(__name__)
69
69
70
70
71 # Data structure to hold the response data when updating commits during a pull
71 # Data structure to hold the response data when updating commits during a pull
72 # request update.
72 # request update.
73 class UpdateResponse(object):
73 class UpdateResponse(object):
74
74
75 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 commit_changes, source_changed, target_changed):
76 commit_changes, source_changed, target_changed):
77
77
78 self.executed = executed
78 self.executed = executed
79 self.reason = reason
79 self.reason = reason
80 self.new = new
80 self.new = new
81 self.old = old
81 self.old = old
82 self.common_ancestor_id = common_ancestor_id
82 self.common_ancestor_id = common_ancestor_id
83 self.changes = commit_changes
83 self.changes = commit_changes
84 self.source_changed = source_changed
84 self.source_changed = source_changed
85 self.target_changed = target_changed
85 self.target_changed = target_changed
86
86
87
87
88 def get_diff_info(
88 def get_diff_info(
89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 get_commit_authors=True):
90 get_commit_authors=True):
91 """
91 """
92 Calculates detailed diff information for usage in preview of creation of a pull-request.
92 Calculates detailed diff information for usage in preview of creation of a pull-request.
93 This is also used for default reviewers logic
93 This is also used for default reviewers logic
94 """
94 """
95
95
96 source_scm = source_repo.scm_instance()
96 source_scm = source_repo.scm_instance()
97 target_scm = target_repo.scm_instance()
97 target_scm = target_repo.scm_instance()
98
98
99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 if not ancestor_id:
100 if not ancestor_id:
101 raise ValueError(
101 raise ValueError(
102 'cannot calculate diff info without a common ancestor. '
102 'cannot calculate diff info without a common ancestor. '
103 'Make sure both repositories are related, and have a common forking commit.')
103 'Make sure both repositories are related, and have a common forking commit.')
104
104
105 # case here is that want a simple diff without incoming commits,
105 # case here is that want a simple diff without incoming commits,
106 # previewing what will be merged based only on commits in the source.
106 # previewing what will be merged based only on commits in the source.
107 log.debug('Using ancestor %s as source_ref instead of %s',
107 log.debug('Using ancestor %s as source_ref instead of %s',
108 ancestor_id, source_ref)
108 ancestor_id, source_ref)
109
109
110 # source of changes now is the common ancestor
110 # source of changes now is the common ancestor
111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 # target commit becomes the source ref as it is the last commit
112 # target commit becomes the source ref as it is the last commit
113 # for diff generation this logic gives proper diff
113 # for diff generation this logic gives proper diff
114 target_commit = source_scm.get_commit(commit_id=source_ref)
114 target_commit = source_scm.get_commit(commit_id=source_ref)
115
115
116 vcs_diff = \
116 vcs_diff = \
117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 ignore_whitespace=False, context=3)
118 ignore_whitespace=False, context=3)
119
119
120 diff_processor = diffs.DiffProcessor(
120 diff_processor = diffs.DiffProcessor(
121 vcs_diff, format='newdiff', diff_limit=None,
121 vcs_diff, format='newdiff', diff_limit=None,
122 file_limit=None, show_full_diff=True)
122 file_limit=None, show_full_diff=True)
123
123
124 _parsed = diff_processor.prepare()
124 _parsed = diff_processor.prepare()
125
125
126 all_files = []
126 all_files = []
127 all_files_changes = []
127 all_files_changes = []
128 changed_lines = {}
128 changed_lines = {}
129 stats = [0, 0]
129 stats = [0, 0]
130 for f in _parsed:
130 for f in _parsed:
131 all_files.append(f['filename'])
131 all_files.append(f['filename'])
132 all_files_changes.append({
132 all_files_changes.append({
133 'filename': f['filename'],
133 'filename': f['filename'],
134 'stats': f['stats']
134 'stats': f['stats']
135 })
135 })
136 stats[0] += f['stats']['added']
136 stats[0] += f['stats']['added']
137 stats[1] += f['stats']['deleted']
137 stats[1] += f['stats']['deleted']
138
138
139 changed_lines[f['filename']] = []
139 changed_lines[f['filename']] = []
140 if len(f['chunks']) < 2:
140 if len(f['chunks']) < 2:
141 continue
141 continue
142 # first line is "context" information
142 # first line is "context" information
143 for chunks in f['chunks'][1:]:
143 for chunks in f['chunks'][1:]:
144 for chunk in chunks['lines']:
144 for chunk in chunks['lines']:
145 if chunk['action'] not in ('del', 'mod'):
145 if chunk['action'] not in ('del', 'mod'):
146 continue
146 continue
147 changed_lines[f['filename']].append(chunk['old_lineno'])
147 changed_lines[f['filename']].append(chunk['old_lineno'])
148
148
149 commit_authors = []
149 commit_authors = []
150 user_counts = {}
150 user_counts = {}
151 email_counts = {}
151 email_counts = {}
152 author_counts = {}
152 author_counts = {}
153 _commit_cache = {}
153 _commit_cache = {}
154
154
155 commits = []
155 commits = []
156 if get_commit_authors:
156 if get_commit_authors:
157 log.debug('Obtaining commit authors from set of commits')
157 log.debug('Obtaining commit authors from set of commits')
158 _compare_data = target_scm.compare(
158 _compare_data = target_scm.compare(
159 target_ref, source_ref, source_scm, merge=True,
159 target_ref, source_ref, source_scm, merge=True,
160 pre_load=["author", "date", "message"]
160 pre_load=["author", "date", "message"]
161 )
161 )
162
162
163 for commit in _compare_data:
163 for commit in _compare_data:
164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
165 # at this function which is later called via JSON serialization
165 # at this function which is later called via JSON serialization
166 serialized_commit = dict(
166 serialized_commit = dict(
167 author=commit.author,
167 author=commit.author,
168 date=commit.date,
168 date=commit.date,
169 message=commit.message,
169 message=commit.message,
170 commit_id=commit.raw_id,
170 commit_id=commit.raw_id,
171 raw_id=commit.raw_id
171 raw_id=commit.raw_id
172 )
172 )
173 commits.append(serialized_commit)
173 commits.append(serialized_commit)
174 user = User.get_from_cs_author(serialized_commit['author'])
174 user = User.get_from_cs_author(serialized_commit['author'])
175 if user and user not in commit_authors:
175 if user and user not in commit_authors:
176 commit_authors.append(user)
176 commit_authors.append(user)
177
177
178 # lines
178 # lines
179 if get_authors:
179 if get_authors:
180 log.debug('Calculating authors of changed files')
180 log.debug('Calculating authors of changed files')
181 target_commit = source_repo.get_commit(ancestor_id)
181 target_commit = source_repo.get_commit(ancestor_id)
182
182
183 for fname, lines in changed_lines.items():
183 for fname, lines in changed_lines.items():
184
184
185 try:
185 try:
186 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 node = target_commit.get_node(fname, pre_load=["is_binary"])
187 except Exception:
187 except Exception:
188 log.exception("Failed to load node with path %s", fname)
188 log.exception("Failed to load node with path %s", fname)
189 continue
189 continue
190
190
191 if not isinstance(node, FileNode):
191 if not isinstance(node, FileNode):
192 continue
192 continue
193
193
194 # NOTE(marcink): for binary node we don't do annotation, just use last author
194 # NOTE(marcink): for binary node we don't do annotation, just use last author
195 if node.is_binary:
195 if node.is_binary:
196 author = node.last_commit.author
196 author = node.last_commit.author
197 email = node.last_commit.author_email
197 email = node.last_commit.author_email
198
198
199 user = User.get_from_cs_author(author)
199 user = User.get_from_cs_author(author)
200 if user:
200 if user:
201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
202 author_counts[author] = author_counts.get(author, 0) + 1
202 author_counts[author] = author_counts.get(author, 0) + 1
203 email_counts[email] = email_counts.get(email, 0) + 1
203 email_counts[email] = email_counts.get(email, 0) + 1
204
204
205 continue
205 continue
206
206
207 for annotation in node.annotate:
207 for annotation in node.annotate:
208 line_no, commit_id, get_commit_func, line_text = annotation
208 line_no, commit_id, get_commit_func, line_text = annotation
209 if line_no in lines:
209 if line_no in lines:
210 if commit_id not in _commit_cache:
210 if commit_id not in _commit_cache:
211 _commit_cache[commit_id] = get_commit_func()
211 _commit_cache[commit_id] = get_commit_func()
212 commit = _commit_cache[commit_id]
212 commit = _commit_cache[commit_id]
213 author = commit.author
213 author = commit.author
214 email = commit.author_email
214 email = commit.author_email
215 user = User.get_from_cs_author(author)
215 user = User.get_from_cs_author(author)
216 if user:
216 if user:
217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
218 author_counts[author] = author_counts.get(author, 0) + 1
218 author_counts[author] = author_counts.get(author, 0) + 1
219 email_counts[email] = email_counts.get(email, 0) + 1
219 email_counts[email] = email_counts.get(email, 0) + 1
220
220
221 log.debug('Default reviewers processing finished')
221 log.debug('Default reviewers processing finished')
222
222
223 return {
223 return {
224 'commits': commits,
224 'commits': commits,
225 'files': all_files_changes,
225 'files': all_files_changes,
226 'stats': stats,
226 'stats': stats,
227 'ancestor': ancestor_id,
227 'ancestor': ancestor_id,
228 # original authors of modified files
228 # original authors of modified files
229 'original_authors': {
229 'original_authors': {
230 'users': user_counts,
230 'users': user_counts,
231 'authors': author_counts,
231 'authors': author_counts,
232 'emails': email_counts,
232 'emails': email_counts,
233 },
233 },
234 'commit_authors': commit_authors
234 'commit_authors': commit_authors
235 }
235 }
236
236
237
237
238 class PullRequestModel(BaseModel):
238 class PullRequestModel(BaseModel):
239
239
240 cls = PullRequest
240 cls = PullRequest
241
241
242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
243
243
244 UPDATE_STATUS_MESSAGES = {
244 UPDATE_STATUS_MESSAGES = {
245 UpdateFailureReason.NONE: lazy_ugettext(
245 UpdateFailureReason.NONE: lazy_ugettext(
246 'Pull request update successful.'),
246 'Pull request update successful.'),
247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
248 'Pull request update failed because of an unknown error.'),
248 'Pull request update failed because of an unknown error.'),
249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
250 'No update needed because the source and target have not changed.'),
250 'No update needed because the source and target have not changed.'),
251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
252 'Pull request cannot be updated because the reference type is '
252 'Pull request cannot be updated because the reference type is '
253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
255 'This pull request cannot be updated because the target '
255 'This pull request cannot be updated because the target '
256 'reference is missing.'),
256 'reference is missing.'),
257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
258 'This pull request cannot be updated because the source '
258 'This pull request cannot be updated because the source '
259 'reference is missing.'),
259 'reference is missing.'),
260 }
260 }
261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263
263
264 def __get_pull_request(self, pull_request):
264 def __get_pull_request(self, pull_request):
265 return self._get_instance((
265 return self._get_instance((
266 PullRequest, PullRequestVersion), pull_request)
266 PullRequest, PullRequestVersion), pull_request)
267
267
268 def _check_perms(self, perms, pull_request, user, api=False):
268 def _check_perms(self, perms, pull_request, user, api=False):
269 if not api:
269 if not api:
270 return h.HasRepoPermissionAny(*perms)(
270 return h.HasRepoPermissionAny(*perms)(
271 user=user, repo_name=pull_request.target_repo.repo_name)
271 user=user, repo_name=pull_request.target_repo.repo_name)
272 else:
272 else:
273 return h.HasRepoPermissionAnyApi(*perms)(
273 return h.HasRepoPermissionAnyApi(*perms)(
274 user=user, repo_name=pull_request.target_repo.repo_name)
274 user=user, repo_name=pull_request.target_repo.repo_name)
275
275
276 def check_user_read(self, pull_request, user, api=False):
276 def check_user_read(self, pull_request, user, api=False):
277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 return self._check_perms(_perms, pull_request, user, api)
278 return self._check_perms(_perms, pull_request, user, api)
279
279
280 def check_user_merge(self, pull_request, user, api=False):
280 def check_user_merge(self, pull_request, user, api=False):
281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 return self._check_perms(_perms, pull_request, user, api)
282 return self._check_perms(_perms, pull_request, user, api)
283
283
284 def check_user_update(self, pull_request, user, api=False):
284 def check_user_update(self, pull_request, user, api=False):
285 owner = user.user_id == pull_request.user_id
285 owner = user.user_id == pull_request.user_id
286 return self.check_user_merge(pull_request, user, api) or owner
286 return self.check_user_merge(pull_request, user, api) or owner
287
287
288 def check_user_delete(self, pull_request, user):
288 def check_user_delete(self, pull_request, user):
289 owner = user.user_id == pull_request.user_id
289 owner = user.user_id == pull_request.user_id
290 _perms = ('repository.admin',)
290 _perms = ('repository.admin',)
291 return self._check_perms(_perms, pull_request, user) or owner
291 return self._check_perms(_perms, pull_request, user) or owner
292
292
293 def is_user_reviewer(self, pull_request, user):
293 def is_user_reviewer(self, pull_request, user):
294 return user.user_id in [
294 return user.user_id in [
295 x.user_id for x in
295 x.user_id for x in
296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 if x.user
297 if x.user
298 ]
298 ]
299
299
300 def check_user_change_status(self, pull_request, user, api=False):
300 def check_user_change_status(self, pull_request, user, api=False):
301 return self.check_user_update(pull_request, user, api) \
301 return self.check_user_update(pull_request, user, api) \
302 or self.is_user_reviewer(pull_request, user)
302 or self.is_user_reviewer(pull_request, user)
303
303
304 def check_user_comment(self, pull_request, user):
304 def check_user_comment(self, pull_request, user):
305 owner = user.user_id == pull_request.user_id
305 owner = user.user_id == pull_request.user_id
306 return self.check_user_read(pull_request, user) or owner
306 return self.check_user_read(pull_request, user) or owner
307
307
308 def get(self, pull_request):
308 def get(self, pull_request):
309 return self.__get_pull_request(pull_request)
309 return self.__get_pull_request(pull_request)
310
310
311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 statuses=None, opened_by=None, order_by=None,
312 statuses=None, opened_by=None, order_by=None,
313 order_dir='desc', only_created=False):
313 order_dir='desc', only_created=False):
314 repo = None
314 repo = None
315 if repo_name:
315 if repo_name:
316 repo = self._get_repo(repo_name)
316 repo = self._get_repo(repo_name)
317
317
318 q = PullRequest.query()
318 q = PullRequest.query()
319
319
320 if search_q:
320 if search_q:
321 like_expression = u'%{}%'.format(safe_unicode(search_q))
321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 q = q.join(User, User.user_id == PullRequest.user_id)
322 q = q.join(User, User.user_id == PullRequest.user_id)
323 q = q.filter(or_(
323 q = q.filter(or_(
324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 User.username.ilike(like_expression),
325 User.username.ilike(like_expression),
326 PullRequest.title.ilike(like_expression),
326 PullRequest.title.ilike(like_expression),
327 PullRequest.description.ilike(like_expression),
327 PullRequest.description.ilike(like_expression),
328 ))
328 ))
329
329
330 # source or target
330 # source or target
331 if repo and source:
331 if repo and source:
332 q = q.filter(PullRequest.source_repo == repo)
332 q = q.filter(PullRequest.source_repo == repo)
333 elif repo:
333 elif repo:
334 q = q.filter(PullRequest.target_repo == repo)
334 q = q.filter(PullRequest.target_repo == repo)
335
335
336 # closed,opened
336 # closed,opened
337 if statuses:
337 if statuses:
338 q = q.filter(PullRequest.status.in_(statuses))
338 q = q.filter(PullRequest.status.in_(statuses))
339
339
340 # opened by filter
340 # opened by filter
341 if opened_by:
341 if opened_by:
342 q = q.filter(PullRequest.user_id.in_(opened_by))
342 q = q.filter(PullRequest.user_id.in_(opened_by))
343
343
344 # only get those that are in "created" state
344 # only get those that are in "created" state
345 if only_created:
345 if only_created:
346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347
347
348 if order_by:
348 order_map = {
349 order_map = {
349 'name_raw': PullRequest.pull_request_id,
350 'name_raw': PullRequest.pull_request_id,
350 'id': PullRequest.pull_request_id,
351 'id': PullRequest.pull_request_id,
351 'title': PullRequest.title,
352 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
353 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
354 'target_repo': PullRequest.target_repo_id
354 }
355 }
355 if order_by and order_by in order_map:
356 if order_dir == 'asc':
356 if order_dir == 'asc':
357 q = q.order_by(order_map[order_by].asc())
357 q = q.order_by(order_map[order_by].asc())
358 else:
358 else:
359 q = q.order_by(order_map[order_by].desc())
359 q = q.order_by(order_map[order_by].desc())
360
360
361 return q
361 return q
362
362
363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 opened_by=None):
364 opened_by=None):
365 """
365 """
366 Count the number of pull requests for a specific repository.
366 Count the number of pull requests for a specific repository.
367
367
368 :param repo_name: target or source repo
368 :param repo_name: target or source repo
369 :param search_q: filter by text
369 :param search_q: filter by text
370 :param source: boolean flag to specify if repo_name refers to source
370 :param source: boolean flag to specify if repo_name refers to source
371 :param statuses: list of pull request statuses
371 :param statuses: list of pull request statuses
372 :param opened_by: author user of the pull request
372 :param opened_by: author user of the pull request
373 :returns: int number of pull requests
373 :returns: int number of pull requests
374 """
374 """
375 q = self._prepare_get_all_query(
375 q = self._prepare_get_all_query(
376 repo_name, search_q=search_q, source=source, statuses=statuses,
376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 opened_by=opened_by)
377 opened_by=opened_by)
378
378
379 return q.count()
379 return q.count()
380
380
381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 """
383 """
384 Get all pull requests for a specific repository.
384 Get all pull requests for a specific repository.
385
385
386 :param repo_name: target or source repo
386 :param repo_name: target or source repo
387 :param search_q: filter by text
387 :param search_q: filter by text
388 :param source: boolean flag to specify if repo_name refers to source
388 :param source: boolean flag to specify if repo_name refers to source
389 :param statuses: list of pull request statuses
389 :param statuses: list of pull request statuses
390 :param opened_by: author user of the pull request
390 :param opened_by: author user of the pull request
391 :param offset: pagination offset
391 :param offset: pagination offset
392 :param length: length of returned list
392 :param length: length of returned list
393 :param order_by: order of the returned list
393 :param order_by: order of the returned list
394 :param order_dir: 'asc' or 'desc' ordering direction
394 :param order_dir: 'asc' or 'desc' ordering direction
395 :returns: list of pull requests
395 :returns: list of pull requests
396 """
396 """
397 q = self._prepare_get_all_query(
397 q = self._prepare_get_all_query(
398 repo_name, search_q=search_q, source=source, statuses=statuses,
398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400
400
401 if length:
401 if length:
402 pull_requests = q.limit(length).offset(offset).all()
402 pull_requests = q.limit(length).offset(offset).all()
403 else:
403 else:
404 pull_requests = q.all()
404 pull_requests = q.all()
405
405
406 return pull_requests
406 return pull_requests
407
407
408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
409 """
409 """
410 Count the number of pull requests for a specific repository that are
410 Count the number of pull requests for a specific repository that are
411 awaiting review.
411 awaiting review.
412
412
413 :param repo_name: target or source repo
413 :param repo_name: target or source repo
414 :param search_q: filter by text
414 :param search_q: filter by text
415 :param statuses: list of pull request statuses
415 :param statuses: list of pull request statuses
416 :returns: int number of pull requests
416 :returns: int number of pull requests
417 """
417 """
418 pull_requests = self.get_awaiting_review(
418 pull_requests = self.get_awaiting_review(
419 repo_name, search_q=search_q, statuses=statuses)
419 repo_name, search_q=search_q, statuses=statuses)
420
420
421 return len(pull_requests)
421 return len(pull_requests)
422
422
423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
424 offset=0, length=None, order_by=None, order_dir='desc'):
424 offset=0, length=None, order_by=None, order_dir='desc'):
425 """
425 """
426 Get all pull requests for a specific repository that are awaiting
426 Get all pull requests for a specific repository that are awaiting
427 review.
427 review.
428
428
429 :param repo_name: target or source repo
429 :param repo_name: target or source repo
430 :param search_q: filter by text
430 :param search_q: filter by text
431 :param statuses: list of pull request statuses
431 :param statuses: list of pull request statuses
432 :param offset: pagination offset
432 :param offset: pagination offset
433 :param length: length of returned list
433 :param length: length of returned list
434 :param order_by: order of the returned list
434 :param order_by: order of the returned list
435 :param order_dir: 'asc' or 'desc' ordering direction
435 :param order_dir: 'asc' or 'desc' ordering direction
436 :returns: list of pull requests
436 :returns: list of pull requests
437 """
437 """
438 pull_requests = self.get_all(
438 pull_requests = self.get_all(
439 repo_name, search_q=search_q, statuses=statuses,
439 repo_name, search_q=search_q, statuses=statuses,
440 order_by=order_by, order_dir=order_dir)
440 order_by=order_by, order_dir=order_dir)
441
441
442 _filtered_pull_requests = []
442 _filtered_pull_requests = []
443 for pr in pull_requests:
443 for pr in pull_requests:
444 status = pr.calculated_review_status()
444 status = pr.calculated_review_status()
445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
446 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 ChangesetStatus.STATUS_UNDER_REVIEW]:
447 _filtered_pull_requests.append(pr)
447 _filtered_pull_requests.append(pr)
448 if length:
448 if length:
449 return _filtered_pull_requests[offset:offset+length]
449 return _filtered_pull_requests[offset:offset+length]
450 else:
450 else:
451 return _filtered_pull_requests
451 return _filtered_pull_requests
452
452
    def _prepare_awaiting_my_review_review_query(
            self, repo_name, user_id, search_q=None, statuses=None,
            order_by=None, order_dir='desc'):
        """
        Build a query of pull requests in `repo_name` that still await a
        review action from `user_id`.

        A pull request qualifies when the reviewer's status row is missing,
        or its status (at the version selected by the min-version subquery)
        is NOT_REVIEWED or UNDER_REVIEW.

        :param repo_name: name of the target repository
        :param user_id: reviewer user id
        :param search_q: optional free-text filter (id, author username,
            title, description)
        :param statuses: optional list of pull request statuses (closed/opened)
        :param order_by: one of the `order_map` keys below; unknown values
            are ignored
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: un-executed SQLAlchemy query object
        """

        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)
        repo_alias = aliased(Repository)

        # correlated subquery picking the relevant status version per
        # (pull request, reviewer) pair
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        # outer join on statuses so reviewers who never voted (status NULL)
        # are still included as "awaiting review"
        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .join(repo_alias,
                  repo_alias.repo_id == pull_request_alias.target_repo_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(repo_alias.repo_name == repo_name) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        # whitelist of sortable columns; unknown order_by values are ignored
        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
515
515
516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
517 """
517 """
518 Count the number of pull requests for a specific repository that are
518 Count the number of pull requests for a specific repository that are
519 awaiting review from a specific user.
519 awaiting review from a specific user.
520
520
521 :param repo_name: target or source repo
521 :param repo_name: target or source repo
522 :param user_id: reviewer user of the pull request
522 :param user_id: reviewer user of the pull request
523 :param search_q: filter by text
523 :param search_q: filter by text
524 :param statuses: list of pull request statuses
524 :param statuses: list of pull request statuses
525 :returns: int number of pull requests
525 :returns: int number of pull requests
526 """
526 """
527 q = self._prepare_awaiting_my_review_review_query(
527 q = self._prepare_awaiting_my_review_review_query(
528 repo_name, user_id, search_q=search_q, statuses=statuses)
528 repo_name, user_id, search_q=search_q, statuses=statuses)
529 return q.count()
529 return q.count()
530
530
531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
532 offset=0, length=None, order_by=None, order_dir='desc'):
532 offset=0, length=None, order_by=None, order_dir='desc'):
533 """
533 """
534 Get all pull requests for a specific repository that are awaiting
534 Get all pull requests for a specific repository that are awaiting
535 review from a specific user.
535 review from a specific user.
536
536
537 :param repo_name: target or source repo
537 :param repo_name: target or source repo
538 :param user_id: reviewer user of the pull request
538 :param user_id: reviewer user of the pull request
539 :param search_q: filter by text
539 :param search_q: filter by text
540 :param statuses: list of pull request statuses
540 :param statuses: list of pull request statuses
541 :param offset: pagination offset
541 :param offset: pagination offset
542 :param length: length of returned list
542 :param length: length of returned list
543 :param order_by: order of the returned list
543 :param order_by: order of the returned list
544 :param order_dir: 'asc' or 'desc' ordering direction
544 :param order_dir: 'asc' or 'desc' ordering direction
545 :returns: list of pull requests
545 :returns: list of pull requests
546 """
546 """
547
547
548 q = self._prepare_awaiting_my_review_review_query(
548 q = self._prepare_awaiting_my_review_review_query(
549 repo_name, user_id, search_q=search_q, statuses=statuses,
549 repo_name, user_id, search_q=search_q, statuses=statuses,
550 order_by=order_by, order_dir=order_dir)
550 order_by=order_by, order_dir=order_dir)
551
551
552 if length:
552 if length:
553 pull_requests = q.limit(length).offset(offset).all()
553 pull_requests = q.limit(length).offset(offset).all()
554 else:
554 else:
555 pull_requests = q.all()
555 pull_requests = q.all()
556
556
557 return pull_requests
557 return pull_requests
558
558
    def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
                                        order_by=None, order_dir='desc'):
        """
        Return a query of pull-requests where the user is the creator, or is
        added as a reviewer.

        :param user_id: author-or-reviewer user id; when falsy, no user filter
            is applied and all pull requests match
        :param statuses: optional list of pull request statuses (closed/opened)
        :param query: optional free-text filter (id, author username, title,
            description)
        :param order_by: one of the `order_map` keys below; unknown values
            are ignored
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: un-executed SQLAlchemy query object
        """
        q = PullRequest.query()
        if user_id:
            # pull requests where the user appears as a reviewer
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            # author OR reviewer
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # whitelist of sortable columns; unknown order_by values are ignored
        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
601
602
602 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 return q.count()
605 return q.count()
605
606
606 def get_im_participating_in(
607 def get_im_participating_in(
607 self, user_id=None, statuses=None, query='', offset=0,
608 self, user_id=None, statuses=None, query='', offset=0,
608 length=None, order_by=None, order_dir='desc'):
609 length=None, order_by=None, order_dir='desc'):
609 """
610 """
610 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 """
612 """
612
613
613 q = self._prepare_im_participating_query(
614 q = self._prepare_im_participating_query(
614 user_id, statuses=statuses, query=query, order_by=order_by,
615 user_id, statuses=statuses, query=query, order_by=order_by,
615 order_dir=order_dir)
616 order_dir=order_dir)
616
617
617 if length:
618 if length:
618 pull_requests = q.limit(length).offset(offset).all()
619 pull_requests = q.limit(length).offset(offset).all()
619 else:
620 else:
620 pull_requests = q.all()
621 pull_requests = q.all()
621
622
622 return pull_requests
623 return pull_requests
623
624
    def _prepare_participating_in_for_review_query(
            self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
        """
        Build a query of pull requests (across all repositories) that still
        await a review action from `user_id`.

        Same shape as `_prepare_awaiting_my_review_review_query` but without
        the repository join/filter.

        :param user_id: reviewer user id
        :param statuses: optional list of pull request statuses (closed/opened)
        :param query: optional free-text filter (id, author username, title,
            description)
        :param order_by: one of the `order_map` keys below; unknown values
            are ignored
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: un-executed SQLAlchemy query object
        """

        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)

        # correlated subquery picking the relevant status version per
        # (pull request, reviewer) pair
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        # outer join on statuses so reviewers who never voted (status NULL)
        # are still included as "awaiting review"
        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        # whitelist of sortable columns; unknown order_by values are ignored
        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
681
682
682 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 return q.count()
685 return q.count()
685
686
686 def get_im_participating_in_for_review(
687 def get_im_participating_in_for_review(
687 self, user_id, statuses=None, query='', offset=0,
688 self, user_id, statuses=None, query='', offset=0,
688 length=None, order_by=None, order_dir='desc'):
689 length=None, order_by=None, order_dir='desc'):
689 """
690 """
690 Get all Pull requests that needs user approval or rejection
691 Get all Pull requests that needs user approval or rejection
691 """
692 """
692
693
693 q = self._prepare_participating_in_for_review_query(
694 q = self._prepare_participating_in_for_review_query(
694 user_id, statuses=statuses, query=query, order_by=order_by,
695 user_id, statuses=statuses, query=query, order_by=order_by,
695 order_dir=order_dir)
696 order_dir=order_dir)
696
697
697 if length:
698 if length:
698 pull_requests = q.limit(length).offset(offset).all()
699 pull_requests = q.limit(length).offset(offset).all()
699 else:
700 else:
700 pull_requests = q.all()
701 pull_requests = q.all()
701
702
702 return pull_requests
703 return pull_requests
703
704
704 def get_versions(self, pull_request):
705 def get_versions(self, pull_request):
705 """
706 """
706 returns version of pull request sorted by ID descending
707 returns version of pull request sorted by ID descending
707 """
708 """
708 return PullRequestVersion.query()\
709 return PullRequestVersion.query()\
709 .filter(PullRequestVersion.pull_request == pull_request)\
710 .filter(PullRequestVersion.pull_request == pull_request)\
710 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 .all()
712 .all()
712
713
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request, optionally at a specific stored version.

        :param pull_request_id: id of the pull request
        :param version: ``'latest'``, a PullRequestVersion id, or None for the
            current pull request
        :returns: tuple of (original pull request, versioned object,
            display object, at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' resolves to the live pull request itself, not a stored
            # version row; NOTE(review): this uses .get() and so may return
            # None instead of raising 404 like the other branches — confirm
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # explicit stored version; 404s when the version id is unknown
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            # no version requested: original and versioned object coincide
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
735
736
736 def pr_commits_versions(self, versions):
737 def pr_commits_versions(self, versions):
737 """
738 """
738 Maps the pull-request commits into all known PR versions. This way we can obtain
739 Maps the pull-request commits into all known PR versions. This way we can obtain
739 each pr version the commit was introduced in.
740 each pr version the commit was introduced in.
740 """
741 """
741 commit_versions = collections.defaultdict(list)
742 commit_versions = collections.defaultdict(list)
742 num_versions = [x.pull_request_version_id for x in versions]
743 num_versions = [x.pull_request_version_id for x in versions]
743 for ver in versions:
744 for ver in versions:
744 for commit_id in ver.revisions:
745 for commit_id in ver.revisions:
745 ver_idx = ChangesetComment.get_index_from_version(
746 ver_idx = ChangesetComment.get_index_from_version(
746 ver.pull_request_version_id, num_versions=num_versions)
747 ver.pull_request_version_id, num_versions=num_versions)
747 commit_versions[commit_id].append(ver_idx)
748 commit_versions[commit_id].append(ver_idx)
748 return commit_versions
749 return commit_versions
749
750
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request: persist it, attach reviewers/observers,
        set initial commit statuses, run an initial merge simulation and
        fire notifications/hooks/audit log.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (or its id/name)
        :param source_ref: source reference of the changes
        :param target_repo: target repository (or its id/name)
        :param target_ref: target reference to merge into
        :param revisions: commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
        :param observers: iterable shaped like ``reviewers``
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: optional pre-computed common ancestor
        :param description_renderer: renderer used for the description
        :param reviewer_data: raw reviewer rule data stored on the PR
        :param translator: translation function; defaults to current request's
        :param auth_user: acting auth user; defaults to the creator
        :returns: the created PullRequest (state CREATED after simulation)
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush to obtain the pull_request_id needed by the reviewer rows
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            # state_obj unused here; the context manager performs the
            # state transition itself
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
891
892
892 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 pull_request = self.__get_pull_request(pull_request)
894 pull_request = self.__get_pull_request(pull_request)
894 target_scm = pull_request.target_repo.scm_instance()
895 target_scm = pull_request.target_repo.scm_instance()
895 if action == 'create':
896 if action == 'create':
896 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 elif action == 'merge':
898 elif action == 'merge':
898 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 elif action == 'close':
900 elif action == 'close':
900 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 elif action == 'review_status_change':
902 elif action == 'review_status_change':
902 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 elif action == 'update':
904 elif action == 'update':
904 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 elif action == 'comment':
906 elif action == 'comment':
906 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 elif action == 'comment_edit':
908 elif action == 'comment_edit':
908 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 else:
910 else:
910 return
911 return
911
912
912 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 pull_request, action, trigger_hook)
914 pull_request, action, trigger_hook)
914 trigger_hook(
915 trigger_hook(
915 username=user.username,
916 username=user.username,
916 repo_name=pull_request.target_repo.repo_name,
917 repo_name=pull_request.target_repo.repo_name,
917 repo_type=target_scm.alias,
918 repo_type=target_scm.alias,
918 pull_request=pull_request,
919 pull_request=pull_request,
919 data=data)
920 data=data)
920
921
921 def _get_commit_ids(self, pull_request):
922 def _get_commit_ids(self, pull_request):
922 """
923 """
923 Return the commit ids of the merged pull request.
924 Return the commit ids of the merged pull request.
924
925
925 This method is not dealing correctly yet with the lack of autoupdates
926 This method is not dealing correctly yet with the lack of autoupdates
926 nor with the implicit target updates.
927 nor with the implicit target updates.
927 For example: if a commit in the source repo is already in the target it
928 For example: if a commit in the source repo is already in the target it
928 will be reported anyways.
929 will be reported anyways.
929 """
930 """
930 merge_rev = pull_request.merge_rev
931 merge_rev = pull_request.merge_rev
931 if merge_rev is None:
932 if merge_rev is None:
932 raise ValueError('This pull request was not merged yet')
933 raise ValueError('This pull request was not merged yet')
933
934
934 commit_ids = list(pull_request.revisions)
935 commit_ids = list(pull_request.revisions)
935 if merge_rev not in commit_ids:
936 if merge_rev not in commit_ids:
936 commit_ids.append(merge_rev)
937 commit_ids.append(merge_rev)
937
938
938 return commit_ids
939 return commit_ids
939
940
940 def merge_repo(self, pull_request, user, extras):
941 def merge_repo(self, pull_request, user, extras):
941 log.debug("Merging pull request %s", pull_request.pull_request_id)
942 log.debug("Merging pull request %s", pull_request.pull_request_id)
942 extras['user_agent'] = 'internal-merge'
943 extras['user_agent'] = 'internal-merge'
943 merge_state = self._merge_pull_request(pull_request, user, extras)
944 merge_state = self._merge_pull_request(pull_request, user, extras)
944 if merge_state.executed:
945 if merge_state.executed:
945 log.debug("Merge was successful, updating the pull request comments.")
946 log.debug("Merge was successful, updating the pull request comments.")
946 self._comment_and_close_pr(pull_request, user, merge_state)
947 self._comment_and_close_pr(pull_request, user, merge_state)
947
948
948 self._log_audit_action(
949 self._log_audit_action(
949 'repo.pull_request.merge',
950 'repo.pull_request.merge',
950 {'merge_state': merge_state.__dict__},
951 {'merge_state': merge_state.__dict__},
951 user, pull_request)
952 user, pull_request)
952
953
953 else:
954 else:
954 log.warn("Merge failed, not updating the pull request.")
955 log.warn("Merge failed, not updating the pull request.")
955 return merge_state
956 return merge_state
956
957
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        # Perform the actual VCS-level merge and return the backend merge
        # state. This does NOT close the pull request or add any comments;
        # see merge_repo()/_comment_and_close_pr() for that part.
        #
        # :param pull_request: pull request to merge
        # :param user: user whose name/email end up on the merge commit
        # :param extras: hook-environment dict; enriched by the callback
        #     daemon and serialized into the repo config as RC_SCM_DATA
        # :param merge_msg: optional merge-message template override; falls
        #     back to vcs_settings.MERGE_MESSAGE_TMPL
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # refresh the target ref so we merge against its current commit
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        # the daemon receives hook callbacks fired by the merge push
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
997
998
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        # Post-merge bookkeeping: record the merge revision, add the closing
        # comment, invalidate target-repo caches and fire the 'merge' hook.
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing comment; closing_pr=True marks it as the PR-closing comment
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        # persist merge_rev/updated_on before invalidating caches
        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
1019
1020
1020 def has_valid_update_type(self, pull_request):
1021 def has_valid_update_type(self, pull_request):
1021 source_ref_type = pull_request.source_ref_parts.type
1022 source_ref_type = pull_request.source_ref_parts.type
1022 return source_ref_type in self.REF_TYPES
1023 return source_ref_type in self.REF_TYPES
1023
1024
    def get_flow_commits(self, pull_request):
        # Resolve the current source and target commits of the pull request.
        #
        # For ref types listed in REF_TYPES the ref *name* is resolved to its
        # current commit; for anything else the recorded commit id is used
        # directly.
        #
        # :returns: (source_commit, target_commit) tuple
        # :raises SourceRefMissing: if the source commit cannot be found
        # :raises TargetRefMissing: if the target commit cannot be found

        # source repo
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_id = pull_request.source_ref_parts.commit_id
        source_repo = pull_request.source_repo.scm_instance()

        try:
            if source_ref_type in self.REF_TYPES:
                source_commit = source_repo.get_commit(
                    source_ref_name, reference_obj=pull_request.source_ref_parts)
            else:
                source_commit = source_repo.get_commit(source_ref_id)
        except CommitDoesNotExistError:
            raise SourceRefMissing()

        # target repo
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_type = pull_request.target_ref_parts.type
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            if target_ref_type in self.REF_TYPES:
                target_commit = target_repo.get_commit(
                    target_ref_name, reference_obj=pull_request.target_ref_parts)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            raise TargetRefMissing()

        return source_commit, target_commit
1057
1058
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        # bail out early for ref types we cannot update (not in REF_TYPES)
        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # resolve the current tips of both sides; translate missing-commit
        # errors into non-executed responses instead of raising
        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # did either side move since the last recorded commit ids?
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): the kwargs below look swapped
            # (source_changed=target_changed and vice versa), but both are
            # False on this branch so it is harmless.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            # no new version; point at the latest existing one (if any)
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        # refs are stored as 'type:name:commit_id' strings; the target ref
        # is pinned to the common ancestor
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                # best-effort notification: log and roll back, the update
                # itself is already committed above
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1238
1239
    def _create_version_from_snapshot(self, pull_request):
        # Copy the current state of `pull_request` into a new
        # PullRequestVersion row and persist it (flush only, no commit).
        # :returns: the new PullRequestVersion instance
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # merge-state bookkeeping is snapshotted as well
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.last_merge_metadata = pull_request.last_merge_metadata
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.common_ancestor_id = pull_request.common_ancestor_id
        version.pull_request = pull_request
        Session().add(version)
        # flush so the version gets its primary key assigned
        Session().flush()

        return version
1268
1269
1269 def _generate_update_diffs(self, pull_request, pull_request_version):
1270 def _generate_update_diffs(self, pull_request, pull_request_version):
1270
1271
1271 diff_context = (
1272 diff_context = (
1272 self.DIFF_CONTEXT +
1273 self.DIFF_CONTEXT +
1273 CommentsModel.needed_extra_diff_context())
1274 CommentsModel.needed_extra_diff_context())
1274 hide_whitespace_changes = False
1275 hide_whitespace_changes = False
1275 source_repo = pull_request_version.source_repo
1276 source_repo = pull_request_version.source_repo
1276 source_ref_id = pull_request_version.source_ref_parts.commit_id
1277 source_ref_id = pull_request_version.source_ref_parts.commit_id
1277 target_ref_id = pull_request_version.target_ref_parts.commit_id
1278 target_ref_id = pull_request_version.target_ref_parts.commit_id
1278 old_diff = self._get_diff_from_pr_or_version(
1279 old_diff = self._get_diff_from_pr_or_version(
1279 source_repo, source_ref_id, target_ref_id,
1280 source_repo, source_ref_id, target_ref_id,
1280 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1281 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1281
1282
1282 source_repo = pull_request.source_repo
1283 source_repo = pull_request.source_repo
1283 source_ref_id = pull_request.source_ref_parts.commit_id
1284 source_ref_id = pull_request.source_ref_parts.commit_id
1284 target_ref_id = pull_request.target_ref_parts.commit_id
1285 target_ref_id = pull_request.target_ref_parts.commit_id
1285
1286
1286 new_diff = self._get_diff_from_pr_or_version(
1287 new_diff = self._get_diff_from_pr_or_version(
1287 source_repo, source_ref_id, target_ref_id,
1288 source_repo, source_ref_id, target_ref_id,
1288 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1289 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1289
1290
1290 old_diff_data = diffs.DiffProcessor(old_diff)
1291 old_diff_data = diffs.DiffProcessor(old_diff)
1291 old_diff_data.prepare()
1292 old_diff_data.prepare()
1292 new_diff_data = diffs.DiffProcessor(new_diff)
1293 new_diff_data = diffs.DiffProcessor(new_diff)
1293 new_diff_data.prepare()
1294 new_diff_data.prepare()
1294
1295
1295 return old_diff_data, new_diff_data
1296 return old_diff_data, new_diff_data
1296
1297
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # NOTE: `== None` (not `is None`) is required here — SQLAlchemy
        # overloads `==` to generate the IS NULL clause
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1322
1323
1323 def _calculate_commit_id_changes(self, old_ids, new_ids):
1324 def _calculate_commit_id_changes(self, old_ids, new_ids):
1324 added = [x for x in new_ids if x not in old_ids]
1325 added = [x for x in new_ids if x not in old_ids]
1325 common = [x for x in new_ids if x in old_ids]
1326 common = [x for x in new_ids if x in old_ids]
1326 removed = [x for x in old_ids if x not in new_ids]
1327 removed = [x for x in old_ids if x not in new_ids]
1327 total = new_ids
1328 total = new_ids
1328 return ChangeTuple(added, common, removed, total)
1329 return ChangeTuple(added, common, removed, total)
1329
1330
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared DiffProcessor results and classify files as
        added, modified or removed between the old and new diff.

        Comparison is done per-file on an md5 of the raw diff text, so a
        "modified" file is one whose diff content changed between versions.

        :param old_diff_data: prepared DiffProcessor of the previous version
        :param new_diff_data: prepared DiffProcessor of the current version
        :return: FileChangeTuple(added, modified, removed) of filenames
        """
        # map filename -> md5 of its raw diff in the OLD version
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1364
1365
1365 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1366 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1366 """
1367 """
1367 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1368 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1368 so it's always looking the same disregarding on which default
1369 so it's always looking the same disregarding on which default
1369 renderer system is using.
1370 renderer system is using.
1370
1371
1371 :param ancestor_commit_id: ancestor raw_id
1372 :param ancestor_commit_id: ancestor raw_id
1372 :param changes: changes named tuple
1373 :param changes: changes named tuple
1373 :param file_changes: file changes named tuple
1374 :param file_changes: file changes named tuple
1374
1375
1375 """
1376 """
1376 new_status = ChangesetStatus.get_status_lbl(
1377 new_status = ChangesetStatus.get_status_lbl(
1377 ChangesetStatus.STATUS_UNDER_REVIEW)
1378 ChangesetStatus.STATUS_UNDER_REVIEW)
1378
1379
1379 changed_files = (
1380 changed_files = (
1380 file_changes.added + file_changes.modified + file_changes.removed)
1381 file_changes.added + file_changes.modified + file_changes.removed)
1381
1382
1382 params = {
1383 params = {
1383 'under_review_label': new_status,
1384 'under_review_label': new_status,
1384 'added_commits': changes.added,
1385 'added_commits': changes.added,
1385 'removed_commits': changes.removed,
1386 'removed_commits': changes.removed,
1386 'changed_files': changed_files,
1387 'changed_files': changed_files,
1387 'added_files': file_changes.added,
1388 'added_files': file_changes.added,
1388 'modified_files': file_changes.modified,
1389 'modified_files': file_changes.modified,
1389 'removed_files': file_changes.removed,
1390 'removed_files': file_changes.removed,
1390 'ancestor_commit_id': ancestor_commit_id
1391 'ancestor_commit_id': ancestor_commit_id
1391 }
1392 }
1392 renderer = RstTemplateRenderer()
1393 renderer = RstTemplateRenderer()
1393 return renderer.render('pull_request_update.mako', **params)
1394 return renderer.render('pull_request_update.mako', **params)
1394
1395
1395 def edit(self, pull_request, title, description, description_renderer, user):
1396 def edit(self, pull_request, title, description, description_renderer, user):
1396 pull_request = self.__get_pull_request(pull_request)
1397 pull_request = self.__get_pull_request(pull_request)
1397 old_data = pull_request.get_api_data(with_merge_state=False)
1398 old_data = pull_request.get_api_data(with_merge_state=False)
1398 if pull_request.is_closed():
1399 if pull_request.is_closed():
1399 raise ValueError('This pull request is closed')
1400 raise ValueError('This pull request is closed')
1400 if title:
1401 if title:
1401 pull_request.title = title
1402 pull_request.title = title
1402 pull_request.description = description
1403 pull_request.description = description
1403 pull_request.updated_on = datetime.datetime.now()
1404 pull_request.updated_on = datetime.datetime.now()
1404 pull_request.description_renderer = description_renderer
1405 pull_request.description_renderer = description_renderer
1405 Session().add(pull_request)
1406 Session().add(pull_request)
1406 self._log_audit_action(
1407 self._log_audit_action(
1407 'repo.pull_request.edit', {'old_data': old_data},
1408 'repo.pull_request.edit', {'old_data': old_data},
1408 user, pull_request)
1409 user, pull_request)
1409
1410
1410 def update_reviewers(self, pull_request, reviewer_data, user):
1411 def update_reviewers(self, pull_request, reviewer_data, user):
1411 """
1412 """
1412 Update the reviewers in the pull request
1413 Update the reviewers in the pull request
1413
1414
1414 :param pull_request: the pr to update
1415 :param pull_request: the pr to update
1415 :param reviewer_data: list of tuples
1416 :param reviewer_data: list of tuples
1416 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1417 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1417 :param user: current use who triggers this action
1418 :param user: current use who triggers this action
1418 """
1419 """
1419
1420
1420 pull_request = self.__get_pull_request(pull_request)
1421 pull_request = self.__get_pull_request(pull_request)
1421 if pull_request.is_closed():
1422 if pull_request.is_closed():
1422 raise ValueError('This pull request is closed')
1423 raise ValueError('This pull request is closed')
1423
1424
1424 reviewers = {}
1425 reviewers = {}
1425 for user_id, reasons, mandatory, role, rules in reviewer_data:
1426 for user_id, reasons, mandatory, role, rules in reviewer_data:
1426 if isinstance(user_id, (int, compat.string_types)):
1427 if isinstance(user_id, (int, compat.string_types)):
1427 user_id = self._get_user(user_id).user_id
1428 user_id = self._get_user(user_id).user_id
1428 reviewers[user_id] = {
1429 reviewers[user_id] = {
1429 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1430 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1430
1431
1431 reviewers_ids = set(reviewers.keys())
1432 reviewers_ids = set(reviewers.keys())
1432 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1433 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1433 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1434 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1434
1435
1435 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1436 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1436
1437
1437 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1438 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1438 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1439 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1439
1440
1440 log.debug("Adding %s reviewers", ids_to_add)
1441 log.debug("Adding %s reviewers", ids_to_add)
1441 log.debug("Removing %s reviewers", ids_to_remove)
1442 log.debug("Removing %s reviewers", ids_to_remove)
1442 changed = False
1443 changed = False
1443 added_audit_reviewers = []
1444 added_audit_reviewers = []
1444 removed_audit_reviewers = []
1445 removed_audit_reviewers = []
1445
1446
1446 for uid in ids_to_add:
1447 for uid in ids_to_add:
1447 changed = True
1448 changed = True
1448 _usr = self._get_user(uid)
1449 _usr = self._get_user(uid)
1449 reviewer = PullRequestReviewers()
1450 reviewer = PullRequestReviewers()
1450 reviewer.user = _usr
1451 reviewer.user = _usr
1451 reviewer.pull_request = pull_request
1452 reviewer.pull_request = pull_request
1452 reviewer.reasons = reviewers[uid]['reasons']
1453 reviewer.reasons = reviewers[uid]['reasons']
1453 # NOTE(marcink): mandatory shouldn't be changed now
1454 # NOTE(marcink): mandatory shouldn't be changed now
1454 # reviewer.mandatory = reviewers[uid]['reasons']
1455 # reviewer.mandatory = reviewers[uid]['reasons']
1455 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1456 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1456 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1457 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1457 Session().add(reviewer)
1458 Session().add(reviewer)
1458 added_audit_reviewers.append(reviewer.get_dict())
1459 added_audit_reviewers.append(reviewer.get_dict())
1459
1460
1460 for uid in ids_to_remove:
1461 for uid in ids_to_remove:
1461 changed = True
1462 changed = True
1462 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1463 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1463 # This is an edge case that handles previous state of having the same reviewer twice.
1464 # This is an edge case that handles previous state of having the same reviewer twice.
1464 # this CAN happen due to the lack of DB checks
1465 # this CAN happen due to the lack of DB checks
1465 reviewers = PullRequestReviewers.query()\
1466 reviewers = PullRequestReviewers.query()\
1466 .filter(PullRequestReviewers.user_id == uid,
1467 .filter(PullRequestReviewers.user_id == uid,
1467 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1468 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1468 PullRequestReviewers.pull_request == pull_request)\
1469 PullRequestReviewers.pull_request == pull_request)\
1469 .all()
1470 .all()
1470
1471
1471 for obj in reviewers:
1472 for obj in reviewers:
1472 added_audit_reviewers.append(obj.get_dict())
1473 added_audit_reviewers.append(obj.get_dict())
1473 Session().delete(obj)
1474 Session().delete(obj)
1474
1475
1475 if changed:
1476 if changed:
1476 Session().expire_all()
1477 Session().expire_all()
1477 pull_request.updated_on = datetime.datetime.now()
1478 pull_request.updated_on = datetime.datetime.now()
1478 Session().add(pull_request)
1479 Session().add(pull_request)
1479
1480
1480 # finally store audit logs
1481 # finally store audit logs
1481 for user_data in added_audit_reviewers:
1482 for user_data in added_audit_reviewers:
1482 self._log_audit_action(
1483 self._log_audit_action(
1483 'repo.pull_request.reviewer.add', {'data': user_data},
1484 'repo.pull_request.reviewer.add', {'data': user_data},
1484 user, pull_request)
1485 user, pull_request)
1485 for user_data in removed_audit_reviewers:
1486 for user_data in removed_audit_reviewers:
1486 self._log_audit_action(
1487 self._log_audit_action(
1487 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1488 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1488 user, pull_request)
1489 user, pull_request)
1489
1490
1490 self.notify_reviewers(pull_request, ids_to_add, user)
1491 self.notify_reviewers(pull_request, ids_to_add, user)
1491 return ids_to_add, ids_to_remove
1492 return ids_to_add, ids_to_remove
1492
1493
1493 def update_observers(self, pull_request, observer_data, user):
1494 def update_observers(self, pull_request, observer_data, user):
1494 """
1495 """
1495 Update the observers in the pull request
1496 Update the observers in the pull request
1496
1497
1497 :param pull_request: the pr to update
1498 :param pull_request: the pr to update
1498 :param observer_data: list of tuples
1499 :param observer_data: list of tuples
1499 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1500 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1500 :param user: current use who triggers this action
1501 :param user: current use who triggers this action
1501 """
1502 """
1502 pull_request = self.__get_pull_request(pull_request)
1503 pull_request = self.__get_pull_request(pull_request)
1503 if pull_request.is_closed():
1504 if pull_request.is_closed():
1504 raise ValueError('This pull request is closed')
1505 raise ValueError('This pull request is closed')
1505
1506
1506 observers = {}
1507 observers = {}
1507 for user_id, reasons, mandatory, role, rules in observer_data:
1508 for user_id, reasons, mandatory, role, rules in observer_data:
1508 if isinstance(user_id, (int, compat.string_types)):
1509 if isinstance(user_id, (int, compat.string_types)):
1509 user_id = self._get_user(user_id).user_id
1510 user_id = self._get_user(user_id).user_id
1510 observers[user_id] = {
1511 observers[user_id] = {
1511 'reasons': reasons, 'observers': mandatory, 'role': role}
1512 'reasons': reasons, 'observers': mandatory, 'role': role}
1512
1513
1513 observers_ids = set(observers.keys())
1514 observers_ids = set(observers.keys())
1514 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1515 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1515 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1516 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1516
1517
1517 current_observers_ids = set([x.user.user_id for x in current_observers])
1518 current_observers_ids = set([x.user.user_id for x in current_observers])
1518
1519
1519 ids_to_add = observers_ids.difference(current_observers_ids)
1520 ids_to_add = observers_ids.difference(current_observers_ids)
1520 ids_to_remove = current_observers_ids.difference(observers_ids)
1521 ids_to_remove = current_observers_ids.difference(observers_ids)
1521
1522
1522 log.debug("Adding %s observer", ids_to_add)
1523 log.debug("Adding %s observer", ids_to_add)
1523 log.debug("Removing %s observer", ids_to_remove)
1524 log.debug("Removing %s observer", ids_to_remove)
1524 changed = False
1525 changed = False
1525 added_audit_observers = []
1526 added_audit_observers = []
1526 removed_audit_observers = []
1527 removed_audit_observers = []
1527
1528
1528 for uid in ids_to_add:
1529 for uid in ids_to_add:
1529 changed = True
1530 changed = True
1530 _usr = self._get_user(uid)
1531 _usr = self._get_user(uid)
1531 observer = PullRequestReviewers()
1532 observer = PullRequestReviewers()
1532 observer.user = _usr
1533 observer.user = _usr
1533 observer.pull_request = pull_request
1534 observer.pull_request = pull_request
1534 observer.reasons = observers[uid]['reasons']
1535 observer.reasons = observers[uid]['reasons']
1535 # NOTE(marcink): mandatory shouldn't be changed now
1536 # NOTE(marcink): mandatory shouldn't be changed now
1536 # observer.mandatory = observer[uid]['reasons']
1537 # observer.mandatory = observer[uid]['reasons']
1537
1538
1538 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1539 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1539 observer.role = PullRequestReviewers.ROLE_OBSERVER
1540 observer.role = PullRequestReviewers.ROLE_OBSERVER
1540 Session().add(observer)
1541 Session().add(observer)
1541 added_audit_observers.append(observer.get_dict())
1542 added_audit_observers.append(observer.get_dict())
1542
1543
1543 for uid in ids_to_remove:
1544 for uid in ids_to_remove:
1544 changed = True
1545 changed = True
1545 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1546 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1546 # This is an edge case that handles previous state of having the same reviewer twice.
1547 # This is an edge case that handles previous state of having the same reviewer twice.
1547 # this CAN happen due to the lack of DB checks
1548 # this CAN happen due to the lack of DB checks
1548 observers = PullRequestReviewers.query()\
1549 observers = PullRequestReviewers.query()\
1549 .filter(PullRequestReviewers.user_id == uid,
1550 .filter(PullRequestReviewers.user_id == uid,
1550 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1551 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1551 PullRequestReviewers.pull_request == pull_request)\
1552 PullRequestReviewers.pull_request == pull_request)\
1552 .all()
1553 .all()
1553
1554
1554 for obj in observers:
1555 for obj in observers:
1555 added_audit_observers.append(obj.get_dict())
1556 added_audit_observers.append(obj.get_dict())
1556 Session().delete(obj)
1557 Session().delete(obj)
1557
1558
1558 if changed:
1559 if changed:
1559 Session().expire_all()
1560 Session().expire_all()
1560 pull_request.updated_on = datetime.datetime.now()
1561 pull_request.updated_on = datetime.datetime.now()
1561 Session().add(pull_request)
1562 Session().add(pull_request)
1562
1563
1563 # finally store audit logs
1564 # finally store audit logs
1564 for user_data in added_audit_observers:
1565 for user_data in added_audit_observers:
1565 self._log_audit_action(
1566 self._log_audit_action(
1566 'repo.pull_request.observer.add', {'data': user_data},
1567 'repo.pull_request.observer.add', {'data': user_data},
1567 user, pull_request)
1568 user, pull_request)
1568 for user_data in removed_audit_observers:
1569 for user_data in removed_audit_observers:
1569 self._log_audit_action(
1570 self._log_audit_action(
1570 'repo.pull_request.observer.delete', {'old_data': user_data},
1571 'repo.pull_request.observer.delete', {'old_data': user_data},
1571 user, pull_request)
1572 user, pull_request)
1572
1573
1573 self.notify_observers(pull_request, ids_to_add, user)
1574 self.notify_observers(pull_request, ids_to_add, user)
1574 return ids_to_add, ids_to_remove
1575 return ids_to_add, ids_to_remove
1575
1576
1576 def get_url(self, pull_request, request=None, permalink=False):
1577 def get_url(self, pull_request, request=None, permalink=False):
1577 if not request:
1578 if not request:
1578 request = get_current_request()
1579 request = get_current_request()
1579
1580
1580 if permalink:
1581 if permalink:
1581 return request.route_url(
1582 return request.route_url(
1582 'pull_requests_global',
1583 'pull_requests_global',
1583 pull_request_id=pull_request.pull_request_id,)
1584 pull_request_id=pull_request.pull_request_id,)
1584 else:
1585 else:
1585 return request.route_url('pullrequest_show',
1586 return request.route_url('pullrequest_show',
1586 repo_name=safe_str(pull_request.target_repo.repo_name),
1587 repo_name=safe_str(pull_request.target_repo.repo_name),
1587 pull_request_id=pull_request.pull_request_id,)
1588 pull_request_id=pull_request.pull_request_id,)
1588
1589
1589 def get_shadow_clone_url(self, pull_request, request=None):
1590 def get_shadow_clone_url(self, pull_request, request=None):
1590 """
1591 """
1591 Returns qualified url pointing to the shadow repository. If this pull
1592 Returns qualified url pointing to the shadow repository. If this pull
1592 request is closed there is no shadow repository and ``None`` will be
1593 request is closed there is no shadow repository and ``None`` will be
1593 returned.
1594 returned.
1594 """
1595 """
1595 if pull_request.is_closed():
1596 if pull_request.is_closed():
1596 return None
1597 return None
1597 else:
1598 else:
1598 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1599 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1599 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1600 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1600
1601
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """
        Create in-app notifications and emails informing the given users
        that they were added to a pull request in the given role
        (reviewer or observer). No-op when ``user_ids`` is empty.

        :param pull_request: the pull request the users were added to
        :param user_ids: iterable of user ids to notify
        :param role: PullRequestReviewers.ROLE_* value used in the email body
        :param user: the user who triggered the addition (notification author)
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        # (raw_id, message) pairs for every commit in the pull request
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            # thread_ids groups emails about the same PR into one mail thread
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1659
1660
1660 def notify_reviewers(self, pull_request, reviewers_ids, user):
1661 def notify_reviewers(self, pull_request, reviewers_ids, user):
1661 return self._notify_reviewers(pull_request, reviewers_ids,
1662 return self._notify_reviewers(pull_request, reviewers_ids,
1662 PullRequestReviewers.ROLE_REVIEWER, user)
1663 PullRequestReviewers.ROLE_REVIEWER, user)
1663
1664
1664 def notify_observers(self, pull_request, observers_ids, user):
1665 def notify_observers(self, pull_request, observers_ids, user):
1665 return self._notify_reviewers(pull_request, observers_ids,
1666 return self._notify_reviewers(pull_request, observers_ids,
1666 PullRequestReviewers.ROLE_OBSERVER, user)
1667 PullRequestReviewers.ROLE_OBSERVER, user)
1667
1668
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Send a pull-request-update notification (in-app + email) to all
        current reviewers/observers except the user who performed the update.

        :param pull_request: the updated pull request
        :param updating_user: user who performed the update (excluded
            from recipients)
        :param ancestor_commit_id: ancestor raw_id after the update
        :param commit_changes: commit changes named tuple
            (see _calculate_commit_id_changes)
        :param file_changes: file changes named tuple
            (see _calculate_file_changes)
        """

        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            # thread_ids groups emails about the same PR into one mail thread
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1732
1733
    def delete(self, pull_request, user=None):
        """
        Delete a pull request: clean up its merge workspace, write an
        audit entry capturing the old state, then remove the DB record.

        :param pull_request: pull request object or id
        :param user: acting user for the audit log; defaults to the
            username of the current rhodecode user (or None)
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # snapshot API data BEFORE deletion so the audit log has the old state
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1744
1745
1745 def close_pull_request(self, pull_request, user):
1746 def close_pull_request(self, pull_request, user):
1746 pull_request = self.__get_pull_request(pull_request)
1747 pull_request = self.__get_pull_request(pull_request)
1747 self._cleanup_merge_workspace(pull_request)
1748 self._cleanup_merge_workspace(pull_request)
1748 pull_request.status = PullRequest.STATUS_CLOSED
1749 pull_request.status = PullRequest.STATUS_CLOSED
1749 pull_request.updated_on = datetime.datetime.now()
1750 pull_request.updated_on = datetime.datetime.now()
1750 Session().add(pull_request)
1751 Session().add(pull_request)
1751 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1752 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1752
1753
1753 pr_data = pull_request.get_api_data(with_merge_state=False)
1754 pr_data = pull_request.get_api_data(with_merge_state=False)
1754 self._log_audit_action(
1755 self._log_audit_action(
1755 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1756 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1756
1757
1757 def close_pull_request_with_comment(
1758 def close_pull_request_with_comment(
1758 self, pull_request, user, repo, message=None, auth_user=None):
1759 self, pull_request, user, repo, message=None, auth_user=None):
1759
1760
1760 pull_request_review_status = pull_request.calculated_review_status()
1761 pull_request_review_status = pull_request.calculated_review_status()
1761
1762
1762 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1763 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1763 # approved only if we have voting consent
1764 # approved only if we have voting consent
1764 status = ChangesetStatus.STATUS_APPROVED
1765 status = ChangesetStatus.STATUS_APPROVED
1765 else:
1766 else:
1766 status = ChangesetStatus.STATUS_REJECTED
1767 status = ChangesetStatus.STATUS_REJECTED
1767 status_lbl = ChangesetStatus.get_status_lbl(status)
1768 status_lbl = ChangesetStatus.get_status_lbl(status)
1768
1769
1769 default_message = (
1770 default_message = (
1770 'Closing with status change {transition_icon} {status}.'
1771 'Closing with status change {transition_icon} {status}.'
1771 ).format(transition_icon='>', status=status_lbl)
1772 ).format(transition_icon='>', status=status_lbl)
1772 text = message or default_message
1773 text = message or default_message
1773
1774
1774 # create a comment, and link it to new status
1775 # create a comment, and link it to new status
1775 comment = CommentsModel().create(
1776 comment = CommentsModel().create(
1776 text=text,
1777 text=text,
1777 repo=repo.repo_id,
1778 repo=repo.repo_id,
1778 user=user.user_id,
1779 user=user.user_id,
1779 pull_request=pull_request.pull_request_id,
1780 pull_request=pull_request.pull_request_id,
1780 status_change=status_lbl,
1781 status_change=status_lbl,
1781 status_change_type=status,
1782 status_change_type=status,
1782 closing_pr=True,
1783 closing_pr=True,
1783 auth_user=auth_user,
1784 auth_user=auth_user,
1784 )
1785 )
1785
1786
1786 # calculate old status before we change it
1787 # calculate old status before we change it
1787 old_calculated_status = pull_request.calculated_review_status()
1788 old_calculated_status = pull_request.calculated_review_status()
1788 ChangesetStatusModel().set_status(
1789 ChangesetStatusModel().set_status(
1789 repo.repo_id,
1790 repo.repo_id,
1790 status,
1791 status,
1791 user.user_id,
1792 user.user_id,
1792 comment=comment,
1793 comment=comment,
1793 pull_request=pull_request.pull_request_id
1794 pull_request=pull_request.pull_request_id
1794 )
1795 )
1795
1796
1796 Session().flush()
1797 Session().flush()
1797
1798
1798 self.trigger_pull_request_hook(pull_request, user, 'comment',
1799 self.trigger_pull_request_hook(pull_request, user, 'comment',
1799 data={'comment': comment})
1800 data={'comment': comment})
1800
1801
1801 # we now calculate the status of pull request again, and based on that
1802 # we now calculate the status of pull request again, and based on that
1802 # calculation trigger status change. This might happen in cases
1803 # calculation trigger status change. This might happen in cases
1803 # that non-reviewer admin closes a pr, which means his vote doesn't
1804 # that non-reviewer admin closes a pr, which means his vote doesn't
1804 # change the status, while if he's a reviewer this might change it.
1805 # change the status, while if he's a reviewer this might change it.
1805 calculated_status = pull_request.calculated_review_status()
1806 calculated_status = pull_request.calculated_review_status()
1806 if old_calculated_status != calculated_status:
1807 if old_calculated_status != calculated_status:
1807 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1808 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1808 data={'status': calculated_status})
1809 data={'status': calculated_status})
1809
1810
1810 # finally close the PR
1811 # finally close the PR
1811 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1812 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1812
1813
1813 return comment, status
1814 return comment, status
1814
1815
1815 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1816 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1816 _ = translator or get_current_request().translate
1817 _ = translator or get_current_request().translate
1817
1818
1818 if not self._is_merge_enabled(pull_request):
1819 if not self._is_merge_enabled(pull_request):
1819 return None, False, _('Server-side pull request merging is disabled.')
1820 return None, False, _('Server-side pull request merging is disabled.')
1820
1821
1821 if pull_request.is_closed():
1822 if pull_request.is_closed():
1822 return None, False, _('This pull request is closed.')
1823 return None, False, _('This pull request is closed.')
1823
1824
1824 merge_possible, msg = self._check_repo_requirements(
1825 merge_possible, msg = self._check_repo_requirements(
1825 target=pull_request.target_repo, source=pull_request.source_repo,
1826 target=pull_request.target_repo, source=pull_request.source_repo,
1826 translator=_)
1827 translator=_)
1827 if not merge_possible:
1828 if not merge_possible:
1828 return None, merge_possible, msg
1829 return None, merge_possible, msg
1829
1830
1830 try:
1831 try:
1831 merge_response = self._try_merge(
1832 merge_response = self._try_merge(
1832 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1833 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1833 log.debug("Merge response: %s", merge_response)
1834 log.debug("Merge response: %s", merge_response)
1834 return merge_response, merge_response.possible, merge_response.merge_status_message
1835 return merge_response, merge_response.possible, merge_response.merge_status_message
1835 except NotImplementedError:
1836 except NotImplementedError:
1836 return None, False, _('Pull request merging is not supported.')
1837 return None, False, _('Pull request merging is not supported.')
1837
1838
1838 def _check_repo_requirements(self, target, source, translator):
1839 def _check_repo_requirements(self, target, source, translator):
1839 """
1840 """
1840 Check if `target` and `source` have compatible requirements.
1841 Check if `target` and `source` have compatible requirements.
1841
1842
1842 Currently this is just checking for largefiles.
1843 Currently this is just checking for largefiles.
1843 """
1844 """
1844 _ = translator
1845 _ = translator
1845 target_has_largefiles = self._has_largefiles(target)
1846 target_has_largefiles = self._has_largefiles(target)
1846 source_has_largefiles = self._has_largefiles(source)
1847 source_has_largefiles = self._has_largefiles(source)
1847 merge_possible = True
1848 merge_possible = True
1848 message = u''
1849 message = u''
1849
1850
1850 if target_has_largefiles != source_has_largefiles:
1851 if target_has_largefiles != source_has_largefiles:
1851 merge_possible = False
1852 merge_possible = False
1852 if source_has_largefiles:
1853 if source_has_largefiles:
1853 message = _(
1854 message = _(
1854 'Target repository large files support is disabled.')
1855 'Target repository large files support is disabled.')
1855 else:
1856 else:
1856 message = _(
1857 message = _(
1857 'Source repository large files support is disabled.')
1858 'Source repository large files support is disabled.')
1858
1859
1859 return merge_possible, message
1860 return merge_possible, message
1860
1861
1861 def _has_largefiles(self, repo):
1862 def _has_largefiles(self, repo):
1862 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1863 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1863 'extensions', 'largefiles')
1864 'extensions', 'largefiles')
1864 return largefiles_ui and largefiles_ui[0].active
1865 return largefiles_ui and largefiles_ui[0].active
1865
1866
1866 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1867 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1867 """
1868 """
1868 Try to merge the pull request and return the merge status.
1869 Try to merge the pull request and return the merge status.
1869 """
1870 """
1870 log.debug(
1871 log.debug(
1871 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1872 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1872 pull_request.pull_request_id, force_shadow_repo_refresh)
1873 pull_request.pull_request_id, force_shadow_repo_refresh)
1873 target_vcs = pull_request.target_repo.scm_instance()
1874 target_vcs = pull_request.target_repo.scm_instance()
1874 # Refresh the target reference.
1875 # Refresh the target reference.
1875 try:
1876 try:
1876 target_ref = self._refresh_reference(
1877 target_ref = self._refresh_reference(
1877 pull_request.target_ref_parts, target_vcs)
1878 pull_request.target_ref_parts, target_vcs)
1878 except CommitDoesNotExistError:
1879 except CommitDoesNotExistError:
1879 merge_state = MergeResponse(
1880 merge_state = MergeResponse(
1880 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1881 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1881 metadata={'target_ref': pull_request.target_ref_parts})
1882 metadata={'target_ref': pull_request.target_ref_parts})
1882 return merge_state
1883 return merge_state
1883
1884
1884 target_locked = pull_request.target_repo.locked
1885 target_locked = pull_request.target_repo.locked
1885 if target_locked and target_locked[0]:
1886 if target_locked and target_locked[0]:
1886 locked_by = 'user:{}'.format(target_locked[0])
1887 locked_by = 'user:{}'.format(target_locked[0])
1887 log.debug("The target repository is locked by %s.", locked_by)
1888 log.debug("The target repository is locked by %s.", locked_by)
1888 merge_state = MergeResponse(
1889 merge_state = MergeResponse(
1889 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1890 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1890 metadata={'locked_by': locked_by})
1891 metadata={'locked_by': locked_by})
1891 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1892 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1892 pull_request, target_ref):
1893 pull_request, target_ref):
1893 log.debug("Refreshing the merge status of the repository.")
1894 log.debug("Refreshing the merge status of the repository.")
1894 merge_state = self._refresh_merge_state(
1895 merge_state = self._refresh_merge_state(
1895 pull_request, target_vcs, target_ref)
1896 pull_request, target_vcs, target_ref)
1896 else:
1897 else:
1897 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1898 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1898 metadata = {
1899 metadata = {
1899 'unresolved_files': '',
1900 'unresolved_files': '',
1900 'target_ref': pull_request.target_ref_parts,
1901 'target_ref': pull_request.target_ref_parts,
1901 'source_ref': pull_request.source_ref_parts,
1902 'source_ref': pull_request.source_ref_parts,
1902 }
1903 }
1903 if pull_request.last_merge_metadata:
1904 if pull_request.last_merge_metadata:
1904 metadata.update(pull_request.last_merge_metadata_parsed)
1905 metadata.update(pull_request.last_merge_metadata_parsed)
1905
1906
1906 if not possible and target_ref.type == 'branch':
1907 if not possible and target_ref.type == 'branch':
1907 # NOTE(marcink): case for mercurial multiple heads on branch
1908 # NOTE(marcink): case for mercurial multiple heads on branch
1908 heads = target_vcs._heads(target_ref.name)
1909 heads = target_vcs._heads(target_ref.name)
1909 if len(heads) != 1:
1910 if len(heads) != 1:
1910 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1911 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1911 metadata.update({
1912 metadata.update({
1912 'heads': heads
1913 'heads': heads
1913 })
1914 })
1914
1915
1915 merge_state = MergeResponse(
1916 merge_state = MergeResponse(
1916 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1917 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1917
1918
1918 return merge_state
1919 return merge_state
1919
1920
1920 def _refresh_reference(self, reference, vcs_repository):
1921 def _refresh_reference(self, reference, vcs_repository):
1921 if reference.type in self.UPDATABLE_REF_TYPES:
1922 if reference.type in self.UPDATABLE_REF_TYPES:
1922 name_or_id = reference.name
1923 name_or_id = reference.name
1923 else:
1924 else:
1924 name_or_id = reference.commit_id
1925 name_or_id = reference.commit_id
1925
1926
1926 refreshed_commit = vcs_repository.get_commit(name_or_id)
1927 refreshed_commit = vcs_repository.get_commit(name_or_id)
1927 refreshed_reference = Reference(
1928 refreshed_reference = Reference(
1928 reference.type, reference.name, refreshed_commit.raw_id)
1929 reference.type, reference.name, refreshed_commit.raw_id)
1929 return refreshed_reference
1930 return refreshed_reference
1930
1931
1931 def _needs_merge_state_refresh(self, pull_request, target_reference):
1932 def _needs_merge_state_refresh(self, pull_request, target_reference):
1932 return not(
1933 return not(
1933 pull_request.revisions and
1934 pull_request.revisions and
1934 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1935 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1935 target_reference.commit_id == pull_request._last_merge_target_rev)
1936 target_reference.commit_id == pull_request._last_merge_target_rev)
1936
1937
1937 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1938 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1938 workspace_id = self._workspace_id(pull_request)
1939 workspace_id = self._workspace_id(pull_request)
1939 source_vcs = pull_request.source_repo.scm_instance()
1940 source_vcs = pull_request.source_repo.scm_instance()
1940 repo_id = pull_request.target_repo.repo_id
1941 repo_id = pull_request.target_repo.repo_id
1941 use_rebase = self._use_rebase_for_merging(pull_request)
1942 use_rebase = self._use_rebase_for_merging(pull_request)
1942 close_branch = self._close_branch_before_merging(pull_request)
1943 close_branch = self._close_branch_before_merging(pull_request)
1943 merge_state = target_vcs.merge(
1944 merge_state = target_vcs.merge(
1944 repo_id, workspace_id,
1945 repo_id, workspace_id,
1945 target_reference, source_vcs, pull_request.source_ref_parts,
1946 target_reference, source_vcs, pull_request.source_ref_parts,
1946 dry_run=True, use_rebase=use_rebase,
1947 dry_run=True, use_rebase=use_rebase,
1947 close_branch=close_branch)
1948 close_branch=close_branch)
1948
1949
1949 # Do not store the response if there was an unknown error.
1950 # Do not store the response if there was an unknown error.
1950 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1951 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1951 pull_request._last_merge_source_rev = \
1952 pull_request._last_merge_source_rev = \
1952 pull_request.source_ref_parts.commit_id
1953 pull_request.source_ref_parts.commit_id
1953 pull_request._last_merge_target_rev = target_reference.commit_id
1954 pull_request._last_merge_target_rev = target_reference.commit_id
1954 pull_request.last_merge_status = merge_state.failure_reason
1955 pull_request.last_merge_status = merge_state.failure_reason
1955 pull_request.last_merge_metadata = merge_state.metadata
1956 pull_request.last_merge_metadata = merge_state.metadata
1956
1957
1957 pull_request.shadow_merge_ref = merge_state.merge_ref
1958 pull_request.shadow_merge_ref = merge_state.merge_ref
1958 Session().add(pull_request)
1959 Session().add(pull_request)
1959 Session().commit()
1960 Session().commit()
1960
1961
1961 return merge_state
1962 return merge_state
1962
1963
1963 def _workspace_id(self, pull_request):
1964 def _workspace_id(self, pull_request):
1964 workspace_id = 'pr-%s' % pull_request.pull_request_id
1965 workspace_id = 'pr-%s' % pull_request.pull_request_id
1965 return workspace_id
1966 return workspace_id
1966
1967
1967 def generate_repo_data(self, repo, commit_id=None, branch=None,
1968 def generate_repo_data(self, repo, commit_id=None, branch=None,
1968 bookmark=None, translator=None):
1969 bookmark=None, translator=None):
1969 from rhodecode.model.repo import RepoModel
1970 from rhodecode.model.repo import RepoModel
1970
1971
1971 all_refs, selected_ref = \
1972 all_refs, selected_ref = \
1972 self._get_repo_pullrequest_sources(
1973 self._get_repo_pullrequest_sources(
1973 repo.scm_instance(), commit_id=commit_id,
1974 repo.scm_instance(), commit_id=commit_id,
1974 branch=branch, bookmark=bookmark, translator=translator)
1975 branch=branch, bookmark=bookmark, translator=translator)
1975
1976
1976 refs_select2 = []
1977 refs_select2 = []
1977 for element in all_refs:
1978 for element in all_refs:
1978 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1979 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1979 refs_select2.append({'text': element[1], 'children': children})
1980 refs_select2.append({'text': element[1], 'children': children})
1980
1981
1981 return {
1982 return {
1982 'user': {
1983 'user': {
1983 'user_id': repo.user.user_id,
1984 'user_id': repo.user.user_id,
1984 'username': repo.user.username,
1985 'username': repo.user.username,
1985 'firstname': repo.user.first_name,
1986 'firstname': repo.user.first_name,
1986 'lastname': repo.user.last_name,
1987 'lastname': repo.user.last_name,
1987 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1988 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1988 },
1989 },
1989 'name': repo.repo_name,
1990 'name': repo.repo_name,
1990 'link': RepoModel().get_url(repo),
1991 'link': RepoModel().get_url(repo),
1991 'description': h.chop_at_smart(repo.description_safe, '\n'),
1992 'description': h.chop_at_smart(repo.description_safe, '\n'),
1992 'refs': {
1993 'refs': {
1993 'all_refs': all_refs,
1994 'all_refs': all_refs,
1994 'selected_ref': selected_ref,
1995 'selected_ref': selected_ref,
1995 'select2_refs': refs_select2
1996 'select2_refs': refs_select2
1996 }
1997 }
1997 }
1998 }
1998
1999
1999 def generate_pullrequest_title(self, source, source_ref, target):
2000 def generate_pullrequest_title(self, source, source_ref, target):
2000 return u'{source}#{at_ref} to {target}'.format(
2001 return u'{source}#{at_ref} to {target}'.format(
2001 source=source,
2002 source=source,
2002 at_ref=source_ref,
2003 at_ref=source_ref,
2003 target=target,
2004 target=target,
2004 )
2005 )
2005
2006
2006 def _cleanup_merge_workspace(self, pull_request):
2007 def _cleanup_merge_workspace(self, pull_request):
2007 # Merging related cleanup
2008 # Merging related cleanup
2008 repo_id = pull_request.target_repo.repo_id
2009 repo_id = pull_request.target_repo.repo_id
2009 target_scm = pull_request.target_repo.scm_instance()
2010 target_scm = pull_request.target_repo.scm_instance()
2010 workspace_id = self._workspace_id(pull_request)
2011 workspace_id = self._workspace_id(pull_request)
2011
2012
2012 try:
2013 try:
2013 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2014 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2014 except NotImplementedError:
2015 except NotImplementedError:
2015 pass
2016 pass
2016
2017
2017 def _get_repo_pullrequest_sources(
2018 def _get_repo_pullrequest_sources(
2018 self, repo, commit_id=None, branch=None, bookmark=None,
2019 self, repo, commit_id=None, branch=None, bookmark=None,
2019 translator=None):
2020 translator=None):
2020 """
2021 """
2021 Return a structure with repo's interesting commits, suitable for
2022 Return a structure with repo's interesting commits, suitable for
2022 the selectors in pullrequest controller
2023 the selectors in pullrequest controller
2023
2024
2024 :param commit_id: a commit that must be in the list somehow
2025 :param commit_id: a commit that must be in the list somehow
2025 and selected by default
2026 and selected by default
2026 :param branch: a branch that must be in the list and selected
2027 :param branch: a branch that must be in the list and selected
2027 by default - even if closed
2028 by default - even if closed
2028 :param bookmark: a bookmark that must be in the list and selected
2029 :param bookmark: a bookmark that must be in the list and selected
2029 """
2030 """
2030 _ = translator or get_current_request().translate
2031 _ = translator or get_current_request().translate
2031
2032
2032 commit_id = safe_str(commit_id) if commit_id else None
2033 commit_id = safe_str(commit_id) if commit_id else None
2033 branch = safe_unicode(branch) if branch else None
2034 branch = safe_unicode(branch) if branch else None
2034 bookmark = safe_unicode(bookmark) if bookmark else None
2035 bookmark = safe_unicode(bookmark) if bookmark else None
2035
2036
2036 selected = None
2037 selected = None
2037
2038
2038 # order matters: first source that has commit_id in it will be selected
2039 # order matters: first source that has commit_id in it will be selected
2039 sources = []
2040 sources = []
2040 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2041 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2041 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2042 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2042
2043
2043 if commit_id:
2044 if commit_id:
2044 ref_commit = (h.short_id(commit_id), commit_id)
2045 ref_commit = (h.short_id(commit_id), commit_id)
2045 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2046 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2046
2047
2047 sources.append(
2048 sources.append(
2048 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2049 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2049 )
2050 )
2050
2051
2051 groups = []
2052 groups = []
2052
2053
2053 for group_key, ref_list, group_name, match in sources:
2054 for group_key, ref_list, group_name, match in sources:
2054 group_refs = []
2055 group_refs = []
2055 for ref_name, ref_id in ref_list:
2056 for ref_name, ref_id in ref_list:
2056 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2057 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2057 group_refs.append((ref_key, ref_name))
2058 group_refs.append((ref_key, ref_name))
2058
2059
2059 if not selected:
2060 if not selected:
2060 if set([commit_id, match]) & set([ref_id, ref_name]):
2061 if set([commit_id, match]) & set([ref_id, ref_name]):
2061 selected = ref_key
2062 selected = ref_key
2062
2063
2063 if group_refs:
2064 if group_refs:
2064 groups.append((group_refs, group_name))
2065 groups.append((group_refs, group_name))
2065
2066
2066 if not selected:
2067 if not selected:
2067 ref = commit_id or branch or bookmark
2068 ref = commit_id or branch or bookmark
2068 if ref:
2069 if ref:
2069 raise CommitDoesNotExistError(
2070 raise CommitDoesNotExistError(
2070 u'No commit refs could be found matching: {}'.format(ref))
2071 u'No commit refs could be found matching: {}'.format(ref))
2071 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2072 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2072 selected = u'branch:{}:{}'.format(
2073 selected = u'branch:{}:{}'.format(
2073 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2074 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2074 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2075 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2075 )
2076 )
2076 elif repo.commit_ids:
2077 elif repo.commit_ids:
2077 # make the user select in this case
2078 # make the user select in this case
2078 selected = None
2079 selected = None
2079 else:
2080 else:
2080 raise EmptyRepositoryError()
2081 raise EmptyRepositoryError()
2081 return groups, selected
2082 return groups, selected
2082
2083
2083 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2084 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2084 hide_whitespace_changes, diff_context):
2085 hide_whitespace_changes, diff_context):
2085
2086
2086 return self._get_diff_from_pr_or_version(
2087 return self._get_diff_from_pr_or_version(
2087 source_repo, source_ref_id, target_ref_id,
2088 source_repo, source_ref_id, target_ref_id,
2088 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2089 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2089
2090
2090 def _get_diff_from_pr_or_version(
2091 def _get_diff_from_pr_or_version(
2091 self, source_repo, source_ref_id, target_ref_id,
2092 self, source_repo, source_ref_id, target_ref_id,
2092 hide_whitespace_changes, diff_context):
2093 hide_whitespace_changes, diff_context):
2093
2094
2094 target_commit = source_repo.get_commit(
2095 target_commit = source_repo.get_commit(
2095 commit_id=safe_str(target_ref_id))
2096 commit_id=safe_str(target_ref_id))
2096 source_commit = source_repo.get_commit(
2097 source_commit = source_repo.get_commit(
2097 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2098 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2098 if isinstance(source_repo, Repository):
2099 if isinstance(source_repo, Repository):
2099 vcs_repo = source_repo.scm_instance()
2100 vcs_repo = source_repo.scm_instance()
2100 else:
2101 else:
2101 vcs_repo = source_repo
2102 vcs_repo = source_repo
2102
2103
2103 # TODO: johbo: In the context of an update, we cannot reach
2104 # TODO: johbo: In the context of an update, we cannot reach
2104 # the old commit anymore with our normal mechanisms. It needs
2105 # the old commit anymore with our normal mechanisms. It needs
2105 # some sort of special support in the vcs layer to avoid this
2106 # some sort of special support in the vcs layer to avoid this
2106 # workaround.
2107 # workaround.
2107 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2108 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2108 vcs_repo.alias == 'git'):
2109 vcs_repo.alias == 'git'):
2109 source_commit.raw_id = safe_str(source_ref_id)
2110 source_commit.raw_id = safe_str(source_ref_id)
2110
2111
2111 log.debug('calculating diff between '
2112 log.debug('calculating diff between '
2112 'source_ref:%s and target_ref:%s for repo `%s`',
2113 'source_ref:%s and target_ref:%s for repo `%s`',
2113 target_ref_id, source_ref_id,
2114 target_ref_id, source_ref_id,
2114 safe_unicode(vcs_repo.path))
2115 safe_unicode(vcs_repo.path))
2115
2116
2116 vcs_diff = vcs_repo.get_diff(
2117 vcs_diff = vcs_repo.get_diff(
2117 commit1=target_commit, commit2=source_commit,
2118 commit1=target_commit, commit2=source_commit,
2118 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2119 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2119 return vcs_diff
2120 return vcs_diff
2120
2121
2121 def _is_merge_enabled(self, pull_request):
2122 def _is_merge_enabled(self, pull_request):
2122 return self._get_general_setting(
2123 return self._get_general_setting(
2123 pull_request, 'rhodecode_pr_merge_enabled')
2124 pull_request, 'rhodecode_pr_merge_enabled')
2124
2125
2125 def _use_rebase_for_merging(self, pull_request):
2126 def _use_rebase_for_merging(self, pull_request):
2126 repo_type = pull_request.target_repo.repo_type
2127 repo_type = pull_request.target_repo.repo_type
2127 if repo_type == 'hg':
2128 if repo_type == 'hg':
2128 return self._get_general_setting(
2129 return self._get_general_setting(
2129 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2130 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2130 elif repo_type == 'git':
2131 elif repo_type == 'git':
2131 return self._get_general_setting(
2132 return self._get_general_setting(
2132 pull_request, 'rhodecode_git_use_rebase_for_merging')
2133 pull_request, 'rhodecode_git_use_rebase_for_merging')
2133
2134
2134 return False
2135 return False
2135
2136
2136 def _user_name_for_merging(self, pull_request, user):
2137 def _user_name_for_merging(self, pull_request, user):
2137 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2138 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2138 if env_user_name_attr and hasattr(user, env_user_name_attr):
2139 if env_user_name_attr and hasattr(user, env_user_name_attr):
2139 user_name_attr = env_user_name_attr
2140 user_name_attr = env_user_name_attr
2140 else:
2141 else:
2141 user_name_attr = 'short_contact'
2142 user_name_attr = 'short_contact'
2142
2143
2143 user_name = getattr(user, user_name_attr)
2144 user_name = getattr(user, user_name_attr)
2144 return user_name
2145 return user_name
2145
2146
2146 def _close_branch_before_merging(self, pull_request):
2147 def _close_branch_before_merging(self, pull_request):
2147 repo_type = pull_request.target_repo.repo_type
2148 repo_type = pull_request.target_repo.repo_type
2148 if repo_type == 'hg':
2149 if repo_type == 'hg':
2149 return self._get_general_setting(
2150 return self._get_general_setting(
2150 pull_request, 'rhodecode_hg_close_branch_before_merging')
2151 pull_request, 'rhodecode_hg_close_branch_before_merging')
2151 elif repo_type == 'git':
2152 elif repo_type == 'git':
2152 return self._get_general_setting(
2153 return self._get_general_setting(
2153 pull_request, 'rhodecode_git_close_branch_before_merging')
2154 pull_request, 'rhodecode_git_close_branch_before_merging')
2154
2155
2155 return False
2156 return False
2156
2157
2157 def _get_general_setting(self, pull_request, settings_key, default=False):
2158 def _get_general_setting(self, pull_request, settings_key, default=False):
2158 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2159 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2159 settings = settings_model.get_general_settings()
2160 settings = settings_model.get_general_settings()
2160 return settings.get(settings_key, default)
2161 return settings.get(settings_key, default)
2161
2162
2162 def _log_audit_action(self, action, action_data, user, pull_request):
2163 def _log_audit_action(self, action, action_data, user, pull_request):
2163 audit_logger.store(
2164 audit_logger.store(
2164 action=action,
2165 action=action,
2165 action_data=action_data,
2166 action_data=action_data,
2166 user=user,
2167 user=user,
2167 repo=pull_request.target_repo)
2168 repo=pull_request.target_repo)
2168
2169
2169 def get_reviewer_functions(self):
2170 def get_reviewer_functions(self):
2170 """
2171 """
2171 Fetches functions for validation and fetching default reviewers.
2172 Fetches functions for validation and fetching default reviewers.
2172 If available we use the EE package, else we fallback to CE
2173 If available we use the EE package, else we fallback to CE
2173 package functions
2174 package functions
2174 """
2175 """
2175 try:
2176 try:
2176 from rc_reviewers.utils import get_default_reviewers_data
2177 from rc_reviewers.utils import get_default_reviewers_data
2177 from rc_reviewers.utils import validate_default_reviewers
2178 from rc_reviewers.utils import validate_default_reviewers
2178 from rc_reviewers.utils import validate_observers
2179 from rc_reviewers.utils import validate_observers
2179 except ImportError:
2180 except ImportError:
2180 from rhodecode.apps.repository.utils import get_default_reviewers_data
2181 from rhodecode.apps.repository.utils import get_default_reviewers_data
2181 from rhodecode.apps.repository.utils import validate_default_reviewers
2182 from rhodecode.apps.repository.utils import validate_default_reviewers
2182 from rhodecode.apps.repository.utils import validate_observers
2183 from rhodecode.apps.repository.utils import validate_observers
2183
2184
2184 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2185 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2185
2186
2186
2187
2187 class MergeCheck(object):
2188 class MergeCheck(object):
2188 """
2189 """
2189 Perform Merge Checks and returns a check object which stores information
2190 Perform Merge Checks and returns a check object which stores information
2190 about merge errors, and merge conditions
2191 about merge errors, and merge conditions
2191 """
2192 """
2192 TODO_CHECK = 'todo'
2193 TODO_CHECK = 'todo'
2193 PERM_CHECK = 'perm'
2194 PERM_CHECK = 'perm'
2194 REVIEW_CHECK = 'review'
2195 REVIEW_CHECK = 'review'
2195 MERGE_CHECK = 'merge'
2196 MERGE_CHECK = 'merge'
2196 WIP_CHECK = 'wip'
2197 WIP_CHECK = 'wip'
2197
2198
2198 def __init__(self):
2199 def __init__(self):
2199 self.review_status = None
2200 self.review_status = None
2200 self.merge_possible = None
2201 self.merge_possible = None
2201 self.merge_msg = ''
2202 self.merge_msg = ''
2202 self.merge_response = None
2203 self.merge_response = None
2203 self.failed = None
2204 self.failed = None
2204 self.errors = []
2205 self.errors = []
2205 self.error_details = OrderedDict()
2206 self.error_details = OrderedDict()
2206 self.source_commit = AttributeDict()
2207 self.source_commit = AttributeDict()
2207 self.target_commit = AttributeDict()
2208 self.target_commit = AttributeDict()
2208 self.reviewers_count = 0
2209 self.reviewers_count = 0
2209 self.observers_count = 0
2210 self.observers_count = 0
2210
2211
2211 def __repr__(self):
2212 def __repr__(self):
2212 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2213 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2213 self.merge_possible, self.failed, self.errors)
2214 self.merge_possible, self.failed, self.errors)
2214
2215
2215 def push_error(self, error_type, message, error_key, details):
2216 def push_error(self, error_type, message, error_key, details):
2216 self.failed = True
2217 self.failed = True
2217 self.errors.append([error_type, message])
2218 self.errors.append([error_type, message])
2218 self.error_details[error_key] = dict(
2219 self.error_details[error_key] = dict(
2219 details=details,
2220 details=details,
2220 error_type=error_type,
2221 error_type=error_type,
2221 message=message
2222 message=message
2222 )
2223 )
2223
2224
2224 @classmethod
2225 @classmethod
2225 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2226 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2226 force_shadow_repo_refresh=False):
2227 force_shadow_repo_refresh=False):
2227 _ = translator
2228 _ = translator
2228 merge_check = cls()
2229 merge_check = cls()
2229
2230
2230 # title has WIP:
2231 # title has WIP:
2231 if pull_request.work_in_progress:
2232 if pull_request.work_in_progress:
2232 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2233 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2233
2234
2234 msg = _('WIP marker in title prevents from accidental merge.')
2235 msg = _('WIP marker in title prevents from accidental merge.')
2235 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2236 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2236 if fail_early:
2237 if fail_early:
2237 return merge_check
2238 return merge_check
2238
2239
2239 # permissions to merge
2240 # permissions to merge
2240 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2241 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2241 if not user_allowed_to_merge:
2242 if not user_allowed_to_merge:
2242 log.debug("MergeCheck: cannot merge, approval is pending.")
2243 log.debug("MergeCheck: cannot merge, approval is pending.")
2243
2244
2244 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2245 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2245 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2246 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2246 if fail_early:
2247 if fail_early:
2247 return merge_check
2248 return merge_check
2248
2249
2249 # permission to merge into the target branch
2250 # permission to merge into the target branch
2250 target_commit_id = pull_request.target_ref_parts.commit_id
2251 target_commit_id = pull_request.target_ref_parts.commit_id
2251 if pull_request.target_ref_parts.type == 'branch':
2252 if pull_request.target_ref_parts.type == 'branch':
2252 branch_name = pull_request.target_ref_parts.name
2253 branch_name = pull_request.target_ref_parts.name
2253 else:
2254 else:
2254 # for mercurial we can always figure out the branch from the commit
2255 # for mercurial we can always figure out the branch from the commit
2255 # in case of bookmark
2256 # in case of bookmark
2256 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2257 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2257 branch_name = target_commit.branch
2258 branch_name = target_commit.branch
2258
2259
2259 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2260 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2260 pull_request.target_repo.repo_name, branch_name)
2261 pull_request.target_repo.repo_name, branch_name)
2261 if branch_perm and branch_perm == 'branch.none':
2262 if branch_perm and branch_perm == 'branch.none':
2262 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2263 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2263 branch_name, rule)
2264 branch_name, rule)
2264 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2265 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2265 if fail_early:
2266 if fail_early:
2266 return merge_check
2267 return merge_check
2267
2268
2268 # review status, must be always present
2269 # review status, must be always present
2269 review_status = pull_request.calculated_review_status()
2270 review_status = pull_request.calculated_review_status()
2270 merge_check.review_status = review_status
2271 merge_check.review_status = review_status
2271 merge_check.reviewers_count = pull_request.reviewers_count
2272 merge_check.reviewers_count = pull_request.reviewers_count
2272 merge_check.observers_count = pull_request.observers_count
2273 merge_check.observers_count = pull_request.observers_count
2273
2274
2274 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2275 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2275 if not status_approved and merge_check.reviewers_count:
2276 if not status_approved and merge_check.reviewers_count:
2276 log.debug("MergeCheck: cannot merge, approval is pending.")
2277 log.debug("MergeCheck: cannot merge, approval is pending.")
2277 msg = _('Pull request reviewer approval is pending.')
2278 msg = _('Pull request reviewer approval is pending.')
2278
2279
2279 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2280 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2280
2281
2281 if fail_early:
2282 if fail_early:
2282 return merge_check
2283 return merge_check
2283
2284
2284 # left over TODOs
2285 # left over TODOs
2285 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2286 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2286 if todos:
2287 if todos:
2287 log.debug("MergeCheck: cannot merge, {} "
2288 log.debug("MergeCheck: cannot merge, {} "
2288 "unresolved TODOs left.".format(len(todos)))
2289 "unresolved TODOs left.".format(len(todos)))
2289
2290
2290 if len(todos) == 1:
2291 if len(todos) == 1:
2291 msg = _('Cannot merge, {} TODO still not resolved.').format(
2292 msg = _('Cannot merge, {} TODO still not resolved.').format(
2292 len(todos))
2293 len(todos))
2293 else:
2294 else:
2294 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2295 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2295 len(todos))
2296 len(todos))
2296
2297
2297 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2298 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2298
2299
2299 if fail_early:
2300 if fail_early:
2300 return merge_check
2301 return merge_check
2301
2302
2302 # merge possible, here is the filesystem simulation + shadow repo
2303 # merge possible, here is the filesystem simulation + shadow repo
2303 merge_response, merge_status, msg = PullRequestModel().merge_status(
2304 merge_response, merge_status, msg = PullRequestModel().merge_status(
2304 pull_request, translator=translator,
2305 pull_request, translator=translator,
2305 force_shadow_repo_refresh=force_shadow_repo_refresh)
2306 force_shadow_repo_refresh=force_shadow_repo_refresh)
2306
2307
2307 merge_check.merge_possible = merge_status
2308 merge_check.merge_possible = merge_status
2308 merge_check.merge_msg = msg
2309 merge_check.merge_msg = msg
2309 merge_check.merge_response = merge_response
2310 merge_check.merge_response = merge_response
2310
2311
2311 source_ref_id = pull_request.source_ref_parts.commit_id
2312 source_ref_id = pull_request.source_ref_parts.commit_id
2312 target_ref_id = pull_request.target_ref_parts.commit_id
2313 target_ref_id = pull_request.target_ref_parts.commit_id
2313
2314
2314 try:
2315 try:
2315 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2316 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2316 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2317 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2317 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2318 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2318 merge_check.source_commit.current_raw_id = source_commit.raw_id
2319 merge_check.source_commit.current_raw_id = source_commit.raw_id
2319 merge_check.source_commit.previous_raw_id = source_ref_id
2320 merge_check.source_commit.previous_raw_id = source_ref_id
2320
2321
2321 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2322 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2322 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2323 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2323 merge_check.target_commit.current_raw_id = target_commit.raw_id
2324 merge_check.target_commit.current_raw_id = target_commit.raw_id
2324 merge_check.target_commit.previous_raw_id = target_ref_id
2325 merge_check.target_commit.previous_raw_id = target_ref_id
2325 except (SourceRefMissing, TargetRefMissing):
2326 except (SourceRefMissing, TargetRefMissing):
2326 pass
2327 pass
2327
2328
2328 if not merge_status:
2329 if not merge_status:
2329 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2330 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2330 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2331 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2331
2332
2332 if fail_early:
2333 if fail_early:
2333 return merge_check
2334 return merge_check
2334
2335
2335 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2336 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2336 return merge_check
2337 return merge_check
2337
2338
2338 @classmethod
2339 @classmethod
2339 def get_merge_conditions(cls, pull_request, translator):
2340 def get_merge_conditions(cls, pull_request, translator):
2340 _ = translator
2341 _ = translator
2341 merge_details = {}
2342 merge_details = {}
2342
2343
2343 model = PullRequestModel()
2344 model = PullRequestModel()
2344 use_rebase = model._use_rebase_for_merging(pull_request)
2345 use_rebase = model._use_rebase_for_merging(pull_request)
2345
2346
2346 if use_rebase:
2347 if use_rebase:
2347 merge_details['merge_strategy'] = dict(
2348 merge_details['merge_strategy'] = dict(
2348 details={},
2349 details={},
2349 message=_('Merge strategy: rebase')
2350 message=_('Merge strategy: rebase')
2350 )
2351 )
2351 else:
2352 else:
2352 merge_details['merge_strategy'] = dict(
2353 merge_details['merge_strategy'] = dict(
2353 details={},
2354 details={},
2354 message=_('Merge strategy: explicit merge commit')
2355 message=_('Merge strategy: explicit merge commit')
2355 )
2356 )
2356
2357
2357 close_branch = model._close_branch_before_merging(pull_request)
2358 close_branch = model._close_branch_before_merging(pull_request)
2358 if close_branch:
2359 if close_branch:
2359 repo_type = pull_request.target_repo.repo_type
2360 repo_type = pull_request.target_repo.repo_type
2360 close_msg = ''
2361 close_msg = ''
2361 if repo_type == 'hg':
2362 if repo_type == 'hg':
2362 close_msg = _('Source branch will be closed before the merge.')
2363 close_msg = _('Source branch will be closed before the merge.')
2363 elif repo_type == 'git':
2364 elif repo_type == 'git':
2364 close_msg = _('Source branch will be deleted after the merge.')
2365 close_msg = _('Source branch will be deleted after the merge.')
2365
2366
2366 merge_details['close_branch'] = dict(
2367 merge_details['close_branch'] = dict(
2367 details={},
2368 details={},
2368 message=close_msg
2369 message=close_msg
2369 )
2370 )
2370
2371
2371 return merge_details
2372 return merge_details
2372
2373
2373
2374
2374 ChangeTuple = collections.namedtuple(
2375 ChangeTuple = collections.namedtuple(
2375 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2376 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2376
2377
2377 FileChangeTuple = collections.namedtuple(
2378 FileChangeTuple = collections.namedtuple(
2378 'FileChangeTuple', ['added', 'modified', 'removed'])
2379 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,1183 +1,1185 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import re
22 import re
23 import shutil
23 import shutil
24 import time
24 import time
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
27 import datetime
28
28
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 from rhodecode.lib import hooks_base
36 from rhodecode.lib import hooks_base
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class RepoModel(BaseModel):
55 class RepoModel(BaseModel):
56
56
57 cls = Repository
57 cls = Repository
58
58
59 def _get_user_group(self, users_group):
59 def _get_user_group(self, users_group):
60 return self._get_instance(UserGroup, users_group,
60 return self._get_instance(UserGroup, users_group,
61 callback=UserGroup.get_by_group_name)
61 callback=UserGroup.get_by_group_name)
62
62
63 def _get_repo_group(self, repo_group):
63 def _get_repo_group(self, repo_group):
64 return self._get_instance(RepoGroup, repo_group,
64 return self._get_instance(RepoGroup, repo_group,
65 callback=RepoGroup.get_by_group_name)
65 callback=RepoGroup.get_by_group_name)
66
66
67 def _create_default_perms(self, repository, private):
67 def _create_default_perms(self, repository, private):
68 # create default permission
68 # create default permission
69 default = 'repository.read'
69 default = 'repository.read'
70 def_user = User.get_default_user()
70 def_user = User.get_default_user()
71 for p in def_user.user_perms:
71 for p in def_user.user_perms:
72 if p.permission.permission_name.startswith('repository.'):
72 if p.permission.permission_name.startswith('repository.'):
73 default = p.permission.permission_name
73 default = p.permission.permission_name
74 break
74 break
75
75
76 default_perm = 'repository.none' if private else default
76 default_perm = 'repository.none' if private else default
77
77
78 repo_to_perm = UserRepoToPerm()
78 repo_to_perm = UserRepoToPerm()
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
80
80
81 repo_to_perm.repository = repository
81 repo_to_perm.repository = repository
82 repo_to_perm.user_id = def_user.user_id
82 repo_to_perm.user_id = def_user.user_id
83
83
84 return repo_to_perm
84 return repo_to_perm
85
85
86 @LazyProperty
86 @LazyProperty
87 def repos_path(self):
87 def repos_path(self):
88 """
88 """
89 Gets the repositories root path from database
89 Gets the repositories root path from database
90 """
90 """
91 settings_model = VcsSettingsModel(sa=self.sa)
91 settings_model = VcsSettingsModel(sa=self.sa)
92 return settings_model.get_repos_location()
92 return settings_model.get_repos_location()
93
93
94 def get(self, repo_id):
94 def get(self, repo_id):
95 repo = self.sa.query(Repository) \
95 repo = self.sa.query(Repository) \
96 .filter(Repository.repo_id == repo_id)
96 .filter(Repository.repo_id == repo_id)
97
97
98 return repo.scalar()
98 return repo.scalar()
99
99
100 def get_repo(self, repository):
100 def get_repo(self, repository):
101 return self._get_repo(repository)
101 return self._get_repo(repository)
102
102
103 def get_by_repo_name(self, repo_name, cache=False):
103 def get_by_repo_name(self, repo_name, cache=False):
104 repo = self.sa.query(Repository) \
104 repo = self.sa.query(Repository) \
105 .filter(Repository.repo_name == repo_name)
105 .filter(Repository.repo_name == repo_name)
106
106
107 if cache:
107 if cache:
108 name_key = _hash_key(repo_name)
108 name_key = _hash_key(repo_name)
109 repo = repo.options(
109 repo = repo.options(
110 FromCache("sql_cache_short", "get_repo_%s" % name_key))
110 FromCache("sql_cache_short", "get_repo_%s" % name_key))
111 return repo.scalar()
111 return repo.scalar()
112
112
113 def _extract_id_from_repo_name(self, repo_name):
113 def _extract_id_from_repo_name(self, repo_name):
114 if repo_name.startswith('/'):
114 if repo_name.startswith('/'):
115 repo_name = repo_name.lstrip('/')
115 repo_name = repo_name.lstrip('/')
116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
117 if by_id_match:
117 if by_id_match:
118 return by_id_match.groups()[0]
118 return by_id_match.groups()[0]
119
119
120 def get_repo_by_id(self, repo_name):
120 def get_repo_by_id(self, repo_name):
121 """
121 """
122 Extracts repo_name by id from special urls.
122 Extracts repo_name by id from special urls.
123 Example url is _11/repo_name
123 Example url is _11/repo_name
124
124
125 :param repo_name:
125 :param repo_name:
126 :return: repo object if matched else None
126 :return: repo object if matched else None
127 """
127 """
128
128 _repo_id = None
129 try:
129 try:
130 _repo_id = self._extract_id_from_repo_name(repo_name)
130 _repo_id = self._extract_id_from_repo_name(repo_name)
131 if _repo_id:
131 if _repo_id:
132 return self.get(_repo_id)
132 return self.get(_repo_id)
133 except Exception:
133 except Exception:
134 log.exception('Failed to extract repo_name from URL')
134 log.exception('Failed to extract repo_name from URL')
135 if _repo_id:
136 Session().rollback()
135
137
136 return None
138 return None
137
139
138 def get_repos_for_root(self, root, traverse=False):
140 def get_repos_for_root(self, root, traverse=False):
139 if traverse:
141 if traverse:
140 like_expression = u'{}%'.format(safe_unicode(root))
142 like_expression = u'{}%'.format(safe_unicode(root))
141 repos = Repository.query().filter(
143 repos = Repository.query().filter(
142 Repository.repo_name.like(like_expression)).all()
144 Repository.repo_name.like(like_expression)).all()
143 else:
145 else:
144 if root and not isinstance(root, RepoGroup):
146 if root and not isinstance(root, RepoGroup):
145 raise ValueError(
147 raise ValueError(
146 'Root must be an instance '
148 'Root must be an instance '
147 'of RepoGroup, got:{} instead'.format(type(root)))
149 'of RepoGroup, got:{} instead'.format(type(root)))
148 repos = Repository.query().filter(Repository.group == root).all()
150 repos = Repository.query().filter(Repository.group == root).all()
149 return repos
151 return repos
150
152
151 def get_url(self, repo, request=None, permalink=False):
153 def get_url(self, repo, request=None, permalink=False):
152 if not request:
154 if not request:
153 request = get_current_request()
155 request = get_current_request()
154
156
155 if not request:
157 if not request:
156 return
158 return
157
159
158 if permalink:
160 if permalink:
159 return request.route_url(
161 return request.route_url(
160 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
161 else:
163 else:
162 return request.route_url(
164 return request.route_url(
163 'repo_summary', repo_name=safe_str(repo.repo_name))
165 'repo_summary', repo_name=safe_str(repo.repo_name))
164
166
165 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
166 if not request:
168 if not request:
167 request = get_current_request()
169 request = get_current_request()
168
170
169 if not request:
171 if not request:
170 return
172 return
171
173
172 if permalink:
174 if permalink:
173 return request.route_url(
175 return request.route_url(
174 'repo_commit', repo_name=safe_str(repo.repo_id),
176 'repo_commit', repo_name=safe_str(repo.repo_id),
175 commit_id=commit_id)
177 commit_id=commit_id)
176
178
177 else:
179 else:
178 return request.route_url(
180 return request.route_url(
179 'repo_commit', repo_name=safe_str(repo.repo_name),
181 'repo_commit', repo_name=safe_str(repo.repo_name),
180 commit_id=commit_id)
182 commit_id=commit_id)
181
183
182 def get_repo_log(self, repo, filter_term):
184 def get_repo_log(self, repo, filter_term):
183 repo_log = UserLog.query()\
185 repo_log = UserLog.query()\
184 .filter(or_(UserLog.repository_id == repo.repo_id,
186 .filter(or_(UserLog.repository_id == repo.repo_id,
185 UserLog.repository_name == repo.repo_name))\
187 UserLog.repository_name == repo.repo_name))\
186 .options(joinedload(UserLog.user))\
188 .options(joinedload(UserLog.user))\
187 .options(joinedload(UserLog.repository))\
189 .options(joinedload(UserLog.repository))\
188 .order_by(UserLog.action_date.desc())
190 .order_by(UserLog.action_date.desc())
189
191
190 repo_log = user_log_filter(repo_log, filter_term)
192 repo_log = user_log_filter(repo_log, filter_term)
191 return repo_log
193 return repo_log
192
194
193 @classmethod
195 @classmethod
194 def update_commit_cache(cls, repositories=None):
196 def update_commit_cache(cls, repositories=None):
195 if not repositories:
197 if not repositories:
196 repositories = Repository.getAll()
198 repositories = Repository.getAll()
197 for repo in repositories:
199 for repo in repositories:
198 repo.update_commit_cache()
200 repo.update_commit_cache()
199
201
200 def get_repos_as_dict(self, repo_list=None, admin=False,
202 def get_repos_as_dict(self, repo_list=None, admin=False,
201 super_user_actions=False, short_name=None):
203 super_user_actions=False, short_name=None):
202
204
203 _render = get_current_request().get_partial_renderer(
205 _render = get_current_request().get_partial_renderer(
204 'rhodecode:templates/data_table/_dt_elements.mako')
206 'rhodecode:templates/data_table/_dt_elements.mako')
205 c = _render.get_call_context()
207 c = _render.get_call_context()
206 h = _render.get_helpers()
208 h = _render.get_helpers()
207
209
208 def quick_menu(repo_name):
210 def quick_menu(repo_name):
209 return _render('quick_menu', repo_name)
211 return _render('quick_menu', repo_name)
210
212
211 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
213 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
212 if short_name is not None:
214 if short_name is not None:
213 short_name_var = short_name
215 short_name_var = short_name
214 else:
216 else:
215 short_name_var = not admin
217 short_name_var = not admin
216 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
217 short_name=short_name_var, admin=False)
219 short_name=short_name_var, admin=False)
218
220
219 def last_change(last_change):
221 def last_change(last_change):
220 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
221 ts = time.time()
223 ts = time.time()
222 utc_offset = (datetime.datetime.fromtimestamp(ts)
224 utc_offset = (datetime.datetime.fromtimestamp(ts)
223 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
224 last_change = last_change + datetime.timedelta(seconds=utc_offset)
226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
225
227
226 return _render("last_change", last_change)
228 return _render("last_change", last_change)
227
229
228 def rss_lnk(repo_name):
230 def rss_lnk(repo_name):
229 return _render("rss", repo_name)
231 return _render("rss", repo_name)
230
232
231 def atom_lnk(repo_name):
233 def atom_lnk(repo_name):
232 return _render("atom", repo_name)
234 return _render("atom", repo_name)
233
235
234 def last_rev(repo_name, cs_cache):
236 def last_rev(repo_name, cs_cache):
235 return _render('revision', repo_name, cs_cache.get('revision'),
237 return _render('revision', repo_name, cs_cache.get('revision'),
236 cs_cache.get('raw_id'), cs_cache.get('author'),
238 cs_cache.get('raw_id'), cs_cache.get('author'),
237 cs_cache.get('message'), cs_cache.get('date'))
239 cs_cache.get('message'), cs_cache.get('date'))
238
240
239 def desc(desc):
241 def desc(desc):
240 return _render('repo_desc', desc, c.visual.stylify_metatags)
242 return _render('repo_desc', desc, c.visual.stylify_metatags)
241
243
242 def state(repo_state):
244 def state(repo_state):
243 return _render("repo_state", repo_state)
245 return _render("repo_state", repo_state)
244
246
245 def repo_actions(repo_name):
247 def repo_actions(repo_name):
246 return _render('repo_actions', repo_name, super_user_actions)
248 return _render('repo_actions', repo_name, super_user_actions)
247
249
248 def user_profile(username):
250 def user_profile(username):
249 return _render('user_profile', username)
251 return _render('user_profile', username)
250
252
251 repos_data = []
253 repos_data = []
252 for repo in repo_list:
254 for repo in repo_list:
253 # NOTE(marcink): because we use only raw column we need to load it like that
255 # NOTE(marcink): because we use only raw column we need to load it like that
254 changeset_cache = Repository._load_changeset_cache(
256 changeset_cache = Repository._load_changeset_cache(
255 repo.repo_id, repo._changeset_cache)
257 repo.repo_id, repo._changeset_cache)
256
258
257 row = {
259 row = {
258 "menu": quick_menu(repo.repo_name),
260 "menu": quick_menu(repo.repo_name),
259
261
260 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
261 repo.private, repo.archived, repo.fork),
263 repo.private, repo.archived, repo.fork),
262
264
263 "desc": desc(h.escape(repo.description)),
265 "desc": desc(h.escape(repo.description)),
264
266
265 "last_change": last_change(repo.updated_on),
267 "last_change": last_change(repo.updated_on),
266
268
267 "last_changeset": last_rev(repo.repo_name, changeset_cache),
269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
268 "last_changeset_raw": changeset_cache.get('revision'),
270 "last_changeset_raw": changeset_cache.get('revision'),
269
271
270 "owner": user_profile(repo.User.username),
272 "owner": user_profile(repo.User.username),
271
273
272 "state": state(repo.repo_state),
274 "state": state(repo.repo_state),
273 "rss": rss_lnk(repo.repo_name),
275 "rss": rss_lnk(repo.repo_name),
274 "atom": atom_lnk(repo.repo_name),
276 "atom": atom_lnk(repo.repo_name),
275 }
277 }
276 if admin:
278 if admin:
277 row.update({
279 row.update({
278 "action": repo_actions(repo.repo_name),
280 "action": repo_actions(repo.repo_name),
279 })
281 })
280 repos_data.append(row)
282 repos_data.append(row)
281
283
282 return repos_data
284 return repos_data
283
285
284 def get_repos_data_table(
286 def get_repos_data_table(
285 self, draw, start, limit,
287 self, draw, start, limit,
286 search_q, order_by, order_dir,
288 search_q, order_by, order_dir,
287 auth_user, repo_group_id):
289 auth_user, repo_group_id):
288 from rhodecode.model.scm import RepoList
290 from rhodecode.model.scm import RepoList
289
291
290 _perms = ['repository.read', 'repository.write', 'repository.admin']
292 _perms = ['repository.read', 'repository.write', 'repository.admin']
291
293
292 repos = Repository.query() \
294 repos = Repository.query() \
293 .filter(Repository.group_id == repo_group_id) \
295 .filter(Repository.group_id == repo_group_id) \
294 .all()
296 .all()
295 auth_repo_list = RepoList(
297 auth_repo_list = RepoList(
296 repos, perm_set=_perms,
298 repos, perm_set=_perms,
297 extra_kwargs=dict(user=auth_user))
299 extra_kwargs=dict(user=auth_user))
298
300
299 allowed_ids = [-1]
301 allowed_ids = [-1]
300 for repo in auth_repo_list:
302 for repo in auth_repo_list:
301 allowed_ids.append(repo.repo_id)
303 allowed_ids.append(repo.repo_id)
302
304
303 repos_data_total_count = Repository.query() \
305 repos_data_total_count = Repository.query() \
304 .filter(Repository.group_id == repo_group_id) \
306 .filter(Repository.group_id == repo_group_id) \
305 .filter(or_(
307 .filter(or_(
306 # generate multiple IN to fix limitation problems
308 # generate multiple IN to fix limitation problems
307 *in_filter_generator(Repository.repo_id, allowed_ids))
309 *in_filter_generator(Repository.repo_id, allowed_ids))
308 ) \
310 ) \
309 .count()
311 .count()
310
312
311 base_q = Session.query(
313 base_q = Session.query(
312 Repository.repo_id,
314 Repository.repo_id,
313 Repository.repo_name,
315 Repository.repo_name,
314 Repository.description,
316 Repository.description,
315 Repository.repo_type,
317 Repository.repo_type,
316 Repository.repo_state,
318 Repository.repo_state,
317 Repository.private,
319 Repository.private,
318 Repository.archived,
320 Repository.archived,
319 Repository.fork,
321 Repository.fork,
320 Repository.updated_on,
322 Repository.updated_on,
321 Repository._changeset_cache,
323 Repository._changeset_cache,
322 User,
324 User,
323 ) \
325 ) \
324 .filter(Repository.group_id == repo_group_id) \
326 .filter(Repository.group_id == repo_group_id) \
325 .filter(or_(
327 .filter(or_(
326 # generate multiple IN to fix limitation problems
328 # generate multiple IN to fix limitation problems
327 *in_filter_generator(Repository.repo_id, allowed_ids))
329 *in_filter_generator(Repository.repo_id, allowed_ids))
328 ) \
330 ) \
329 .join(User, User.user_id == Repository.user_id) \
331 .join(User, User.user_id == Repository.user_id) \
330 .group_by(Repository, User)
332 .group_by(Repository, User)
331
333
332 repos_data_total_filtered_count = base_q.count()
334 repos_data_total_filtered_count = base_q.count()
333
335
334 sort_defined = False
336 sort_defined = False
335 if order_by == 'repo_name':
337 if order_by == 'repo_name':
336 sort_col = func.lower(Repository.repo_name)
338 sort_col = func.lower(Repository.repo_name)
337 sort_defined = True
339 sort_defined = True
338 elif order_by == 'user_username':
340 elif order_by == 'user_username':
339 sort_col = User.username
341 sort_col = User.username
340 else:
342 else:
341 sort_col = getattr(Repository, order_by, None)
343 sort_col = getattr(Repository, order_by, None)
342
344
343 if sort_defined or sort_col:
345 if sort_defined or sort_col:
344 if order_dir == 'asc':
346 if order_dir == 'asc':
345 sort_col = sort_col.asc()
347 sort_col = sort_col.asc()
346 else:
348 else:
347 sort_col = sort_col.desc()
349 sort_col = sort_col.desc()
348
350
349 base_q = base_q.order_by(sort_col)
351 base_q = base_q.order_by(sort_col)
350 base_q = base_q.offset(start).limit(limit)
352 base_q = base_q.offset(start).limit(limit)
351
353
352 repos_list = base_q.all()
354 repos_list = base_q.all()
353
355
354 repos_data = RepoModel().get_repos_as_dict(
356 repos_data = RepoModel().get_repos_as_dict(
355 repo_list=repos_list, admin=False)
357 repo_list=repos_list, admin=False)
356
358
357 data = ({
359 data = ({
358 'draw': draw,
360 'draw': draw,
359 'data': repos_data,
361 'data': repos_data,
360 'recordsTotal': repos_data_total_count,
362 'recordsTotal': repos_data_total_count,
361 'recordsFiltered': repos_data_total_filtered_count,
363 'recordsFiltered': repos_data_total_filtered_count,
362 })
364 })
363 return data
365 return data
364
366
365 def _get_defaults(self, repo_name):
367 def _get_defaults(self, repo_name):
366 """
368 """
367 Gets information about repository, and returns a dict for
369 Gets information about repository, and returns a dict for
368 usage in forms
370 usage in forms
369
371
370 :param repo_name:
372 :param repo_name:
371 """
373 """
372
374
373 repo_info = Repository.get_by_repo_name(repo_name)
375 repo_info = Repository.get_by_repo_name(repo_name)
374
376
375 if repo_info is None:
377 if repo_info is None:
376 return None
378 return None
377
379
378 defaults = repo_info.get_dict()
380 defaults = repo_info.get_dict()
379 defaults['repo_name'] = repo_info.just_name
381 defaults['repo_name'] = repo_info.just_name
380
382
381 groups = repo_info.groups_with_parents
383 groups = repo_info.groups_with_parents
382 parent_group = groups[-1] if groups else None
384 parent_group = groups[-1] if groups else None
383
385
384 # we use -1 as this is how in HTML, we mark an empty group
386 # we use -1 as this is how in HTML, we mark an empty group
385 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
387 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
386
388
387 keys_to_process = (
389 keys_to_process = (
388 {'k': 'repo_type', 'strip': False},
390 {'k': 'repo_type', 'strip': False},
389 {'k': 'repo_enable_downloads', 'strip': True},
391 {'k': 'repo_enable_downloads', 'strip': True},
390 {'k': 'repo_description', 'strip': True},
392 {'k': 'repo_description', 'strip': True},
391 {'k': 'repo_enable_locking', 'strip': True},
393 {'k': 'repo_enable_locking', 'strip': True},
392 {'k': 'repo_landing_rev', 'strip': True},
394 {'k': 'repo_landing_rev', 'strip': True},
393 {'k': 'clone_uri', 'strip': False},
395 {'k': 'clone_uri', 'strip': False},
394 {'k': 'push_uri', 'strip': False},
396 {'k': 'push_uri', 'strip': False},
395 {'k': 'repo_private', 'strip': True},
397 {'k': 'repo_private', 'strip': True},
396 {'k': 'repo_enable_statistics', 'strip': True}
398 {'k': 'repo_enable_statistics', 'strip': True}
397 )
399 )
398
400
399 for item in keys_to_process:
401 for item in keys_to_process:
400 attr = item['k']
402 attr = item['k']
401 if item['strip']:
403 if item['strip']:
402 attr = remove_prefix(item['k'], 'repo_')
404 attr = remove_prefix(item['k'], 'repo_')
403
405
404 val = defaults[attr]
406 val = defaults[attr]
405 if item['k'] == 'repo_landing_rev':
407 if item['k'] == 'repo_landing_rev':
406 val = ':'.join(defaults[attr])
408 val = ':'.join(defaults[attr])
407 defaults[item['k']] = val
409 defaults[item['k']] = val
408 if item['k'] == 'clone_uri':
410 if item['k'] == 'clone_uri':
409 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
411 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
410 if item['k'] == 'push_uri':
412 if item['k'] == 'push_uri':
411 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
413 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
412
414
413 # fill owner
415 # fill owner
414 if repo_info.user:
416 if repo_info.user:
415 defaults.update({'user': repo_info.user.username})
417 defaults.update({'user': repo_info.user.username})
416 else:
418 else:
417 replacement_user = User.get_first_super_admin().username
419 replacement_user = User.get_first_super_admin().username
418 defaults.update({'user': replacement_user})
420 defaults.update({'user': replacement_user})
419
421
420 return defaults
422 return defaults
421
423
    def update(self, repo, **kwargs):
        """
        Update an existing repository's attributes from keyword arguments.

        Handles owner change (with permission-cache flush for both old and
        new owner), group move, plain attribute updates, private-flag
        permission reset, extra custom fields, and a filesystem rename
        when the resulting repo name differs.

        :param repo: repository name, id or instance (resolved via
            ``_get_repo``)
        :param kwargs: fields to update; must contain ``repo_name`` —
            TODO confirm all callers guarantee this, a missing key raises
            KeyError below
        :returns: the updated repository instance
        :raises Exception: re-raises any error after logging the traceback
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name

            affected_user_ids = []
            if 'user' in kwargs:
                old_owner_id = cur_repo.user.user_id
                new_owner = User.get_by_username(kwargs['user'])
                cur_repo.user = new_owner

                # both old and new owner need their permission caches flushed
                if old_owner_id != new_owner.user_id:
                    affected_user_ids = [new_owner.user_id, old_owner_id]

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip-`repo_`-prefix?, form-key) pairs mapped onto model attrs
            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            # recompute the full repo name (group path + new short name)
            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)

            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            # invalidate cached permissions of the affected owners
            if affected_user_ids:
                PermissionModel().trigger_permission_flush(affected_user_ids)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
492
494
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        :param repo_name: full repository name (may include group path)
        :param repo_type: vcs backend type of the new repository
        :param description: repository description; falls back to the
            short repo name when empty
        :param owner: owner user (resolved via ``_get_user``)
        :param private: when True, the default user gets 'repository.none'
        :param clone_uri: optional remote clone uri
        :param repo_group: parent group id (resolved via ``_get_repo_group``)
        :param landing_rev: default landing revision spec
        :param fork_of: optional parent repository to mark as fork source
        :param copy_fork_permissions: copy user/group perms from the fork
            parent instead of creating defaults
        :param copy_group_permissions: copy perms from the parent repo
            group (translated ``group.*`` -> ``repository.*``)
        :param state: initial repository state, PENDING by default
        :returns: the new ``Repository`` instance (flushed, not committed)
        :raises Exception: re-raises any error after logging the traceback
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.archived = False
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            # a repo inside a group inherits the group's locking setting
            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                # clone all user and user-group permissions from the
                # fork parent onto the new repository
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                # translate the parent group's permissions
                # (``group.*`` -> ``repository.*``) onto the new repo
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)

                # same override as in the fork branch: private repos hide
                # themselves from the default user
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
608
610
609 def create(self, form_data, cur_user):
611 def create(self, form_data, cur_user):
610 """
612 """
611 Create repository using celery tasks
613 Create repository using celery tasks
612
614
613 :param form_data:
615 :param form_data:
614 :param cur_user:
616 :param cur_user:
615 """
617 """
616 from rhodecode.lib.celerylib import tasks, run_task
618 from rhodecode.lib.celerylib import tasks, run_task
617 return run_task(tasks.create_repo, form_data, cur_user)
619 return run_task(tasks.create_repo, form_data, cur_user)
618
620
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Batch-apply permission changes on a repository.

        Each of ``perm_additions``/``perm_updates``/``perm_deletions`` is an
        iterable of ``(member_id, perm, member_type)`` tuples where
        ``member_type`` is ``'user'`` or ``'user_group'``.

        :param repo: repository name, id or instance (resolved via
            ``_get_repo``)
        :param perm_additions: permissions to grant to new members
        :param perm_updates: permissions to change for existing members
        :param perm_deletions: permissions to revoke
        :param check_perms: when True, user-group changes require the
            current user to have usergroup read/write/admin permission
        :param cur_user: user performing the change (used for the
            user-group permission check)
        :returns: dict of 'added'/'updated'/'deleted' change entries plus
            a 'default_user_changed' flag
        :raises ValueError: on an unknown ``member_type``
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        repo = self._get_repo(repo)

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                if member_name == User.DEFAULT_USER:
                    # NOTE(dan): detect if we changed permissions for default user
                    perm_obj = self.sa.query(UserRepoToPerm) \
                        .filter(UserRepoToPerm.user_id == member_id) \
                        .filter(UserRepoToPerm.repository == repo) \
                        .scalar()
                    if perm_obj and perm_obj.permission.permission_name != perm:
                        changes['default_user_changed'] = True

                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
710
712
711 def create_fork(self, form_data, cur_user):
713 def create_fork(self, form_data, cur_user):
712 """
714 """
713 Simple wrapper into executing celery task for fork creation
715 Simple wrapper into executing celery task for fork creation
714
716
715 :param form_data:
717 :param form_data:
716 :param cur_user:
718 :param cur_user:
717 """
719 """
718 from rhodecode.lib.celerylib import tasks, run_task
720 from rhodecode.lib.celerylib import tasks, run_task
719 return run_task(tasks.create_repo_fork, form_data, cur_user)
721 return run_task(tasks.create_repo_fork, form_data, cur_user)
720
722
721 def archive(self, repo):
723 def archive(self, repo):
722 """
724 """
723 Archive given repository. Set archive flag.
725 Archive given repository. Set archive flag.
724
726
725 :param repo:
727 :param repo:
726 """
728 """
727 repo = self._get_repo(repo)
729 repo = self._get_repo(repo)
728 if repo:
730 if repo:
729
731
730 try:
732 try:
731 repo.archived = True
733 repo.archived = True
732 self.sa.add(repo)
734 self.sa.add(repo)
733 self.sa.commit()
735 self.sa.commit()
734 except Exception:
736 except Exception:
735 log.error(traceback.format_exc())
737 log.error(traceback.format_exc())
736 raise
738 raise
737
739
    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what do do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo: repository name, id or instance (resolved via
            ``_get_repo``)
        :param forks: str 'delete' or 'detach'; any other value raises
            AttachedForksError when forks exist
        :param pull_requests: str 'delete' or None; any other value raises
            AttachedPullRequestsError when pull requests exist
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: user performing the delete; defaults to the
            current request user
        :raises AttachedForksError: forks exist and no fork policy given
        :raises AttachedPullRequestsError: pull requests exist and
            ``pull_requests`` is not 'delete'
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                # keep forks alive but unlink them from this parent
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # recursively delete all forks first
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            # snapshot repo data before deletion for the delete hook
            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                hooks_base.delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
786
788
787 def grant_user_permission(self, repo, user, perm):
789 def grant_user_permission(self, repo, user, perm):
788 """
790 """
789 Grant permission for user on given repository, or update existing one
791 Grant permission for user on given repository, or update existing one
790 if found
792 if found
791
793
792 :param repo: Instance of Repository, repository_id, or repository name
794 :param repo: Instance of Repository, repository_id, or repository name
793 :param user: Instance of User, user_id or username
795 :param user: Instance of User, user_id or username
794 :param perm: Instance of Permission, or permission_name
796 :param perm: Instance of Permission, or permission_name
795 """
797 """
796 user = self._get_user(user)
798 user = self._get_user(user)
797 repo = self._get_repo(repo)
799 repo = self._get_repo(repo)
798 permission = self._get_perm(perm)
800 permission = self._get_perm(perm)
799
801
800 # check if we have that permission already
802 # check if we have that permission already
801 obj = self.sa.query(UserRepoToPerm) \
803 obj = self.sa.query(UserRepoToPerm) \
802 .filter(UserRepoToPerm.user == user) \
804 .filter(UserRepoToPerm.user == user) \
803 .filter(UserRepoToPerm.repository == repo) \
805 .filter(UserRepoToPerm.repository == repo) \
804 .scalar()
806 .scalar()
805 if obj is None:
807 if obj is None:
806 # create new !
808 # create new !
807 obj = UserRepoToPerm()
809 obj = UserRepoToPerm()
808 obj.repository = repo
810 obj.repository = repo
809 obj.user = user
811 obj.user = user
810 obj.permission = permission
812 obj.permission = permission
811 self.sa.add(obj)
813 self.sa.add(obj)
812 log.debug('Granted perm %s to %s on %s', perm, user, repo)
814 log.debug('Granted perm %s to %s on %s', perm, user, repo)
813 action_logger_generic(
815 action_logger_generic(
814 'granted permission: {} to user: {} on repo: {}'.format(
816 'granted permission: {} to user: {} on repo: {}'.format(
815 perm, user, repo), namespace='security.repo')
817 perm, user, repo), namespace='security.repo')
816 return obj
818 return obj
817
819
818 def revoke_user_permission(self, repo, user):
820 def revoke_user_permission(self, repo, user):
819 """
821 """
820 Revoke permission for user on given repository
822 Revoke permission for user on given repository
821
823
822 :param repo: Instance of Repository, repository_id, or repository name
824 :param repo: Instance of Repository, repository_id, or repository name
823 :param user: Instance of User, user_id or username
825 :param user: Instance of User, user_id or username
824 """
826 """
825
827
826 user = self._get_user(user)
828 user = self._get_user(user)
827 repo = self._get_repo(repo)
829 repo = self._get_repo(repo)
828
830
829 obj = self.sa.query(UserRepoToPerm) \
831 obj = self.sa.query(UserRepoToPerm) \
830 .filter(UserRepoToPerm.repository == repo) \
832 .filter(UserRepoToPerm.repository == repo) \
831 .filter(UserRepoToPerm.user == user) \
833 .filter(UserRepoToPerm.user == user) \
832 .scalar()
834 .scalar()
833 if obj:
835 if obj:
834 self.sa.delete(obj)
836 self.sa.delete(obj)
835 log.debug('Revoked perm on %s on %s', repo, user)
837 log.debug('Revoked perm on %s on %s', repo, user)
836 action_logger_generic(
838 action_logger_generic(
837 'revoked permission from user: {} on repo: {}'.format(
839 'revoked permission from user: {} on repo: {}'.format(
838 user, repo), namespace='security.repo')
840 user, repo), namespace='security.repo')
839
841
840 def grant_user_group_permission(self, repo, group_name, perm):
842 def grant_user_group_permission(self, repo, group_name, perm):
841 """
843 """
842 Grant permission for user group on given repository, or update
844 Grant permission for user group on given repository, or update
843 existing one if found
845 existing one if found
844
846
845 :param repo: Instance of Repository, repository_id, or repository name
847 :param repo: Instance of Repository, repository_id, or repository name
846 :param group_name: Instance of UserGroup, users_group_id,
848 :param group_name: Instance of UserGroup, users_group_id,
847 or user group name
849 or user group name
848 :param perm: Instance of Permission, or permission_name
850 :param perm: Instance of Permission, or permission_name
849 """
851 """
850 repo = self._get_repo(repo)
852 repo = self._get_repo(repo)
851 group_name = self._get_user_group(group_name)
853 group_name = self._get_user_group(group_name)
852 permission = self._get_perm(perm)
854 permission = self._get_perm(perm)
853
855
854 # check if we have that permission already
856 # check if we have that permission already
855 obj = self.sa.query(UserGroupRepoToPerm) \
857 obj = self.sa.query(UserGroupRepoToPerm) \
856 .filter(UserGroupRepoToPerm.users_group == group_name) \
858 .filter(UserGroupRepoToPerm.users_group == group_name) \
857 .filter(UserGroupRepoToPerm.repository == repo) \
859 .filter(UserGroupRepoToPerm.repository == repo) \
858 .scalar()
860 .scalar()
859
861
860 if obj is None:
862 if obj is None:
861 # create new
863 # create new
862 obj = UserGroupRepoToPerm()
864 obj = UserGroupRepoToPerm()
863
865
864 obj.repository = repo
866 obj.repository = repo
865 obj.users_group = group_name
867 obj.users_group = group_name
866 obj.permission = permission
868 obj.permission = permission
867 self.sa.add(obj)
869 self.sa.add(obj)
868 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
870 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
869 action_logger_generic(
871 action_logger_generic(
870 'granted permission: {} to usergroup: {} on repo: {}'.format(
872 'granted permission: {} to usergroup: {} on repo: {}'.format(
871 perm, group_name, repo), namespace='security.repo')
873 perm, group_name, repo), namespace='security.repo')
872
874
873 return obj
875 return obj
874
876
875 def revoke_user_group_permission(self, repo, group_name):
877 def revoke_user_group_permission(self, repo, group_name):
876 """
878 """
877 Revoke permission for user group on given repository
879 Revoke permission for user group on given repository
878
880
879 :param repo: Instance of Repository, repository_id, or repository name
881 :param repo: Instance of Repository, repository_id, or repository name
880 :param group_name: Instance of UserGroup, users_group_id,
882 :param group_name: Instance of UserGroup, users_group_id,
881 or user group name
883 or user group name
882 """
884 """
883 repo = self._get_repo(repo)
885 repo = self._get_repo(repo)
884 group_name = self._get_user_group(group_name)
886 group_name = self._get_user_group(group_name)
885
887
886 obj = self.sa.query(UserGroupRepoToPerm) \
888 obj = self.sa.query(UserGroupRepoToPerm) \
887 .filter(UserGroupRepoToPerm.repository == repo) \
889 .filter(UserGroupRepoToPerm.repository == repo) \
888 .filter(UserGroupRepoToPerm.users_group == group_name) \
890 .filter(UserGroupRepoToPerm.users_group == group_name) \
889 .scalar()
891 .scalar()
890 if obj:
892 if obj:
891 self.sa.delete(obj)
893 self.sa.delete(obj)
892 log.debug('Revoked perm to %s on %s', repo, group_name)
894 log.debug('Revoked perm to %s on %s', repo, group_name)
893 action_logger_generic(
895 action_logger_generic(
894 'revoked permission from usergroup: {} on repo: {}'.format(
896 'revoked permission from usergroup: {} on repo: {}'.format(
895 group_name, repo), namespace='security.repo')
897 group_name, repo), namespace='security.repo')
896
898
897 def delete_stats(self, repo_name):
899 def delete_stats(self, repo_name):
898 """
900 """
899 removes stats for given repo
901 removes stats for given repo
900
902
901 :param repo_name:
903 :param repo_name:
902 """
904 """
903 repo = self._get_repo(repo_name)
905 repo = self._get_repo(repo_name)
904 try:
906 try:
905 obj = self.sa.query(Statistics) \
907 obj = self.sa.query(Statistics) \
906 .filter(Statistics.repository == repo).scalar()
908 .filter(Statistics.repository == repo).scalar()
907 if obj:
909 if obj:
908 self.sa.delete(obj)
910 self.sa.delete(obj)
909 except Exception:
911 except Exception:
910 log.error(traceback.format_exc())
912 log.error(traceback.format_exc())
911 raise
913 raise
912
914
913 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
915 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
914 field_type='str', field_desc=''):
916 field_type='str', field_desc=''):
915
917
916 repo = self._get_repo(repo_name)
918 repo = self._get_repo(repo_name)
917
919
918 new_field = RepositoryField()
920 new_field = RepositoryField()
919 new_field.repository = repo
921 new_field.repository = repo
920 new_field.field_key = field_key
922 new_field.field_key = field_key
921 new_field.field_type = field_type # python type
923 new_field.field_type = field_type # python type
922 new_field.field_value = field_value
924 new_field.field_value = field_value
923 new_field.field_desc = field_desc
925 new_field.field_desc = field_desc
924 new_field.field_label = field_label
926 new_field.field_label = field_label
925 self.sa.add(new_field)
927 self.sa.add(new_field)
926 return new_field
928 return new_field
927
929
928 def delete_repo_field(self, repo_name, field_key):
930 def delete_repo_field(self, repo_name, field_key):
929 repo = self._get_repo(repo_name)
931 repo = self._get_repo(repo_name)
930 field = RepositoryField.get_by_key_name(field_key, repo)
932 field = RepositoryField.get_by_key_name(field_key, repo)
931 if field:
933 if field:
932 self.sa.delete(field)
934 self.sa.delete(field)
933
935
934 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
936 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
935 clone_uri=None, repo_store_location=None,
937 clone_uri=None, repo_store_location=None,
936 use_global_config=False, install_hooks=True):
938 use_global_config=False, install_hooks=True):
937 """
939 """
938 makes repository on filesystem. It's group aware means it'll create
940 makes repository on filesystem. It's group aware means it'll create
939 a repository within a group, and alter the paths accordingly of
941 a repository within a group, and alter the paths accordingly of
940 group location
942 group location
941
943
942 :param repo_name:
944 :param repo_name:
943 :param alias:
945 :param alias:
944 :param parent:
946 :param parent:
945 :param clone_uri:
947 :param clone_uri:
946 :param repo_store_location:
948 :param repo_store_location:
947 """
949 """
948 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
950 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
949 from rhodecode.model.scm import ScmModel
951 from rhodecode.model.scm import ScmModel
950
952
951 if Repository.NAME_SEP in repo_name:
953 if Repository.NAME_SEP in repo_name:
952 raise ValueError(
954 raise ValueError(
953 'repo_name must not contain groups got `%s`' % repo_name)
955 'repo_name must not contain groups got `%s`' % repo_name)
954
956
955 if isinstance(repo_group, RepoGroup):
957 if isinstance(repo_group, RepoGroup):
956 new_parent_path = os.sep.join(repo_group.full_path_splitted)
958 new_parent_path = os.sep.join(repo_group.full_path_splitted)
957 else:
959 else:
958 new_parent_path = repo_group or ''
960 new_parent_path = repo_group or ''
959
961
960 if repo_store_location:
962 if repo_store_location:
961 _paths = [repo_store_location]
963 _paths = [repo_store_location]
962 else:
964 else:
963 _paths = [self.repos_path, new_parent_path, repo_name]
965 _paths = [self.repos_path, new_parent_path, repo_name]
964 # we need to make it str for mercurial
966 # we need to make it str for mercurial
965 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
967 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
966
968
967 # check if this path is not a repository
969 # check if this path is not a repository
968 if is_valid_repo(repo_path, self.repos_path):
970 if is_valid_repo(repo_path, self.repos_path):
969 raise Exception('This path %s is a valid repository' % repo_path)
971 raise Exception('This path %s is a valid repository' % repo_path)
970
972
971 # check if this path is a group
973 # check if this path is a group
972 if is_valid_repo_group(repo_path, self.repos_path):
974 if is_valid_repo_group(repo_path, self.repos_path):
973 raise Exception('This path %s is a valid group' % repo_path)
975 raise Exception('This path %s is a valid group' % repo_path)
974
976
975 log.info('creating repo %s in %s from url: `%s`',
977 log.info('creating repo %s in %s from url: `%s`',
976 repo_name, safe_unicode(repo_path),
978 repo_name, safe_unicode(repo_path),
977 obfuscate_url_pw(clone_uri))
979 obfuscate_url_pw(clone_uri))
978
980
979 backend = get_backend(repo_type)
981 backend = get_backend(repo_type)
980
982
981 config_repo = None if use_global_config else repo_name
983 config_repo = None if use_global_config else repo_name
982 if config_repo and new_parent_path:
984 if config_repo and new_parent_path:
983 config_repo = Repository.NAME_SEP.join(
985 config_repo = Repository.NAME_SEP.join(
984 (new_parent_path, config_repo))
986 (new_parent_path, config_repo))
985 config = make_db_config(clear_session=False, repo=config_repo)
987 config = make_db_config(clear_session=False, repo=config_repo)
986 config.set('extensions', 'largefiles', '')
988 config.set('extensions', 'largefiles', '')
987
989
988 # patch and reset hooks section of UI config to not run any
990 # patch and reset hooks section of UI config to not run any
989 # hooks on creating remote repo
991 # hooks on creating remote repo
990 config.clear_section('hooks')
992 config.clear_section('hooks')
991
993
992 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
994 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
993 if repo_type == 'git':
995 if repo_type == 'git':
994 repo = backend(
996 repo = backend(
995 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
997 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
996 with_wire={"cache": False})
998 with_wire={"cache": False})
997 else:
999 else:
998 repo = backend(
1000 repo = backend(
999 repo_path, config=config, create=True, src_url=clone_uri,
1001 repo_path, config=config, create=True, src_url=clone_uri,
1000 with_wire={"cache": False})
1002 with_wire={"cache": False})
1001
1003
1002 if install_hooks:
1004 if install_hooks:
1003 repo.install_hooks()
1005 repo.install_hooks()
1004
1006
1005 log.debug('Created repo %s with %s backend',
1007 log.debug('Created repo %s with %s backend',
1006 safe_unicode(repo_name), safe_unicode(repo_type))
1008 safe_unicode(repo_name), safe_unicode(repo_type))
1007 return repo
1009 return repo
1008
1010
1009 def _rename_filesystem_repo(self, old, new):
1011 def _rename_filesystem_repo(self, old, new):
1010 """
1012 """
1011 renames repository on filesystem
1013 renames repository on filesystem
1012
1014
1013 :param old: old name
1015 :param old: old name
1014 :param new: new name
1016 :param new: new name
1015 """
1017 """
1016 log.info('renaming repo from %s to %s', old, new)
1018 log.info('renaming repo from %s to %s', old, new)
1017
1019
1018 old_path = os.path.join(self.repos_path, old)
1020 old_path = os.path.join(self.repos_path, old)
1019 new_path = os.path.join(self.repos_path, new)
1021 new_path = os.path.join(self.repos_path, new)
1020 if os.path.isdir(new_path):
1022 if os.path.isdir(new_path):
1021 raise Exception(
1023 raise Exception(
1022 'Was trying to rename to already existing dir %s' % new_path
1024 'Was trying to rename to already existing dir %s' % new_path
1023 )
1025 )
1024 shutil.move(old_path, new_path)
1026 shutil.move(old_path, new_path)
1025
1027
1026 def _delete_filesystem_repo(self, repo):
1028 def _delete_filesystem_repo(self, repo):
1027 """
1029 """
1028 removes repo from filesystem, the removal is acctually made by
1030 removes repo from filesystem, the removal is acctually made by
1029 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1031 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1030 repository is no longer valid for rhodecode, can be undeleted later on
1032 repository is no longer valid for rhodecode, can be undeleted later on
1031 by reverting the renames on this repository
1033 by reverting the renames on this repository
1032
1034
1033 :param repo: repo object
1035 :param repo: repo object
1034 """
1036 """
1035 rm_path = os.path.join(self.repos_path, repo.repo_name)
1037 rm_path = os.path.join(self.repos_path, repo.repo_name)
1036 repo_group = repo.group
1038 repo_group = repo.group
1037 log.info("Removing repository %s", rm_path)
1039 log.info("Removing repository %s", rm_path)
1038 # disable hg/git internal that it doesn't get detected as repo
1040 # disable hg/git internal that it doesn't get detected as repo
1039 alias = repo.repo_type
1041 alias = repo.repo_type
1040
1042
1041 config = make_db_config(clear_session=False)
1043 config = make_db_config(clear_session=False)
1042 config.set('extensions', 'largefiles', '')
1044 config.set('extensions', 'largefiles', '')
1043 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1045 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1044
1046
1045 # skip this for bare git repos
1047 # skip this for bare git repos
1046 if not bare:
1048 if not bare:
1047 # disable VCS repo
1049 # disable VCS repo
1048 vcs_path = os.path.join(rm_path, '.%s' % alias)
1050 vcs_path = os.path.join(rm_path, '.%s' % alias)
1049 if os.path.exists(vcs_path):
1051 if os.path.exists(vcs_path):
1050 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1052 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1051
1053
1052 _now = datetime.datetime.now()
1054 _now = datetime.datetime.now()
1053 _ms = str(_now.microsecond).rjust(6, '0')
1055 _ms = str(_now.microsecond).rjust(6, '0')
1054 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1056 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1055 repo.just_name)
1057 repo.just_name)
1056 if repo_group:
1058 if repo_group:
1057 # if repository is in group, prefix the removal path with the group
1059 # if repository is in group, prefix the removal path with the group
1058 args = repo_group.full_path_splitted + [_d]
1060 args = repo_group.full_path_splitted + [_d]
1059 _d = os.path.join(*args)
1061 _d = os.path.join(*args)
1060
1062
1061 if os.path.isdir(rm_path):
1063 if os.path.isdir(rm_path):
1062 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1064 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1063
1065
1064 # finally cleanup diff-cache if it exists
1066 # finally cleanup diff-cache if it exists
1065 cached_diffs_dir = repo.cached_diffs_dir
1067 cached_diffs_dir = repo.cached_diffs_dir
1066 if os.path.isdir(cached_diffs_dir):
1068 if os.path.isdir(cached_diffs_dir):
1067 shutil.rmtree(cached_diffs_dir)
1069 shutil.rmtree(cached_diffs_dir)
1068
1070
1069
1071
1070 class ReadmeFinder:
1072 class ReadmeFinder:
1071 """
1073 """
1072 Utility which knows how to find a readme for a specific commit.
1074 Utility which knows how to find a readme for a specific commit.
1073
1075
1074 The main idea is that this is a configurable algorithm. When creating an
1076 The main idea is that this is a configurable algorithm. When creating an
1075 instance you can define parameters, currently only the `default_renderer`.
1077 instance you can define parameters, currently only the `default_renderer`.
1076 Based on this configuration the method :meth:`search` behaves slightly
1078 Based on this configuration the method :meth:`search` behaves slightly
1077 different.
1079 different.
1078 """
1080 """
1079
1081
1080 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1082 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1081 path_re = re.compile(r'^docs?', re.IGNORECASE)
1083 path_re = re.compile(r'^docs?', re.IGNORECASE)
1082
1084
1083 default_priorities = {
1085 default_priorities = {
1084 None: 0,
1086 None: 0,
1085 '.text': 2,
1087 '.text': 2,
1086 '.txt': 3,
1088 '.txt': 3,
1087 '.rst': 1,
1089 '.rst': 1,
1088 '.rest': 2,
1090 '.rest': 2,
1089 '.md': 1,
1091 '.md': 1,
1090 '.mkdn': 2,
1092 '.mkdn': 2,
1091 '.mdown': 3,
1093 '.mdown': 3,
1092 '.markdown': 4,
1094 '.markdown': 4,
1093 }
1095 }
1094
1096
1095 path_priority = {
1097 path_priority = {
1096 'doc': 0,
1098 'doc': 0,
1097 'docs': 1,
1099 'docs': 1,
1098 }
1100 }
1099
1101
1100 FALLBACK_PRIORITY = 99
1102 FALLBACK_PRIORITY = 99
1101
1103
1102 RENDERER_TO_EXTENSION = {
1104 RENDERER_TO_EXTENSION = {
1103 'rst': ['.rst', '.rest'],
1105 'rst': ['.rst', '.rest'],
1104 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1106 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1105 }
1107 }
1106
1108
1107 def __init__(self, default_renderer=None):
1109 def __init__(self, default_renderer=None):
1108 self._default_renderer = default_renderer
1110 self._default_renderer = default_renderer
1109 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1111 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1110 default_renderer, [])
1112 default_renderer, [])
1111
1113
1112 def search(self, commit, path=u'/'):
1114 def search(self, commit, path=u'/'):
1113 """
1115 """
1114 Find a readme in the given `commit`.
1116 Find a readme in the given `commit`.
1115 """
1117 """
1116 nodes = commit.get_nodes(path)
1118 nodes = commit.get_nodes(path)
1117 matches = self._match_readmes(nodes)
1119 matches = self._match_readmes(nodes)
1118 matches = self._sort_according_to_priority(matches)
1120 matches = self._sort_according_to_priority(matches)
1119 if matches:
1121 if matches:
1120 return matches[0].node
1122 return matches[0].node
1121
1123
1122 paths = self._match_paths(nodes)
1124 paths = self._match_paths(nodes)
1123 paths = self._sort_paths_according_to_priority(paths)
1125 paths = self._sort_paths_according_to_priority(paths)
1124 for path in paths:
1126 for path in paths:
1125 match = self.search(commit, path=path)
1127 match = self.search(commit, path=path)
1126 if match:
1128 if match:
1127 return match
1129 return match
1128
1130
1129 return None
1131 return None
1130
1132
1131 def _match_readmes(self, nodes):
1133 def _match_readmes(self, nodes):
1132 for node in nodes:
1134 for node in nodes:
1133 if not node.is_file():
1135 if not node.is_file():
1134 continue
1136 continue
1135 path = node.path.rsplit('/', 1)[-1]
1137 path = node.path.rsplit('/', 1)[-1]
1136 match = self.readme_re.match(path)
1138 match = self.readme_re.match(path)
1137 if match:
1139 if match:
1138 extension = match.group(1)
1140 extension = match.group(1)
1139 yield ReadmeMatch(node, match, self._priority(extension))
1141 yield ReadmeMatch(node, match, self._priority(extension))
1140
1142
1141 def _match_paths(self, nodes):
1143 def _match_paths(self, nodes):
1142 for node in nodes:
1144 for node in nodes:
1143 if not node.is_dir():
1145 if not node.is_dir():
1144 continue
1146 continue
1145 match = self.path_re.match(node.path)
1147 match = self.path_re.match(node.path)
1146 if match:
1148 if match:
1147 yield node.path
1149 yield node.path
1148
1150
1149 def _priority(self, extension):
1151 def _priority(self, extension):
1150 renderer_priority = (
1152 renderer_priority = (
1151 0 if extension in self._renderer_extensions else 1)
1153 0 if extension in self._renderer_extensions else 1)
1152 extension_priority = self.default_priorities.get(
1154 extension_priority = self.default_priorities.get(
1153 extension, self.FALLBACK_PRIORITY)
1155 extension, self.FALLBACK_PRIORITY)
1154 return (renderer_priority, extension_priority)
1156 return (renderer_priority, extension_priority)
1155
1157
1156 def _sort_according_to_priority(self, matches):
1158 def _sort_according_to_priority(self, matches):
1157
1159
1158 def priority_and_path(match):
1160 def priority_and_path(match):
1159 return (match.priority, match.path)
1161 return (match.priority, match.path)
1160
1162
1161 return sorted(matches, key=priority_and_path)
1163 return sorted(matches, key=priority_and_path)
1162
1164
1163 def _sort_paths_according_to_priority(self, paths):
1165 def _sort_paths_according_to_priority(self, paths):
1164
1166
1165 def priority_and_path(path):
1167 def priority_and_path(path):
1166 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1168 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1167
1169
1168 return sorted(paths, key=priority_and_path)
1170 return sorted(paths, key=priority_and_path)
1169
1171
1170
1172
1171 class ReadmeMatch:
1173 class ReadmeMatch:
1172
1174
1173 def __init__(self, node, match, priority):
1175 def __init__(self, node, match, priority):
1174 self.node = node
1176 self.node = node
1175 self._match = match
1177 self._match = match
1176 self.priority = priority
1178 self.priority = priority
1177
1179
1178 @property
1180 @property
1179 def path(self):
1181 def path(self):
1180 return self.node.path
1182 return self.node.path
1181
1183
1182 def __repr__(self):
1184 def __repr__(self):
1183 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
1185 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
@@ -1,1024 +1,1025 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 or_, false,
50 or_, false,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 PullRequest, FileStore)
52 PullRequest, FileStore)
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 class UserTemp(object):
59 class UserTemp(object):
60 def __init__(self, user_id):
60 def __init__(self, user_id):
61 self.user_id = user_id
61 self.user_id = user_id
62
62
63 def __repr__(self):
63 def __repr__(self):
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65
65
66
66
67 class RepoTemp(object):
67 class RepoTemp(object):
68 def __init__(self, repo_id):
68 def __init__(self, repo_id):
69 self.repo_id = repo_id
69 self.repo_id = repo_id
70
70
71 def __repr__(self):
71 def __repr__(self):
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73
73
74
74
75 class SimpleCachedRepoList(object):
75 class SimpleCachedRepoList(object):
76 """
76 """
77 Lighter version of of iteration of repos without the scm initialisation,
77 Lighter version of of iteration of repos without the scm initialisation,
78 and with cache usage
78 and with cache usage
79 """
79 """
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 self.db_repo_list = db_repo_list
81 self.db_repo_list = db_repo_list
82 self.repos_path = repos_path
82 self.repos_path = repos_path
83 self.order_by = order_by
83 self.order_by = order_by
84 self.reversed = (order_by or '').startswith('-')
84 self.reversed = (order_by or '').startswith('-')
85 if not perm_set:
85 if not perm_set:
86 perm_set = ['repository.read', 'repository.write',
86 perm_set = ['repository.read', 'repository.write',
87 'repository.admin']
87 'repository.admin']
88 self.perm_set = perm_set
88 self.perm_set = perm_set
89
89
90 def __len__(self):
90 def __len__(self):
91 return len(self.db_repo_list)
91 return len(self.db_repo_list)
92
92
93 def __repr__(self):
93 def __repr__(self):
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95
95
96 def __iter__(self):
96 def __iter__(self):
97 for dbr in self.db_repo_list:
97 for dbr in self.db_repo_list:
98 # check permission at this level
98 # check permission at this level
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 dbr.repo_name, 'SimpleCachedRepoList check')
100 dbr.repo_name, 'SimpleCachedRepoList check')
101 if not has_perm:
101 if not has_perm:
102 continue
102 continue
103
103
104 tmp_d = {
104 tmp_d = {
105 'name': dbr.repo_name,
105 'name': dbr.repo_name,
106 'dbrepo': dbr.get_dict(),
106 'dbrepo': dbr.get_dict(),
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 }
108 }
109 yield tmp_d
109 yield tmp_d
110
110
111
111
112 class _PermCheckIterator(object):
112 class _PermCheckIterator(object):
113
113
114 def __init__(
114 def __init__(
115 self, obj_list, obj_attr, perm_set, perm_checker,
115 self, obj_list, obj_attr, perm_set, perm_checker,
116 extra_kwargs=None):
116 extra_kwargs=None):
117 """
117 """
118 Creates iterator from given list of objects, additionally
118 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
119 checking permission for them from perm_set var
120
120
121 :param obj_list: list of db objects
121 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
122 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
123 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
124 :param perm_checker: callable to check permissions against
125 """
125 """
126 self.obj_list = obj_list
126 self.obj_list = obj_list
127 self.obj_attr = obj_attr
127 self.obj_attr = obj_attr
128 self.perm_set = perm_set
128 self.perm_set = perm_set
129 self.perm_checker = perm_checker(*self.perm_set)
129 self.perm_checker = perm_checker(*self.perm_set)
130 self.extra_kwargs = extra_kwargs or {}
130 self.extra_kwargs = extra_kwargs or {}
131
131
132 def __len__(self):
132 def __len__(self):
133 return len(self.obj_list)
133 return len(self.obj_list)
134
134
135 def __repr__(self):
135 def __repr__(self):
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137
137
138 def __iter__(self):
138 def __iter__(self):
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
142 name = db_obj.__dict__.get(self.obj_attr, None)
142 name = db_obj.__dict__.get(self.obj_attr, None)
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 continue
144 continue
145
145
146 yield db_obj
146 yield db_obj
147
147
148
148
149 class RepoList(_PermCheckIterator):
149 class RepoList(_PermCheckIterator):
150
150
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 if not perm_set:
152 if not perm_set:
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
154
154
155 super(RepoList, self).__init__(
155 super(RepoList, self).__init__(
156 obj_list=db_repo_list,
156 obj_list=db_repo_list,
157 obj_attr='_repo_name', perm_set=perm_set,
157 obj_attr='_repo_name', perm_set=perm_set,
158 perm_checker=HasRepoPermissionAny,
158 perm_checker=HasRepoPermissionAny,
159 extra_kwargs=extra_kwargs)
159 extra_kwargs=extra_kwargs)
160
160
161
161
162 class RepoGroupList(_PermCheckIterator):
162 class RepoGroupList(_PermCheckIterator):
163
163
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 if not perm_set:
165 if not perm_set:
166 perm_set = ['group.read', 'group.write', 'group.admin']
166 perm_set = ['group.read', 'group.write', 'group.admin']
167
167
168 super(RepoGroupList, self).__init__(
168 super(RepoGroupList, self).__init__(
169 obj_list=db_repo_group_list,
169 obj_list=db_repo_group_list,
170 obj_attr='_group_name', perm_set=perm_set,
170 obj_attr='_group_name', perm_set=perm_set,
171 perm_checker=HasRepoGroupPermissionAny,
171 perm_checker=HasRepoGroupPermissionAny,
172 extra_kwargs=extra_kwargs)
172 extra_kwargs=extra_kwargs)
173
173
174
174
175 class UserGroupList(_PermCheckIterator):
175 class UserGroupList(_PermCheckIterator):
176
176
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 if not perm_set:
178 if not perm_set:
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
180
181 super(UserGroupList, self).__init__(
181 super(UserGroupList, self).__init__(
182 obj_list=db_user_group_list,
182 obj_list=db_user_group_list,
183 obj_attr='users_group_name', perm_set=perm_set,
183 obj_attr='users_group_name', perm_set=perm_set,
184 perm_checker=HasUserGroupPermissionAny,
184 perm_checker=HasUserGroupPermissionAny,
185 extra_kwargs=extra_kwargs)
185 extra_kwargs=extra_kwargs)
186
186
187
187
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
193 @LazyProperty
193 @LazyProperty
194 def repos_path(self):
194 def repos_path(self):
195 """
195 """
196 Gets the repositories root path from database
196 Gets the repositories root path from database
197 """
197 """
198
198
199 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
201
201
202 def repo_scan(self, repos_path=None):
202 def repo_scan(self, repos_path=None):
203 """
203 """
204 Listing of repositories in given path. This path should not be a
204 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
205 repository itself. Return a dictionary of repository objects
206
206
207 :param repos_path: path to directory containing repositories
207 :param repos_path: path to directory containing repositories
208 """
208 """
209
209
210 if repos_path is None:
210 if repos_path is None:
211 repos_path = self.repos_path
211 repos_path = self.repos_path
212
212
213 log.info('scanning for repositories in %s', repos_path)
213 log.info('scanning for repositories in %s', repos_path)
214
214
215 config = make_db_config()
215 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
216 config.set('extensions', 'largefiles', '')
217 repos = {}
217 repos = {}
218
218
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # name need to be decomposed and put back together using the /
220 # name need to be decomposed and put back together using the /
221 # since this is internal storage separator for rhodecode
221 # since this is internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
222 name = Repository.normalize_repo_name(name)
223
223
224 try:
224 try:
225 if name in repos:
225 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
226 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
227 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
228 elif path[0] in rhodecode.BACKENDS:
229 backend = get_backend(path[0])
229 backend = get_backend(path[0])
230 repos[name] = backend(path[1], config=config,
230 repos[name] = backend(path[1], config=config,
231 with_wire={"cache": False})
231 with_wire={"cache": False})
232 except OSError:
232 except OSError:
233 continue
233 continue
234 except RepositoryError:
234 except RepositoryError:
235 log.exception('Failed to create a repo')
235 log.exception('Failed to create a repo')
236 continue
236 continue
237
237
238 log.debug('found %s paths with repositories', len(repos))
238 log.debug('found %s paths with repositories', len(repos))
239 return repos
239 return repos
240
240
241 def get_repos(self, all_repos=None, sort_key=None):
241 def get_repos(self, all_repos=None, sort_key=None):
242 """
242 """
243 Get all repositories from db and for each repo create it's
243 Get all repositories from db and for each repo create it's
244 backend instance and fill that backed with information from database
244 backend instance and fill that backed with information from database
245
245
246 :param all_repos: list of repository names as strings
246 :param all_repos: list of repository names as strings
247 give specific repositories list, good for filtering
247 give specific repositories list, good for filtering
248
248
249 :param sort_key: initial sorting of repositories
249 :param sort_key: initial sorting of repositories
250 """
250 """
251 if all_repos is None:
251 if all_repos is None:
252 all_repos = self.sa.query(Repository)\
252 all_repos = self.sa.query(Repository)\
253 .filter(Repository.group_id == None)\
253 .filter(Repository.group_id == None)\
254 .order_by(func.lower(Repository.repo_name)).all()
254 .order_by(func.lower(Repository.repo_name)).all()
255 repo_iter = SimpleCachedRepoList(
255 repo_iter = SimpleCachedRepoList(
256 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 return repo_iter
257 return repo_iter
258
258
259 def get_repo_groups(self, all_groups=None):
259 def get_repo_groups(self, all_groups=None):
260 if all_groups is None:
260 if all_groups is None:
261 all_groups = RepoGroup.query()\
261 all_groups = RepoGroup.query()\
262 .filter(RepoGroup.group_parent_id == None).all()
262 .filter(RepoGroup.group_parent_id == None).all()
263 return [x for x in RepoGroupList(all_groups)]
263 return [x for x in RepoGroupList(all_groups)]
264
264
265 def mark_for_invalidation(self, repo_name, delete=False):
265 def mark_for_invalidation(self, repo_name, delete=False):
266 """
266 """
267 Mark caches of this repo invalid in the database. `delete` flag
267 Mark caches of this repo invalid in the database. `delete` flag
268 removes the cache entries
268 removes the cache entries
269
269
270 :param repo_name: the repo_name for which caches should be marked
270 :param repo_name: the repo_name for which caches should be marked
271 invalid, or deleted
271 invalid, or deleted
272 :param delete: delete the entry keys instead of setting bool
272 :param delete: delete the entry keys instead of setting bool
273 flag on them, and also purge caches used by the dogpile
273 flag on them, and also purge caches used by the dogpile
274 """
274 """
275 repo = Repository.get_by_repo_name(repo_name)
275 repo = Repository.get_by_repo_name(repo_name)
276
276
277 if repo:
277 if repo:
278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
279 repo_id=repo.repo_id)
279 repo_id=repo.repo_id)
280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
281
281
282 repo_id = repo.repo_id
282 repo_id = repo.repo_id
283 config = repo._config
283 config = repo._config
284 config.set('extensions', 'largefiles', '')
284 config.set('extensions', 'largefiles', '')
285 repo.update_commit_cache(config=config, cs_cache=None)
285 repo.update_commit_cache(config=config, cs_cache=None)
286 if delete:
286 if delete:
287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
288 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
288 rc_cache.clear_cache_namespace(
289 'cache_repo', cache_namespace_uid, invalidate=True)
289
290
290 def toggle_following_repo(self, follow_repo_id, user_id):
291 def toggle_following_repo(self, follow_repo_id, user_id):
291
292
292 f = self.sa.query(UserFollowing)\
293 f = self.sa.query(UserFollowing)\
293 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
294 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
294 .filter(UserFollowing.user_id == user_id).scalar()
295 .filter(UserFollowing.user_id == user_id).scalar()
295
296
296 if f is not None:
297 if f is not None:
297 try:
298 try:
298 self.sa.delete(f)
299 self.sa.delete(f)
299 return
300 return
300 except Exception:
301 except Exception:
301 log.error(traceback.format_exc())
302 log.error(traceback.format_exc())
302 raise
303 raise
303
304
304 try:
305 try:
305 f = UserFollowing()
306 f = UserFollowing()
306 f.user_id = user_id
307 f.user_id = user_id
307 f.follows_repo_id = follow_repo_id
308 f.follows_repo_id = follow_repo_id
308 self.sa.add(f)
309 self.sa.add(f)
309 except Exception:
310 except Exception:
310 log.error(traceback.format_exc())
311 log.error(traceback.format_exc())
311 raise
312 raise
312
313
313 def toggle_following_user(self, follow_user_id, user_id):
314 def toggle_following_user(self, follow_user_id, user_id):
314 f = self.sa.query(UserFollowing)\
315 f = self.sa.query(UserFollowing)\
315 .filter(UserFollowing.follows_user_id == follow_user_id)\
316 .filter(UserFollowing.follows_user_id == follow_user_id)\
316 .filter(UserFollowing.user_id == user_id).scalar()
317 .filter(UserFollowing.user_id == user_id).scalar()
317
318
318 if f is not None:
319 if f is not None:
319 try:
320 try:
320 self.sa.delete(f)
321 self.sa.delete(f)
321 return
322 return
322 except Exception:
323 except Exception:
323 log.error(traceback.format_exc())
324 log.error(traceback.format_exc())
324 raise
325 raise
325
326
326 try:
327 try:
327 f = UserFollowing()
328 f = UserFollowing()
328 f.user_id = user_id
329 f.user_id = user_id
329 f.follows_user_id = follow_user_id
330 f.follows_user_id = follow_user_id
330 self.sa.add(f)
331 self.sa.add(f)
331 except Exception:
332 except Exception:
332 log.error(traceback.format_exc())
333 log.error(traceback.format_exc())
333 raise
334 raise
334
335
335 def is_following_repo(self, repo_name, user_id, cache=False):
336 def is_following_repo(self, repo_name, user_id, cache=False):
336 r = self.sa.query(Repository)\
337 r = self.sa.query(Repository)\
337 .filter(Repository.repo_name == repo_name).scalar()
338 .filter(Repository.repo_name == repo_name).scalar()
338
339
339 f = self.sa.query(UserFollowing)\
340 f = self.sa.query(UserFollowing)\
340 .filter(UserFollowing.follows_repository == r)\
341 .filter(UserFollowing.follows_repository == r)\
341 .filter(UserFollowing.user_id == user_id).scalar()
342 .filter(UserFollowing.user_id == user_id).scalar()
342
343
343 return f is not None
344 return f is not None
344
345
345 def is_following_user(self, username, user_id, cache=False):
346 def is_following_user(self, username, user_id, cache=False):
346 u = User.get_by_username(username)
347 u = User.get_by_username(username)
347
348
348 f = self.sa.query(UserFollowing)\
349 f = self.sa.query(UserFollowing)\
349 .filter(UserFollowing.follows_user == u)\
350 .filter(UserFollowing.follows_user == u)\
350 .filter(UserFollowing.user_id == user_id).scalar()
351 .filter(UserFollowing.user_id == user_id).scalar()
351
352
352 return f is not None
353 return f is not None
353
354
354 def get_followers(self, repo):
355 def get_followers(self, repo):
355 repo = self._get_repo(repo)
356 repo = self._get_repo(repo)
356
357
357 return self.sa.query(UserFollowing)\
358 return self.sa.query(UserFollowing)\
358 .filter(UserFollowing.follows_repository == repo).count()
359 .filter(UserFollowing.follows_repository == repo).count()
359
360
360 def get_forks(self, repo):
361 def get_forks(self, repo):
361 repo = self._get_repo(repo)
362 repo = self._get_repo(repo)
362 return self.sa.query(Repository)\
363 return self.sa.query(Repository)\
363 .filter(Repository.fork == repo).count()
364 .filter(Repository.fork == repo).count()
364
365
365 def get_pull_requests(self, repo):
366 def get_pull_requests(self, repo):
366 repo = self._get_repo(repo)
367 repo = self._get_repo(repo)
367 return self.sa.query(PullRequest)\
368 return self.sa.query(PullRequest)\
368 .filter(PullRequest.target_repo == repo)\
369 .filter(PullRequest.target_repo == repo)\
369 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370
371
371 def get_artifacts(self, repo):
372 def get_artifacts(self, repo):
372 repo = self._get_repo(repo)
373 repo = self._get_repo(repo)
373 return self.sa.query(FileStore)\
374 return self.sa.query(FileStore)\
374 .filter(FileStore.repo == repo)\
375 .filter(FileStore.repo == repo)\
375 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
376 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
376
377
377 def mark_as_fork(self, repo, fork, user):
378 def mark_as_fork(self, repo, fork, user):
378 repo = self._get_repo(repo)
379 repo = self._get_repo(repo)
379 fork = self._get_repo(fork)
380 fork = self._get_repo(fork)
380 if fork and repo.repo_id == fork.repo_id:
381 if fork and repo.repo_id == fork.repo_id:
381 raise Exception("Cannot set repository as fork of itself")
382 raise Exception("Cannot set repository as fork of itself")
382
383
383 if fork and repo.repo_type != fork.repo_type:
384 if fork and repo.repo_type != fork.repo_type:
384 raise RepositoryError(
385 raise RepositoryError(
385 "Cannot set repository as fork of repository with other type")
386 "Cannot set repository as fork of repository with other type")
386
387
387 repo.fork = fork
388 repo.fork = fork
388 self.sa.add(repo)
389 self.sa.add(repo)
389 return repo
390 return repo
390
391
391 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
392 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
392 dbrepo = self._get_repo(repo)
393 dbrepo = self._get_repo(repo)
393 remote_uri = remote_uri or dbrepo.clone_uri
394 remote_uri = remote_uri or dbrepo.clone_uri
394 if not remote_uri:
395 if not remote_uri:
395 raise Exception("This repository doesn't have a clone uri")
396 raise Exception("This repository doesn't have a clone uri")
396
397
397 repo = dbrepo.scm_instance(cache=False)
398 repo = dbrepo.scm_instance(cache=False)
398 repo.config.clear_section('hooks')
399 repo.config.clear_section('hooks')
399
400
400 try:
401 try:
401 # NOTE(marcink): add extra validation so we skip invalid urls
402 # NOTE(marcink): add extra validation so we skip invalid urls
402 # this is due this tasks can be executed via scheduler without
403 # this is due this tasks can be executed via scheduler without
403 # proper validation of remote_uri
404 # proper validation of remote_uri
404 if validate_uri:
405 if validate_uri:
405 config = make_db_config(clear_session=False)
406 config = make_db_config(clear_session=False)
406 url_validator(remote_uri, dbrepo.repo_type, config)
407 url_validator(remote_uri, dbrepo.repo_type, config)
407 except InvalidCloneUrl:
408 except InvalidCloneUrl:
408 raise
409 raise
409
410
410 repo_name = dbrepo.repo_name
411 repo_name = dbrepo.repo_name
411 try:
412 try:
412 # TODO: we need to make sure those operations call proper hooks !
413 # TODO: we need to make sure those operations call proper hooks !
413 repo.fetch(remote_uri)
414 repo.fetch(remote_uri)
414
415
415 self.mark_for_invalidation(repo_name)
416 self.mark_for_invalidation(repo_name)
416 except Exception:
417 except Exception:
417 log.error(traceback.format_exc())
418 log.error(traceback.format_exc())
418 raise
419 raise
419
420
420 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
421 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
421 dbrepo = self._get_repo(repo)
422 dbrepo = self._get_repo(repo)
422 remote_uri = remote_uri or dbrepo.push_uri
423 remote_uri = remote_uri or dbrepo.push_uri
423 if not remote_uri:
424 if not remote_uri:
424 raise Exception("This repository doesn't have a clone uri")
425 raise Exception("This repository doesn't have a clone uri")
425
426
426 repo = dbrepo.scm_instance(cache=False)
427 repo = dbrepo.scm_instance(cache=False)
427 repo.config.clear_section('hooks')
428 repo.config.clear_section('hooks')
428
429
429 try:
430 try:
430 # NOTE(marcink): add extra validation so we skip invalid urls
431 # NOTE(marcink): add extra validation so we skip invalid urls
431 # this is due this tasks can be executed via scheduler without
432 # this is due this tasks can be executed via scheduler without
432 # proper validation of remote_uri
433 # proper validation of remote_uri
433 if validate_uri:
434 if validate_uri:
434 config = make_db_config(clear_session=False)
435 config = make_db_config(clear_session=False)
435 url_validator(remote_uri, dbrepo.repo_type, config)
436 url_validator(remote_uri, dbrepo.repo_type, config)
436 except InvalidCloneUrl:
437 except InvalidCloneUrl:
437 raise
438 raise
438
439
439 try:
440 try:
440 repo.push(remote_uri)
441 repo.push(remote_uri)
441 except Exception:
442 except Exception:
442 log.error(traceback.format_exc())
443 log.error(traceback.format_exc())
443 raise
444 raise
444
445
445 def commit_change(self, repo, repo_name, commit, user, author, message,
446 def commit_change(self, repo, repo_name, commit, user, author, message,
446 content, f_path):
447 content, f_path):
447 """
448 """
448 Commits changes
449 Commits changes
449
450
450 :param repo: SCM instance
451 :param repo: SCM instance
451
452
452 """
453 """
453 user = self._get_user(user)
454 user = self._get_user(user)
454
455
455 # decoding here will force that we have proper encoded values
456 # decoding here will force that we have proper encoded values
456 # in any other case this will throw exceptions and deny commit
457 # in any other case this will throw exceptions and deny commit
457 content = safe_str(content)
458 content = safe_str(content)
458 path = safe_str(f_path)
459 path = safe_str(f_path)
459 # message and author needs to be unicode
460 # message and author needs to be unicode
460 # proper backend should then translate that into required type
461 # proper backend should then translate that into required type
461 message = safe_unicode(message)
462 message = safe_unicode(message)
462 author = safe_unicode(author)
463 author = safe_unicode(author)
463 imc = repo.in_memory_commit
464 imc = repo.in_memory_commit
464 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
465 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
465 try:
466 try:
466 # TODO: handle pre-push action !
467 # TODO: handle pre-push action !
467 tip = imc.commit(
468 tip = imc.commit(
468 message=message, author=author, parents=[commit],
469 message=message, author=author, parents=[commit],
469 branch=commit.branch)
470 branch=commit.branch)
470 except Exception as e:
471 except Exception as e:
471 log.error(traceback.format_exc())
472 log.error(traceback.format_exc())
472 raise IMCCommitError(str(e))
473 raise IMCCommitError(str(e))
473 finally:
474 finally:
474 # always clear caches, if commit fails we want fresh object also
475 # always clear caches, if commit fails we want fresh object also
475 self.mark_for_invalidation(repo_name)
476 self.mark_for_invalidation(repo_name)
476
477
477 # We trigger the post-push action
478 # We trigger the post-push action
478 hooks_utils.trigger_post_push_hook(
479 hooks_utils.trigger_post_push_hook(
479 username=user.username, action='push_local', hook_type='post_push',
480 username=user.username, action='push_local', hook_type='post_push',
480 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
481 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
481 return tip
482 return tip
482
483
483 def _sanitize_path(self, f_path):
484 def _sanitize_path(self, f_path):
484 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
485 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
485 raise NonRelativePathError('%s is not an relative path' % f_path)
486 raise NonRelativePathError('%s is not an relative path' % f_path)
486 if f_path:
487 if f_path:
487 f_path = os.path.normpath(f_path)
488 f_path = os.path.normpath(f_path)
488 return f_path
489 return f_path
489
490
490 def get_dirnode_metadata(self, request, commit, dir_node):
491 def get_dirnode_metadata(self, request, commit, dir_node):
491 if not dir_node.is_dir():
492 if not dir_node.is_dir():
492 return []
493 return []
493
494
494 data = []
495 data = []
495 for node in dir_node:
496 for node in dir_node:
496 if not node.is_file():
497 if not node.is_file():
497 # we skip file-nodes
498 # we skip file-nodes
498 continue
499 continue
499
500
500 last_commit = node.last_commit
501 last_commit = node.last_commit
501 last_commit_date = last_commit.date
502 last_commit_date = last_commit.date
502 data.append({
503 data.append({
503 'name': node.name,
504 'name': node.name,
504 'size': h.format_byte_size_binary(node.size),
505 'size': h.format_byte_size_binary(node.size),
505 'modified_at': h.format_date(last_commit_date),
506 'modified_at': h.format_date(last_commit_date),
506 'modified_ts': last_commit_date.isoformat(),
507 'modified_ts': last_commit_date.isoformat(),
507 'revision': last_commit.revision,
508 'revision': last_commit.revision,
508 'short_id': last_commit.short_id,
509 'short_id': last_commit.short_id,
509 'message': h.escape(last_commit.message),
510 'message': h.escape(last_commit.message),
510 'author': h.escape(last_commit.author),
511 'author': h.escape(last_commit.author),
511 'user_profile': h.gravatar_with_user(
512 'user_profile': h.gravatar_with_user(
512 request, last_commit.author),
513 request, last_commit.author),
513 })
514 })
514
515
515 return data
516 return data
516
517
517 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
518 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
518 extended_info=False, content=False, max_file_bytes=None):
519 extended_info=False, content=False, max_file_bytes=None):
519 """
520 """
520 recursive walk in root dir and return a set of all path in that dir
521 recursive walk in root dir and return a set of all path in that dir
521 based on repository walk function
522 based on repository walk function
522
523
523 :param repo_name: name of repository
524 :param repo_name: name of repository
524 :param commit_id: commit id for which to list nodes
525 :param commit_id: commit id for which to list nodes
525 :param root_path: root path to list
526 :param root_path: root path to list
526 :param flat: return as a list, if False returns a dict with description
527 :param flat: return as a list, if False returns a dict with description
527 :param extended_info: show additional info such as md5, binary, size etc
528 :param extended_info: show additional info such as md5, binary, size etc
528 :param content: add nodes content to the return data
529 :param content: add nodes content to the return data
529 :param max_file_bytes: will not return file contents over this limit
530 :param max_file_bytes: will not return file contents over this limit
530
531
531 """
532 """
532 _files = list()
533 _files = list()
533 _dirs = list()
534 _dirs = list()
534 try:
535 try:
535 _repo = self._get_repo(repo_name)
536 _repo = self._get_repo(repo_name)
536 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
537 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
537 root_path = root_path.lstrip('/')
538 root_path = root_path.lstrip('/')
538 for __, dirs, files in commit.walk(root_path):
539 for __, dirs, files in commit.walk(root_path):
539
540
540 for f in files:
541 for f in files:
541 _content = None
542 _content = None
542 _data = f_name = f.unicode_path
543 _data = f_name = f.unicode_path
543
544
544 if not flat:
545 if not flat:
545 _data = {
546 _data = {
546 "name": h.escape(f_name),
547 "name": h.escape(f_name),
547 "type": "file",
548 "type": "file",
548 }
549 }
549 if extended_info:
550 if extended_info:
550 _data.update({
551 _data.update({
551 "md5": f.md5,
552 "md5": f.md5,
552 "binary": f.is_binary,
553 "binary": f.is_binary,
553 "size": f.size,
554 "size": f.size,
554 "extension": f.extension,
555 "extension": f.extension,
555 "mimetype": f.mimetype,
556 "mimetype": f.mimetype,
556 "lines": f.lines()[0]
557 "lines": f.lines()[0]
557 })
558 })
558
559
559 if content:
560 if content:
560 over_size_limit = (max_file_bytes is not None
561 over_size_limit = (max_file_bytes is not None
561 and f.size > max_file_bytes)
562 and f.size > max_file_bytes)
562 full_content = None
563 full_content = None
563 if not f.is_binary and not over_size_limit:
564 if not f.is_binary and not over_size_limit:
564 full_content = safe_str(f.content)
565 full_content = safe_str(f.content)
565
566
566 _data.update({
567 _data.update({
567 "content": full_content,
568 "content": full_content,
568 })
569 })
569 _files.append(_data)
570 _files.append(_data)
570
571
571 for d in dirs:
572 for d in dirs:
572 _data = d_name = d.unicode_path
573 _data = d_name = d.unicode_path
573 if not flat:
574 if not flat:
574 _data = {
575 _data = {
575 "name": h.escape(d_name),
576 "name": h.escape(d_name),
576 "type": "dir",
577 "type": "dir",
577 }
578 }
578 if extended_info:
579 if extended_info:
579 _data.update({
580 _data.update({
580 "md5": None,
581 "md5": None,
581 "binary": None,
582 "binary": None,
582 "size": None,
583 "size": None,
583 "extension": None,
584 "extension": None,
584 })
585 })
585 if content:
586 if content:
586 _data.update({
587 _data.update({
587 "content": None
588 "content": None
588 })
589 })
589 _dirs.append(_data)
590 _dirs.append(_data)
590 except RepositoryError:
591 except RepositoryError:
591 log.exception("Exception in get_nodes")
592 log.exception("Exception in get_nodes")
592 raise
593 raise
593
594
594 return _dirs, _files
595 return _dirs, _files
595
596
596 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 """
598 """
598 Generate files for quick filter in files view
599 Generate files for quick filter in files view
599 """
600 """
600
601
601 _files = list()
602 _files = list()
602 _dirs = list()
603 _dirs = list()
603 try:
604 try:
604 _repo = self._get_repo(repo_name)
605 _repo = self._get_repo(repo_name)
605 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 root_path = root_path.lstrip('/')
607 root_path = root_path.lstrip('/')
607 for __, dirs, files in commit.walk(root_path):
608 for __, dirs, files in commit.walk(root_path):
608
609
609 for f in files:
610 for f in files:
610
611
611 _data = {
612 _data = {
612 "name": h.escape(f.unicode_path),
613 "name": h.escape(f.unicode_path),
613 "type": "file",
614 "type": "file",
614 }
615 }
615
616
616 _files.append(_data)
617 _files.append(_data)
617
618
618 for d in dirs:
619 for d in dirs:
619
620
620 _data = {
621 _data = {
621 "name": h.escape(d.unicode_path),
622 "name": h.escape(d.unicode_path),
622 "type": "dir",
623 "type": "dir",
623 }
624 }
624
625
625 _dirs.append(_data)
626 _dirs.append(_data)
626 except RepositoryError:
627 except RepositoryError:
627 log.exception("Exception in get_quick_filter_nodes")
628 log.exception("Exception in get_quick_filter_nodes")
628 raise
629 raise
629
630
630 return _dirs, _files
631 return _dirs, _files
631
632
632 def get_node(self, repo_name, commit_id, file_path,
633 def get_node(self, repo_name, commit_id, file_path,
633 extended_info=False, content=False, max_file_bytes=None, cache=True):
634 extended_info=False, content=False, max_file_bytes=None, cache=True):
634 """
635 """
635 retrieve single node from commit
636 retrieve single node from commit
636 """
637 """
637 try:
638 try:
638
639
639 _repo = self._get_repo(repo_name)
640 _repo = self._get_repo(repo_name)
640 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
641 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
641
642
642 file_node = commit.get_node(file_path)
643 file_node = commit.get_node(file_path)
643 if file_node.is_dir():
644 if file_node.is_dir():
644 raise RepositoryError('The given path is a directory')
645 raise RepositoryError('The given path is a directory')
645
646
646 _content = None
647 _content = None
647 f_name = file_node.unicode_path
648 f_name = file_node.unicode_path
648
649
649 file_data = {
650 file_data = {
650 "name": h.escape(f_name),
651 "name": h.escape(f_name),
651 "type": "file",
652 "type": "file",
652 }
653 }
653
654
654 if extended_info:
655 if extended_info:
655 file_data.update({
656 file_data.update({
656 "extension": file_node.extension,
657 "extension": file_node.extension,
657 "mimetype": file_node.mimetype,
658 "mimetype": file_node.mimetype,
658 })
659 })
659
660
660 if cache:
661 if cache:
661 md5 = file_node.md5
662 md5 = file_node.md5
662 is_binary = file_node.is_binary
663 is_binary = file_node.is_binary
663 size = file_node.size
664 size = file_node.size
664 else:
665 else:
665 is_binary, md5, size, _content = file_node.metadata_uncached()
666 is_binary, md5, size, _content = file_node.metadata_uncached()
666
667
667 file_data.update({
668 file_data.update({
668 "md5": md5,
669 "md5": md5,
669 "binary": is_binary,
670 "binary": is_binary,
670 "size": size,
671 "size": size,
671 })
672 })
672
673
673 if content and cache:
674 if content and cache:
674 # get content + cache
675 # get content + cache
675 size = file_node.size
676 size = file_node.size
676 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
677 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
677 full_content = None
678 full_content = None
678 all_lines = 0
679 all_lines = 0
679 if not file_node.is_binary and not over_size_limit:
680 if not file_node.is_binary and not over_size_limit:
680 full_content = safe_unicode(file_node.content)
681 full_content = safe_unicode(file_node.content)
681 all_lines, empty_lines = file_node.count_lines(full_content)
682 all_lines, empty_lines = file_node.count_lines(full_content)
682
683
683 file_data.update({
684 file_data.update({
684 "content": full_content,
685 "content": full_content,
685 "lines": all_lines
686 "lines": all_lines
686 })
687 })
687 elif content:
688 elif content:
688 # get content *without* cache
689 # get content *without* cache
689 if _content is None:
690 if _content is None:
690 is_binary, md5, size, _content = file_node.metadata_uncached()
691 is_binary, md5, size, _content = file_node.metadata_uncached()
691
692
692 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
693 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
693 full_content = None
694 full_content = None
694 all_lines = 0
695 all_lines = 0
695 if not is_binary and not over_size_limit:
696 if not is_binary and not over_size_limit:
696 full_content = safe_unicode(_content)
697 full_content = safe_unicode(_content)
697 all_lines, empty_lines = file_node.count_lines(full_content)
698 all_lines, empty_lines = file_node.count_lines(full_content)
698
699
699 file_data.update({
700 file_data.update({
700 "content": full_content,
701 "content": full_content,
701 "lines": all_lines
702 "lines": all_lines
702 })
703 })
703
704
704 except RepositoryError:
705 except RepositoryError:
705 log.exception("Exception in get_node")
706 log.exception("Exception in get_node")
706 raise
707 raise
707
708
708 return file_data
709 return file_data
709
710
710 def get_fts_data(self, repo_name, commit_id, root_path='/'):
711 def get_fts_data(self, repo_name, commit_id, root_path='/'):
711 """
712 """
712 Fetch node tree for usage in full text search
713 Fetch node tree for usage in full text search
713 """
714 """
714
715
715 tree_info = list()
716 tree_info = list()
716
717
717 try:
718 try:
718 _repo = self._get_repo(repo_name)
719 _repo = self._get_repo(repo_name)
719 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
720 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
720 root_path = root_path.lstrip('/')
721 root_path = root_path.lstrip('/')
721 for __, dirs, files in commit.walk(root_path):
722 for __, dirs, files in commit.walk(root_path):
722
723
723 for f in files:
724 for f in files:
724 is_binary, md5, size, _content = f.metadata_uncached()
725 is_binary, md5, size, _content = f.metadata_uncached()
725 _data = {
726 _data = {
726 "name": f.unicode_path,
727 "name": f.unicode_path,
727 "md5": md5,
728 "md5": md5,
728 "extension": f.extension,
729 "extension": f.extension,
729 "binary": is_binary,
730 "binary": is_binary,
730 "size": size
731 "size": size
731 }
732 }
732
733
733 tree_info.append(_data)
734 tree_info.append(_data)
734
735
735 except RepositoryError:
736 except RepositoryError:
736 log.exception("Exception in get_nodes")
737 log.exception("Exception in get_nodes")
737 raise
738 raise
738
739
739 return tree_info
740 return tree_info
740
741
741 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
742 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
742 author=None, trigger_push_hook=True):
743 author=None, trigger_push_hook=True):
743 """
744 """
744 Commits given multiple nodes into repo
745 Commits given multiple nodes into repo
745
746
746 :param user: RhodeCode User object or user_id, the commiter
747 :param user: RhodeCode User object or user_id, the commiter
747 :param repo: RhodeCode Repository object
748 :param repo: RhodeCode Repository object
748 :param message: commit message
749 :param message: commit message
749 :param nodes: mapping {filename:{'content':content},...}
750 :param nodes: mapping {filename:{'content':content},...}
750 :param parent_commit: parent commit, can be empty than it's
751 :param parent_commit: parent commit, can be empty than it's
751 initial commit
752 initial commit
752 :param author: author of commit, cna be different that commiter
753 :param author: author of commit, cna be different that commiter
753 only for git
754 only for git
754 :param trigger_push_hook: trigger push hooks
755 :param trigger_push_hook: trigger push hooks
755
756
756 :returns: new commited commit
757 :returns: new commited commit
757 """
758 """
758
759
759 user = self._get_user(user)
760 user = self._get_user(user)
760 scm_instance = repo.scm_instance(cache=False)
761 scm_instance = repo.scm_instance(cache=False)
761
762
762 processed_nodes = []
763 processed_nodes = []
763 for f_path in nodes:
764 for f_path in nodes:
764 f_path = self._sanitize_path(f_path)
765 f_path = self._sanitize_path(f_path)
765 content = nodes[f_path]['content']
766 content = nodes[f_path]['content']
766 f_path = safe_str(f_path)
767 f_path = safe_str(f_path)
767 # decoding here will force that we have proper encoded values
768 # decoding here will force that we have proper encoded values
768 # in any other case this will throw exceptions and deny commit
769 # in any other case this will throw exceptions and deny commit
769 if isinstance(content, (basestring,)):
770 if isinstance(content, (basestring,)):
770 content = safe_str(content)
771 content = safe_str(content)
771 elif isinstance(content, (file, cStringIO.OutputType,)):
772 elif isinstance(content, (file, cStringIO.OutputType,)):
772 content = content.read()
773 content = content.read()
773 else:
774 else:
774 raise Exception('Content is of unrecognized type %s' % (
775 raise Exception('Content is of unrecognized type %s' % (
775 type(content)
776 type(content)
776 ))
777 ))
777 processed_nodes.append((f_path, content))
778 processed_nodes.append((f_path, content))
778
779
779 message = safe_unicode(message)
780 message = safe_unicode(message)
780 commiter = user.full_contact
781 commiter = user.full_contact
781 author = safe_unicode(author) if author else commiter
782 author = safe_unicode(author) if author else commiter
782
783
783 imc = scm_instance.in_memory_commit
784 imc = scm_instance.in_memory_commit
784
785
785 if not parent_commit:
786 if not parent_commit:
786 parent_commit = EmptyCommit(alias=scm_instance.alias)
787 parent_commit = EmptyCommit(alias=scm_instance.alias)
787
788
788 if isinstance(parent_commit, EmptyCommit):
789 if isinstance(parent_commit, EmptyCommit):
789 # EmptyCommit means we we're editing empty repository
790 # EmptyCommit means we we're editing empty repository
790 parents = None
791 parents = None
791 else:
792 else:
792 parents = [parent_commit]
793 parents = [parent_commit]
793 # add multiple nodes
794 # add multiple nodes
794 for path, content in processed_nodes:
795 for path, content in processed_nodes:
795 imc.add(FileNode(path, content=content))
796 imc.add(FileNode(path, content=content))
796 # TODO: handle pre push scenario
797 # TODO: handle pre push scenario
797 tip = imc.commit(message=message,
798 tip = imc.commit(message=message,
798 author=author,
799 author=author,
799 parents=parents,
800 parents=parents,
800 branch=parent_commit.branch)
801 branch=parent_commit.branch)
801
802
802 self.mark_for_invalidation(repo.repo_name)
803 self.mark_for_invalidation(repo.repo_name)
803 if trigger_push_hook:
804 if trigger_push_hook:
804 hooks_utils.trigger_post_push_hook(
805 hooks_utils.trigger_post_push_hook(
805 username=user.username, action='push_local',
806 username=user.username, action='push_local',
806 repo_name=repo.repo_name, repo_type=scm_instance.alias,
807 repo_name=repo.repo_name, repo_type=scm_instance.alias,
807 hook_type='post_push',
808 hook_type='post_push',
808 commit_ids=[tip.raw_id])
809 commit_ids=[tip.raw_id])
809 return tip
810 return tip
810
811
811 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
812 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
812 author=None, trigger_push_hook=True):
813 author=None, trigger_push_hook=True):
813 user = self._get_user(user)
814 user = self._get_user(user)
814 scm_instance = repo.scm_instance(cache=False)
815 scm_instance = repo.scm_instance(cache=False)
815
816
816 message = safe_unicode(message)
817 message = safe_unicode(message)
817 commiter = user.full_contact
818 commiter = user.full_contact
818 author = safe_unicode(author) if author else commiter
819 author = safe_unicode(author) if author else commiter
819
820
820 imc = scm_instance.in_memory_commit
821 imc = scm_instance.in_memory_commit
821
822
822 if not parent_commit:
823 if not parent_commit:
823 parent_commit = EmptyCommit(alias=scm_instance.alias)
824 parent_commit = EmptyCommit(alias=scm_instance.alias)
824
825
825 if isinstance(parent_commit, EmptyCommit):
826 if isinstance(parent_commit, EmptyCommit):
826 # EmptyCommit means we we're editing empty repository
827 # EmptyCommit means we we're editing empty repository
827 parents = None
828 parents = None
828 else:
829 else:
829 parents = [parent_commit]
830 parents = [parent_commit]
830
831
831 # add multiple nodes
832 # add multiple nodes
832 for _filename, data in nodes.items():
833 for _filename, data in nodes.items():
833 # new filename, can be renamed from the old one, also sanitaze
834 # new filename, can be renamed from the old one, also sanitaze
834 # the path for any hack around relative paths like ../../ etc.
835 # the path for any hack around relative paths like ../../ etc.
835 filename = self._sanitize_path(data['filename'])
836 filename = self._sanitize_path(data['filename'])
836 old_filename = self._sanitize_path(_filename)
837 old_filename = self._sanitize_path(_filename)
837 content = data['content']
838 content = data['content']
838 file_mode = data.get('mode')
839 file_mode = data.get('mode')
839 filenode = FileNode(old_filename, content=content, mode=file_mode)
840 filenode = FileNode(old_filename, content=content, mode=file_mode)
840 op = data['op']
841 op = data['op']
841 if op == 'add':
842 if op == 'add':
842 imc.add(filenode)
843 imc.add(filenode)
843 elif op == 'del':
844 elif op == 'del':
844 imc.remove(filenode)
845 imc.remove(filenode)
845 elif op == 'mod':
846 elif op == 'mod':
846 if filename != old_filename:
847 if filename != old_filename:
847 # TODO: handle renames more efficient, needs vcs lib changes
848 # TODO: handle renames more efficient, needs vcs lib changes
848 imc.remove(filenode)
849 imc.remove(filenode)
849 imc.add(FileNode(filename, content=content, mode=file_mode))
850 imc.add(FileNode(filename, content=content, mode=file_mode))
850 else:
851 else:
851 imc.change(filenode)
852 imc.change(filenode)
852
853
853 try:
854 try:
854 # TODO: handle pre push scenario commit changes
855 # TODO: handle pre push scenario commit changes
855 tip = imc.commit(message=message,
856 tip = imc.commit(message=message,
856 author=author,
857 author=author,
857 parents=parents,
858 parents=parents,
858 branch=parent_commit.branch)
859 branch=parent_commit.branch)
859 except NodeNotChangedError:
860 except NodeNotChangedError:
860 raise
861 raise
861 except Exception as e:
862 except Exception as e:
862 log.exception("Unexpected exception during call to imc.commit")
863 log.exception("Unexpected exception during call to imc.commit")
863 raise IMCCommitError(str(e))
864 raise IMCCommitError(str(e))
864 finally:
865 finally:
865 # always clear caches, if commit fails we want fresh object also
866 # always clear caches, if commit fails we want fresh object also
866 self.mark_for_invalidation(repo.repo_name)
867 self.mark_for_invalidation(repo.repo_name)
867
868
868 if trigger_push_hook:
869 if trigger_push_hook:
869 hooks_utils.trigger_post_push_hook(
870 hooks_utils.trigger_post_push_hook(
870 username=user.username, action='push_local', hook_type='post_push',
871 username=user.username, action='push_local', hook_type='post_push',
871 repo_name=repo.repo_name, repo_type=scm_instance.alias,
872 repo_name=repo.repo_name, repo_type=scm_instance.alias,
872 commit_ids=[tip.raw_id])
873 commit_ids=[tip.raw_id])
873
874
874 return tip
875 return tip
875
876
876 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
877 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
877 author=None, trigger_push_hook=True):
878 author=None, trigger_push_hook=True):
878 """
879 """
879 Deletes given multiple nodes into `repo`
880 Deletes given multiple nodes into `repo`
880
881
881 :param user: RhodeCode User object or user_id, the committer
882 :param user: RhodeCode User object or user_id, the committer
882 :param repo: RhodeCode Repository object
883 :param repo: RhodeCode Repository object
883 :param message: commit message
884 :param message: commit message
884 :param nodes: mapping {filename:{'content':content},...}
885 :param nodes: mapping {filename:{'content':content},...}
885 :param parent_commit: parent commit, can be empty than it's initial
886 :param parent_commit: parent commit, can be empty than it's initial
886 commit
887 commit
887 :param author: author of commit, cna be different that commiter only
888 :param author: author of commit, cna be different that commiter only
888 for git
889 for git
889 :param trigger_push_hook: trigger push hooks
890 :param trigger_push_hook: trigger push hooks
890
891
891 :returns: new commit after deletion
892 :returns: new commit after deletion
892 """
893 """
893
894
894 user = self._get_user(user)
895 user = self._get_user(user)
895 scm_instance = repo.scm_instance(cache=False)
896 scm_instance = repo.scm_instance(cache=False)
896
897
897 processed_nodes = []
898 processed_nodes = []
898 for f_path in nodes:
899 for f_path in nodes:
899 f_path = self._sanitize_path(f_path)
900 f_path = self._sanitize_path(f_path)
900 # content can be empty but for compatabilty it allows same dicts
901 # content can be empty but for compatabilty it allows same dicts
901 # structure as add_nodes
902 # structure as add_nodes
902 content = nodes[f_path].get('content')
903 content = nodes[f_path].get('content')
903 processed_nodes.append((f_path, content))
904 processed_nodes.append((f_path, content))
904
905
905 message = safe_unicode(message)
906 message = safe_unicode(message)
906 commiter = user.full_contact
907 commiter = user.full_contact
907 author = safe_unicode(author) if author else commiter
908 author = safe_unicode(author) if author else commiter
908
909
909 imc = scm_instance.in_memory_commit
910 imc = scm_instance.in_memory_commit
910
911
911 if not parent_commit:
912 if not parent_commit:
912 parent_commit = EmptyCommit(alias=scm_instance.alias)
913 parent_commit = EmptyCommit(alias=scm_instance.alias)
913
914
914 if isinstance(parent_commit, EmptyCommit):
915 if isinstance(parent_commit, EmptyCommit):
915 # EmptyCommit means we we're editing empty repository
916 # EmptyCommit means we we're editing empty repository
916 parents = None
917 parents = None
917 else:
918 else:
918 parents = [parent_commit]
919 parents = [parent_commit]
919 # add multiple nodes
920 # add multiple nodes
920 for path, content in processed_nodes:
921 for path, content in processed_nodes:
921 imc.remove(FileNode(path, content=content))
922 imc.remove(FileNode(path, content=content))
922
923
923 # TODO: handle pre push scenario
924 # TODO: handle pre push scenario
924 tip = imc.commit(message=message,
925 tip = imc.commit(message=message,
925 author=author,
926 author=author,
926 parents=parents,
927 parents=parents,
927 branch=parent_commit.branch)
928 branch=parent_commit.branch)
928
929
929 self.mark_for_invalidation(repo.repo_name)
930 self.mark_for_invalidation(repo.repo_name)
930 if trigger_push_hook:
931 if trigger_push_hook:
931 hooks_utils.trigger_post_push_hook(
932 hooks_utils.trigger_post_push_hook(
932 username=user.username, action='push_local', hook_type='post_push',
933 username=user.username, action='push_local', hook_type='post_push',
933 repo_name=repo.repo_name, repo_type=scm_instance.alias,
934 repo_name=repo.repo_name, repo_type=scm_instance.alias,
934 commit_ids=[tip.raw_id])
935 commit_ids=[tip.raw_id])
935 return tip
936 return tip
936
937
937 def strip(self, repo, commit_id, branch):
938 def strip(self, repo, commit_id, branch):
938 scm_instance = repo.scm_instance(cache=False)
939 scm_instance = repo.scm_instance(cache=False)
939 scm_instance.config.clear_section('hooks')
940 scm_instance.config.clear_section('hooks')
940 scm_instance.strip(commit_id, branch)
941 scm_instance.strip(commit_id, branch)
941 self.mark_for_invalidation(repo.repo_name)
942 self.mark_for_invalidation(repo.repo_name)
942
943
943 def get_unread_journal(self):
944 def get_unread_journal(self):
944 return self.sa.query(UserLog).count()
945 return self.sa.query(UserLog).count()
945
946
946 @classmethod
947 @classmethod
947 def backend_landing_ref(cls, repo_type):
948 def backend_landing_ref(cls, repo_type):
948 """
949 """
949 Return a default landing ref based on a repository type.
950 Return a default landing ref based on a repository type.
950 """
951 """
951
952
952 landing_ref = {
953 landing_ref = {
953 'hg': ('branch:default', 'default'),
954 'hg': ('branch:default', 'default'),
954 'git': ('branch:master', 'master'),
955 'git': ('branch:master', 'master'),
955 'svn': ('rev:tip', 'latest tip'),
956 'svn': ('rev:tip', 'latest tip'),
956 'default': ('rev:tip', 'latest tip'),
957 'default': ('rev:tip', 'latest tip'),
957 }
958 }
958
959
959 return landing_ref.get(repo_type) or landing_ref['default']
960 return landing_ref.get(repo_type) or landing_ref['default']
960
961
961 def get_repo_landing_revs(self, translator, repo=None):
962 def get_repo_landing_revs(self, translator, repo=None):
962 """
963 """
963 Generates select option with tags branches and bookmarks (for hg only)
964 Generates select option with tags branches and bookmarks (for hg only)
964 grouped by type
965 grouped by type
965
966
966 :param repo:
967 :param repo:
967 """
968 """
968 _ = translator
969 _ = translator
969 repo = self._get_repo(repo)
970 repo = self._get_repo(repo)
970
971
971 if repo:
972 if repo:
972 repo_type = repo.repo_type
973 repo_type = repo.repo_type
973 else:
974 else:
974 repo_type = 'default'
975 repo_type = 'default'
975
976
976 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
977 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
977
978
978 default_ref_options = [
979 default_ref_options = [
979 [default_landing_ref, landing_ref_lbl]
980 [default_landing_ref, landing_ref_lbl]
980 ]
981 ]
981 default_choices = [
982 default_choices = [
982 default_landing_ref
983 default_landing_ref
983 ]
984 ]
984
985
985 if not repo:
986 if not repo:
986 return default_choices, default_ref_options
987 return default_choices, default_ref_options
987
988
988 repo = repo.scm_instance()
989 repo = repo.scm_instance()
989
990
990 ref_options = [('rev:tip', 'latest tip')]
991 ref_options = [('rev:tip', 'latest tip')]
991 choices = ['rev:tip']
992 choices = ['rev:tip']
992
993
993 # branches
994 # branches
994 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
995 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
995 if not branch_group:
996 if not branch_group:
996 # new repo, or without maybe a branch?
997 # new repo, or without maybe a branch?
997 branch_group = default_ref_options
998 branch_group = default_ref_options
998
999
999 branches_group = (branch_group, _("Branches"))
1000 branches_group = (branch_group, _("Branches"))
1000 ref_options.append(branches_group)
1001 ref_options.append(branches_group)
1001 choices.extend([x[0] for x in branches_group[0]])
1002 choices.extend([x[0] for x in branches_group[0]])
1002
1003
1003 # bookmarks for HG
1004 # bookmarks for HG
1004 if repo.alias == 'hg':
1005 if repo.alias == 'hg':
1005 bookmarks_group = (
1006 bookmarks_group = (
1006 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1007 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1007 for b in repo.bookmarks],
1008 for b in repo.bookmarks],
1008 _("Bookmarks"))
1009 _("Bookmarks"))
1009 ref_options.append(bookmarks_group)
1010 ref_options.append(bookmarks_group)
1010 choices.extend([x[0] for x in bookmarks_group[0]])
1011 choices.extend([x[0] for x in bookmarks_group[0]])
1011
1012
1012 # tags
1013 # tags
1013 tags_group = (
1014 tags_group = (
1014 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1015 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1015 for t in repo.tags],
1016 for t in repo.tags],
1016 _("Tags"))
1017 _("Tags"))
1017 ref_options.append(tags_group)
1018 ref_options.append(tags_group)
1018 choices.extend([x[0] for x in tags_group[0]])
1019 choices.extend([x[0] for x in tags_group[0]])
1019
1020
1020 return choices, ref_options
1021 return choices, ref_options
1021
1022
1022 def get_server_info(self, environ=None):
1023 def get_server_info(self, environ=None):
1023 server_info = get_system_info(environ)
1024 server_info = get_system_info(environ)
1024 return server_info
1025 return server_info
@@ -1,411 +1,411 b''
1
1
2 /******************************************************************************
2 /******************************************************************************
3 * *
3 * *
4 * DO NOT CHANGE THIS FILE MANUALLY *
4 * DO NOT CHANGE THIS FILE MANUALLY *
5 * *
5 * *
6 * *
6 * *
7 * This file is automatically generated when the app starts up with *
7 * This file is automatically generated when the app starts up with *
8 * generate_js_files = true *
8 * generate_js_files = true *
9 * *
9 * *
10 * To add a route here pass jsroute=True to the route definition in the app *
10 * To add a route here pass jsroute=True to the route definition in the app *
11 * *
11 * *
12 ******************************************************************************/
12 ******************************************************************************/
13 function registerRCRoutes() {
13 function registerRCRoutes() {
14 // routes registration
14 // routes registration
15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
21 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
21 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
22 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
22 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
23 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
23 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
24 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
24 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
25 pyroutes.register('admin_home', '/_admin', []);
25 pyroutes.register('admin_home', '/_admin', []);
26 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
26 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
27 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
27 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
28 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
28 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
29 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
29 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
30 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
30 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
31 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
31 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
32 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
32 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
33 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
33 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
34 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
34 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
35 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
35 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
36 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
36 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
37 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
37 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
38 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
38 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
39 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
39 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
40 pyroutes.register('admin_settings', '/_admin/settings', []);
40 pyroutes.register('admin_settings', '/_admin/settings', []);
41 pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []);
41 pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []);
42 pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']);
42 pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']);
43 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
43 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
44 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
44 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
45 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
45 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
46 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
46 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
47 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
47 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
48 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
48 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
49 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
49 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
50 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
50 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
51 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
51 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
52 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
52 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
53 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
53 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
54 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
54 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
55 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
55 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
56 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
56 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
57 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
57 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
58 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
58 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
59 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
59 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
60 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
60 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
61 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
61 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
62 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
62 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
63 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
63 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
64 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
64 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
65 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
65 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
66 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
66 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
67 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
67 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
68 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
68 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
69 pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []);
69 pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []);
70 pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']);
70 pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']);
71 pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']);
71 pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']);
72 pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']);
72 pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']);
73 pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []);
73 pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []);
74 pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []);
74 pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []);
75 pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []);
75 pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []);
76 pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']);
76 pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']);
77 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
77 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
78 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
78 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
79 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
79 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
80 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
80 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
81 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
81 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
82 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
82 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
83 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
83 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
84 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
84 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
85 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
85 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
86 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
86 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
87 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
87 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
88 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
88 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
89 pyroutes.register('apiv2', '/_admin/api', []);
89 pyroutes.register('apiv2', '/_admin/api', []);
90 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
90 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
91 pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
91 pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
92 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
92 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
93 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
93 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
94 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
94 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
95 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
95 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
96 pyroutes.register('channelstream_proxy', '/_channelstream', []);
96 pyroutes.register('channelstream_proxy', '/_channelstream', []);
97 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
97 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
98 pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
98 pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
99 pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
99 pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
100 pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
100 pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
101 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
101 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
102 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
102 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
103 pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
103 pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
104 pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
104 pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
105 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
105 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
106 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
106 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
107 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
107 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
108 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
108 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
109 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
109 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
110 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
110 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
111 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
111 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
112 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
112 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
113 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
113 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
114 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
114 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
115 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
115 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
116 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
116 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
117 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
117 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
118 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
118 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
119 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
119 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
120 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
120 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
121 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
121 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
122 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
122 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
123 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
123 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
124 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
124 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
125 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
125 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
126 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
126 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
127 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
127 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
128 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
128 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
129 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
129 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
130 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
130 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
131 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
131 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
132 pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
132 pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
133 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
133 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
134 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
134 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
135 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
135 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
136 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
136 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
137 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
137 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
138 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
138 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
139 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
139 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
140 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
140 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
141 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
141 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
142 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
142 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
143 pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
143 pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
144 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
144 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
145 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
145 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
146 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
146 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
147 pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']);
147 pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']);
148 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
148 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
149 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
149 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
150 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
150 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
151 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
151 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
152 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
152 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
153 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
153 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
154 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
154 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
155 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
155 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
156 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
156 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
157 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
157 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
158 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
158 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
159 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
159 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
160 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
160 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
161 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
161 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
162 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
162 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
163 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
163 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
164 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
164 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
165 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
165 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
166 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
166 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
167 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
167 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
168 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
168 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
169 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
169 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
170 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
170 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
171 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
171 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
172 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
172 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
173 pyroutes.register('favicon', '/favicon.ico', []);
173 pyroutes.register('favicon', '/favicon.ico', []);
174 pyroutes.register('file_preview', '/_file_preview', []);
174 pyroutes.register('file_preview', '/_file_preview', []);
175 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
175 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
176 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
176 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
177 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
177 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
178 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
178 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
179 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
179 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
180 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
180 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
181 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']);
181 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']);
182 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
182 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
183 pyroutes.register('gists_create', '/_admin/gists/create', []);
183 pyroutes.register('gists_create', '/_admin/gists/create', []);
184 pyroutes.register('gists_new', '/_admin/gists/new', []);
184 pyroutes.register('gists_new', '/_admin/gists/new', []);
185 pyroutes.register('gists_show', '/_admin/gists', []);
185 pyroutes.register('gists_show', '/_admin/gists', []);
186 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
186 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
187 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
187 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
188 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
188 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
189 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
189 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
190 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
190 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
191 pyroutes.register('goto_switcher_data', '/_goto_data', []);
191 pyroutes.register('goto_switcher_data', '/_goto_data', []);
192 pyroutes.register('home', '/', []);
192 pyroutes.register('home', '/', []);
193 pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']);
193 pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']);
194 pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']);
194 pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']);
195 pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']);
195 pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']);
196 pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']);
196 pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']);
197 pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']);
197 pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']);
198 pyroutes.register('journal', '/_admin/journal', []);
198 pyroutes.register('journal', '/_admin/journal', []);
199 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
199 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
200 pyroutes.register('journal_public', '/_admin/public_journal', []);
200 pyroutes.register('journal_public', '/_admin/public_journal', []);
201 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
201 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
202 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
202 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
203 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
203 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
204 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
204 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
205 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
205 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
206 pyroutes.register('login', '/_admin/login', []);
206 pyroutes.register('login', '/_admin/login', []);
207 pyroutes.register('logout', '/_admin/logout', []);
207 pyroutes.register('logout', '/_admin/logout', []);
208 pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []);
208 pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []);
209 pyroutes.register('main_page_repos_data', '/_home_repos', []);
209 pyroutes.register('main_page_repos_data', '/_home_repos', []);
210 pyroutes.register('markup_preview', '/_markup_preview', []);
210 pyroutes.register('markup_preview', '/_markup_preview', []);
211 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
211 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
212 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
212 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
213 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
213 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
214 pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
214 pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
215 pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
215 pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
216 pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
216 pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
217 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
217 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
218 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
218 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
219 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
219 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
220 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
220 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
221 pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
221 pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
222 pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
222 pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
223 pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
223 pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
224 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
224 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
225 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
225 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
226 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
226 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
227 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
227 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
228 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
228 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
229 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
229 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
230 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
230 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
231 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
231 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
232 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
232 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
233 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
233 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
234 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
234 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
235 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
235 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
236 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
236 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
237 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
237 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
238 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
238 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
239 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
239 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
240 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
240 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
241 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
241 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
242 pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []);
242 pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []);
243 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
243 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
244 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
244 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
245 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
245 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
246 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
246 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
247 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
247 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
248 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
248 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
249 pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
249 pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
250 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
250 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
251 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
251 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
252 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
252 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
253 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
253 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
254 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
254 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
255 pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
255 pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
256 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
256 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
257 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
257 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
258 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
258 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
259 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
259 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
260 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
260 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
261 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
261 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
262 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
262 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
263 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
263 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
264 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
264 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
265 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
265 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
266 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
266 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
267 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
267 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
268 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
268 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
269 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
269 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
270 pyroutes.register('register', '/_admin/register', []);
270 pyroutes.register('register', '/_admin/register', []);
271 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
271 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
272 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
272 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
273 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
273 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
274 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
274 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
275 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
275 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
276 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
276 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
277 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
277 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
278 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
278 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
279 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
279 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
280 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
280 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
281 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
281 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
282 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
282 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
283 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
283 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
284 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
284 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
285 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
285 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
286 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
286 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
287 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
287 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
288 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
288 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
289 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
289 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
290 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
290 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
291 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
291 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
292 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_history_id)s/history_view', ['repo_name', 'commit_id', 'comment_history_id']);
292 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']);
293 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
293 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
294 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
294 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
295 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
295 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
296 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
296 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
297 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
297 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
298 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
298 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
299 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
299 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
300 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
300 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
301 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
301 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
302 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
302 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
303 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
303 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
304 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
304 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
305 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
305 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
306 pyroutes.register('repo_create', '/_admin/repos/create', []);
306 pyroutes.register('repo_create', '/_admin/repos/create', []);
307 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
307 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
308 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
308 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
309 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
309 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
310 pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']);
310 pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']);
311 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
311 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
312 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
312 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
313 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
313 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
314 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
314 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
315 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
315 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
316 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
316 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
317 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
317 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
318 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
318 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
319 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
319 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
320 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
320 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
321 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
321 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
322 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
322 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
323 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
323 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
324 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
324 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
325 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
325 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
326 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
326 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
327 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
327 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
328 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
328 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
329 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
329 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
330 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
330 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
331 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
331 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
332 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
332 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
333 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
333 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
334 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
334 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
335 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
335 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
336 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
336 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
337 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
337 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
338 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
338 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
339 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
339 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
340 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
340 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
341 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
341 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
342 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
342 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
343 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
343 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
344 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
344 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
345 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
345 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
346 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
346 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
347 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
347 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
348 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
348 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
349 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
349 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
350 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
350 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
351 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
351 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
352 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
352 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
353 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
353 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
354 pyroutes.register('repo_list_data', '/_repos', []);
354 pyroutes.register('repo_list_data', '/_repos', []);
355 pyroutes.register('repo_new', '/_admin/repos/new', []);
355 pyroutes.register('repo_new', '/_admin/repos/new', []);
356 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
356 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
357 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
357 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
358 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
358 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
359 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
359 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
360 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
360 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
361 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
361 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
362 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
362 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
363 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
363 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
364 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
364 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
365 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
365 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
366 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
366 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
367 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
367 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
368 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
368 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
369 pyroutes.register('repos', '/_admin/repos', []);
369 pyroutes.register('repos', '/_admin/repos', []);
370 pyroutes.register('repos_data', '/_admin/repos_data', []);
370 pyroutes.register('repos_data', '/_admin/repos_data', []);
371 pyroutes.register('reset_password', '/_admin/password_reset', []);
371 pyroutes.register('reset_password', '/_admin/password_reset', []);
372 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
372 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
373 pyroutes.register('robots', '/robots.txt', []);
373 pyroutes.register('robots', '/robots.txt', []);
374 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
374 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
375 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
375 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
376 pyroutes.register('search', '/_admin/search', []);
376 pyroutes.register('search', '/_admin/search', []);
377 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
377 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
378 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
378 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
379 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
379 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
380 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
380 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
381 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
381 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
382 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
382 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
383 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
383 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
384 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
384 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
385 pyroutes.register('upload_file', '/_file_store/upload', []);
385 pyroutes.register('upload_file', '/_file_store/upload', []);
386 pyroutes.register('user_autocomplete_data', '/_users', []);
386 pyroutes.register('user_autocomplete_data', '/_users', []);
387 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
387 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
388 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
388 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
389 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
389 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
390 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
390 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
391 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
391 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
392 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
392 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
393 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
393 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
394 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
394 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
395 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
395 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
396 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
396 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
397 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
397 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
398 pyroutes.register('user_groups', '/_admin/user_groups', []);
398 pyroutes.register('user_groups', '/_admin/user_groups', []);
399 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
399 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
400 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
400 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
401 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
401 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
402 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
402 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
403 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
403 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
404 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
404 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
405 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
405 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
406 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
406 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
407 pyroutes.register('users', '/_admin/users', []);
407 pyroutes.register('users', '/_admin/users', []);
408 pyroutes.register('users_create', '/_admin/users/create', []);
408 pyroutes.register('users_create', '/_admin/users/create', []);
409 pyroutes.register('users_data', '/_admin/users_data', []);
409 pyroutes.register('users_data', '/_admin/users_data', []);
410 pyroutes.register('users_new', '/_admin/users/new', []);
410 pyroutes.register('users_new', '/_admin/users/new', []);
411 }
411 }
@@ -1,1644 +1,1645 b''
1 // # Copyright (C) 2010-2020 RhodeCode GmbH
1 // # Copyright (C) 2010-2020 RhodeCode GmbH
2 // #
2 // #
3 // # This program is free software: you can redistribute it and/or modify
3 // # This program is free software: you can redistribute it and/or modify
4 // # it under the terms of the GNU Affero General Public License, version 3
4 // # it under the terms of the GNU Affero General Public License, version 3
5 // # (only), as published by the Free Software Foundation.
5 // # (only), as published by the Free Software Foundation.
6 // #
6 // #
7 // # This program is distributed in the hope that it will be useful,
7 // # This program is distributed in the hope that it will be useful,
8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 // # GNU General Public License for more details.
10 // # GNU General Public License for more details.
11 // #
11 // #
12 // # You should have received a copy of the GNU Affero General Public License
12 // # You should have received a copy of the GNU Affero General Public License
13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 // #
14 // #
15 // # This program is dual-licensed. If you wish to learn more about the
15 // # This program is dual-licensed. If you wish to learn more about the
16 // # RhodeCode Enterprise Edition, including its added features, Support services,
16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
var firefoxAnchorFix = function() {
    // Workaround for Firefox: when inline comments are injected into the
    // page after load, Firefox misbehaves when jumping to anchor links.
    // Re-assigning the href (appending an empty string is a no-op change)
    // forces the browser to re-resolve the anchor.
    if (location.href.indexOf('#') !== -1) {
        location.href += '';
    }
};
27
27
28
28
var linkifyComments = function(comments) {
    // Point the inline-comments counter link at the first rendered comment,
    // so clicking the counter scrolls to it. Does nothing when there are
    // no comments or the first element carries no comment-id.
    var firstCommentId = comments ? $(comments[0]).data('comment-id') : null;

    if (firstCommentId) {
        $('#inline-comments-counter').attr('href', '#comment-' + firstCommentId);
    }
};
39
39
40
40
var bindToggleButtons = function() {
    // Wire up comment toggle controls: clicking one shows/hides the
    // inline-comment rows that follow it, up to the next source line row.
    var onToggleClick = function() {
        $(this).parent().nextUntil('tr.line').toggle('inline-comments');
    };
    $('.comment-toggle').on('click', onToggleClick);
};
46
46
47
47
var _submitAjaxPOST = function(url, postData, successHandler, failHandler) {
    // Fire a partial-XHR POST to `url` with `postData` serialized as a
    // query string. `successHandler(data)` runs on success;
    // `failHandler(data, textStatus, errorThrown)` runs on failure and
    // defaults to a no-op. Returns the jqXHR so callers can chain/abort.
    var onFail = failHandler || function() {};

    var request = $.ajax({
        url: url,
        type: 'POST',
        data: toQueryString(postData),
        headers: {'X-PARTIAL-XHR': true}
    });

    request.done(function(data) {
        successHandler(data);
    });
    request.fail(function(data, textStatus, errorThrown) {
        onFail(data, textStatus, errorThrown);
    });

    return request;
};
65
65
66
66
67 /* Comment form for main and inline comments */
67 /* Comment form for main and inline comments */
68 (function(mod) {
68 (function(mod) {
69
69
70 if (typeof exports == "object" && typeof module == "object") {
70 if (typeof exports == "object" && typeof module == "object") {
71 // CommonJS
71 // CommonJS
72 module.exports = mod();
72 module.exports = mod();
73 }
73 }
74 else {
74 else {
75 // Plain browser env
75 // Plain browser env
76 (this || window).CommentForm = mod();
76 (this || window).CommentForm = mod();
77 }
77 }
78
78
79 })(function() {
79 })(function() {
80 "use strict";
80 "use strict";
81
81
82 function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id) {
82 function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id) {
83
83
84 if (!(this instanceof CommentForm)) {
84 if (!(this instanceof CommentForm)) {
85 return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id);
85 return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id);
86 }
86 }
87
87
88 // bind the element instance to our Form
88 // bind the element instance to our Form
89 $(formElement).get(0).CommentForm = this;
89 $(formElement).get(0).CommentForm = this;
90
90
91 this.withLineNo = function(selector) {
91 this.withLineNo = function(selector) {
92 var lineNo = this.lineNo;
92 var lineNo = this.lineNo;
93 if (lineNo === undefined) {
93 if (lineNo === undefined) {
94 return selector
94 return selector
95 } else {
95 } else {
96 return selector + '_' + lineNo;
96 return selector + '_' + lineNo;
97 }
97 }
98 };
98 };
99
99
100 this.commitId = commitId;
100 this.commitId = commitId;
101 this.pullRequestId = pullRequestId;
101 this.pullRequestId = pullRequestId;
102 this.lineNo = lineNo;
102 this.lineNo = lineNo;
103 this.initAutocompleteActions = initAutocompleteActions;
103 this.initAutocompleteActions = initAutocompleteActions;
104
104
105 this.previewButton = this.withLineNo('#preview-btn');
105 this.previewButton = this.withLineNo('#preview-btn');
106 this.previewContainer = this.withLineNo('#preview-container');
106 this.previewContainer = this.withLineNo('#preview-container');
107
107
108 this.previewBoxSelector = this.withLineNo('#preview-box');
108 this.previewBoxSelector = this.withLineNo('#preview-box');
109
109
110 this.editButton = this.withLineNo('#edit-btn');
110 this.editButton = this.withLineNo('#edit-btn');
111 this.editContainer = this.withLineNo('#edit-container');
111 this.editContainer = this.withLineNo('#edit-container');
112 this.cancelButton = this.withLineNo('#cancel-btn');
112 this.cancelButton = this.withLineNo('#cancel-btn');
113 this.commentType = this.withLineNo('#comment_type');
113 this.commentType = this.withLineNo('#comment_type');
114
114
115 this.resolvesId = null;
115 this.resolvesId = null;
116 this.resolvesActionId = null;
116 this.resolvesActionId = null;
117
117
118 this.closesPr = '#close_pull_request';
118 this.closesPr = '#close_pull_request';
119
119
120 this.cmBox = this.withLineNo('#text');
120 this.cmBox = this.withLineNo('#text');
121 this.cm = initCommentBoxCodeMirror(this, this.cmBox, this.initAutocompleteActions);
121 this.cm = initCommentBoxCodeMirror(this, this.cmBox, this.initAutocompleteActions);
122
122
123 this.statusChange = this.withLineNo('#change_status');
123 this.statusChange = this.withLineNo('#change_status');
124
124
125 this.submitForm = formElement;
125 this.submitForm = formElement;
126
126
127 this.submitButton = $(this.submitForm).find('.submit-comment-action');
127 this.submitButton = $(this.submitForm).find('.submit-comment-action');
128 this.submitButtonText = this.submitButton.val();
128 this.submitButtonText = this.submitButton.val();
129
129
130 this.submitDraftButton = $(this.submitForm).find('.submit-draft-action');
130 this.submitDraftButton = $(this.submitForm).find('.submit-draft-action');
131 this.submitDraftButtonText = this.submitDraftButton.val();
131 this.submitDraftButtonText = this.submitDraftButton.val();
132
132
133 this.previewUrl = pyroutes.url('repo_commit_comment_preview',
133 this.previewUrl = pyroutes.url('repo_commit_comment_preview',
134 {'repo_name': templateContext.repo_name,
134 {'repo_name': templateContext.repo_name,
135 'commit_id': templateContext.commit_data.commit_id});
135 'commit_id': templateContext.commit_data.commit_id});
136
136
137 if (edit){
137 if (edit){
138 this.submitDraftButton.hide();
138 this.submitDraftButton.hide();
139 this.submitButtonText = _gettext('Update Comment');
139 this.submitButtonText = _gettext('Update Comment');
140 $(this.commentType).prop('disabled', true);
140 $(this.commentType).prop('disabled', true);
141 $(this.commentType).addClass('disabled');
141 $(this.commentType).addClass('disabled');
142 var editInfo =
142 var editInfo =
143 '';
143 '';
144 $(editInfo).insertBefore($(this.editButton).parent());
144 $(editInfo).insertBefore($(this.editButton).parent());
145 }
145 }
146
146
147 if (resolvesCommentId){
147 if (resolvesCommentId){
148 this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId);
148 this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId);
149 this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId);
149 this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId);
150 $(this.commentType).prop('disabled', true);
150 $(this.commentType).prop('disabled', true);
151 $(this.commentType).addClass('disabled');
151 $(this.commentType).addClass('disabled');
152
152
153 // disable select
153 // disable select
154 setTimeout(function() {
154 setTimeout(function() {
155 $(self.statusChange).select2('readonly', true);
155 $(self.statusChange).select2('readonly', true);
156 }, 10);
156 }, 10);
157
157
158 var resolvedInfo = (
158 var resolvedInfo = (
159 '<li class="resolve-action">' +
159 '<li class="resolve-action">' +
160 '<input type="hidden" id="resolve_comment_{0}" name="resolve_comment_{0}" value="{0}">' +
160 '<input type="hidden" id="resolve_comment_{0}" name="resolve_comment_{0}" value="{0}">' +
161 '<button id="resolve_comment_action_{0}" class="resolve-text btn btn-sm" onclick="return Rhodecode.comments.submitResolution({0})">{1} #{0}</button>' +
161 '<button id="resolve_comment_action_{0}" class="resolve-text btn btn-sm" onclick="return Rhodecode.comments.submitResolution({0})">{1} #{0}</button>' +
162 '</li>'
162 '</li>'
163 ).format(resolvesCommentId, _gettext('resolve comment'));
163 ).format(resolvesCommentId, _gettext('resolve comment'));
164 $(resolvedInfo).insertAfter($(this.commentType).parent());
164 $(resolvedInfo).insertAfter($(this.commentType).parent());
165 }
165 }
166
166
167 // based on commitId, or pullRequestId decide where do we submit
167 // based on commitId, or pullRequestId decide where do we submit
168 // out data
168 // out data
169 if (this.commitId){
169 if (this.commitId){
170 var pyurl = 'repo_commit_comment_create';
170 var pyurl = 'repo_commit_comment_create';
171 if(edit){
171 if(edit){
172 pyurl = 'repo_commit_comment_edit';
172 pyurl = 'repo_commit_comment_edit';
173 }
173 }
174 this.submitUrl = pyroutes.url(pyurl,
174 this.submitUrl = pyroutes.url(pyurl,
175 {'repo_name': templateContext.repo_name,
175 {'repo_name': templateContext.repo_name,
176 'commit_id': this.commitId,
176 'commit_id': this.commitId,
177 'comment_id': comment_id});
177 'comment_id': comment_id});
178 this.selfUrl = pyroutes.url('repo_commit',
178 this.selfUrl = pyroutes.url('repo_commit',
179 {'repo_name': templateContext.repo_name,
179 {'repo_name': templateContext.repo_name,
180 'commit_id': this.commitId});
180 'commit_id': this.commitId});
181
181
182 } else if (this.pullRequestId) {
182 } else if (this.pullRequestId) {
183 var pyurl = 'pullrequest_comment_create';
183 var pyurl = 'pullrequest_comment_create';
184 if(edit){
184 if(edit){
185 pyurl = 'pullrequest_comment_edit';
185 pyurl = 'pullrequest_comment_edit';
186 }
186 }
187 this.submitUrl = pyroutes.url(pyurl,
187 this.submitUrl = pyroutes.url(pyurl,
188 {'repo_name': templateContext.repo_name,
188 {'repo_name': templateContext.repo_name,
189 'pull_request_id': this.pullRequestId,
189 'pull_request_id': this.pullRequestId,
190 'comment_id': comment_id});
190 'comment_id': comment_id});
191 this.selfUrl = pyroutes.url('pullrequest_show',
191 this.selfUrl = pyroutes.url('pullrequest_show',
192 {'repo_name': templateContext.repo_name,
192 {'repo_name': templateContext.repo_name,
193 'pull_request_id': this.pullRequestId});
193 'pull_request_id': this.pullRequestId});
194
194
195 } else {
195 } else {
196 throw new Error(
196 throw new Error(
197 'CommentForm requires pullRequestId, or commitId to be specified.')
197 'CommentForm requires pullRequestId, or commitId to be specified.')
198 }
198 }
199
199
200 // FUNCTIONS and helpers
200 // FUNCTIONS and helpers
201 var self = this;
201 var self = this;
202
202
203 this.isInline = function(){
203 this.isInline = function(){
204 return this.lineNo && this.lineNo != 'general';
204 return this.lineNo && this.lineNo != 'general';
205 };
205 };
206
206
207 this.getCmInstance = function(){
207 this.getCmInstance = function(){
208 return this.cm
208 return this.cm
209 };
209 };
210
210
this.setPlaceholder = function (placeholder) {
    // Update the editor placeholder text, when an editor is available.
    var editor = this.getCmInstance();
    if (editor) {
        editor.setOption('placeholder', placeholder);
    }
};
217
217
this.getCommentStatus = function () {
    // Current value of the review-status selector inside this form.
    var $statusField = $(this.submitForm).find(this.statusChange);
    return $statusField.val();
};
221
221
this.getCommentType = function () {
    // Current value of the comment-type selector inside this form.
    var $typeField = $(this.submitForm).find(this.commentType);
    return $typeField.val();
};
225
225
this.getDraftState = function () {
    // The submit button carries a data-is-draft flag saying whether this
    // comment should be saved as a draft.
    var $submitter = $(this.submitForm).find('input[type="submit"].submitter');
    return $submitter.data('isDraft');
};
231
231
this.getResolvesId = function () {
    // Id of the comment this one resolves, or null when not a resolution.
    var value = $(this.submitForm).find(this.resolvesId).val();
    return value || null;
};
235
235
this.getClosePr = function () {
    // Value of the close-pull-request field, or null when unset.
    var value = $(this.submitForm).find(this.closesPr).val();
    return value || null;
};
239
239
this.markCommentResolved = function (resolvedCommentId) {
    // Delegate to the page-level comments controller to update the UI.
    Rhodecode.comments.markCommentResolved(resolvedCommentId);
};
243
243
this.isAllowedToSubmit = function () {
    // Submission is allowed only while neither submit button is disabled.
    var commentLocked = $(this.submitButton).prop('disabled');
    var draftLocked = $(this.submitDraftButton).prop('disabled');
    return !(commentLocked || draftLocked);
};
249
249
this.initStatusChangeSelector = function () {
    // Render a status option/selection as a colored icon plus escaped label.
    var renderStatus = function (item, escapeMarkup) {
        var option = item.element;
        return '<i class="icon-circle review-status-{0}"></i><span>{1}</span>'.format(
            $(option).data('status'), escapeMarkup(item.text));
    };

    var $statusField = $(this.submitForm).find(this.statusChange);

    $statusField.select2({
        placeholder: _gettext('Status Review'),
        formatResult: function (result, container, query, escapeMarkup) {
            return renderStatus(result, escapeMarkup);
        },
        formatSelection: function (data, container, escapeMarkup) {
            return renderStatus(data, escapeMarkup);
        },
        containerCssClass: "drop-menu status_box_menu",
        dropdownCssClass: "drop-menu-dropdown",
        dropdownAutoWidth: true,
        minimumResultsForSearch: -1
    });

    $statusField.on('change', function () {
        var status = self.getCommentStatus();

        // Choosing a status on the main (non-inline) form unlocks the
        // submit buttons even with an empty comment body.
        if (status && !self.isInline()) {
            $(self.submitButton).prop('disabled', false);
            $(self.submitDraftButton).prop('disabled', false);
        }

        var placeholderText = _gettext('Comment text will be set automatically based on currently selected status ({0}) ...').format(status);
        self.setPlaceholder(placeholderText);
    });
};
286
286
// Restore the comment form to its original state, optionally pre-filling
// the editor with `content` (used to preserve text after a failed request).
this.resetCommentFormState = function (content) {
    var text = content || '';

    // switch back to the "edit" tab
    $(this.editContainer).show();
    $(this.editButton).parent().addClass('active');
    $(this.previewContainer).hide();
    $(this.previewButton).parent().removeClass('active');

    this.setActionButtonsDisabled(true);
    self.cm.setValue(text);
    self.cm.setOption("readOnly", false);

    if (this.resolvesId) {
        // destroy the "resolves comment" action widget
        $(this.resolvesId).parent().remove();
    }
    // reset the closing-PR flag input, if any
    $('.close-pr-input').remove();

    $(this.statusChange).select2('readonly', false);
};
310
310
this.globalSubmitSuccessCallback = function (comment) {
    // Default behaviour: forward to the page-level GLOBAL hook, when one
    // is registered; otherwise do nothing.
    if (window.commentFormGlobalSubmitSuccessCallback === undefined) {
        return;
    }
    commentFormGlobalSubmitSuccessCallback(comment);
};
317
317
this.submitAjaxPOST = function (url, postData, successHandler, failHandler) {
    // Thin instance-level wrapper around the module-level AJAX POST helper.
    return _submitAjaxPOST(url, postData, successHandler, failHandler);
};
321
321
// Replace the form submit handler; inline comment forms install their own.
this.setHandleFormSubmit = function (callback) {
    this.handleFormSubmit = callback;
};
326
326
// Replace the post-submit success hook.
this.setGlobalSubmitSuccessCallback = function (callback) {
    this.globalSubmitSuccessCallback = callback;
};
331
331
// Default submit handler for the main (general) comment form.
this.handleFormSubmit = function () {
    var commentText = self.cm.getValue();
    var status = self.getCommentStatus();
    var commentType = self.getCommentType();
    var isDraft = self.getDraftState();
    var resolvesCommentId = self.getResolvesId();
    var closePullRequest = self.getClosePr();

    // nothing to do: no text and no status change requested
    if (commentText === "" && !status) {
        return;
    }

    // lock the whole form (cancel included) while the request is in flight;
    // submitEvent=true also flips the button labels to progress text
    self.setActionButtonsDisabled(true, false, true);
    self.cm.setOption("readOnly", true);

    var postData = {
        'text': commentText,
        'changeset_status': status,
        'comment_type': commentType,
        'csrf_token': CSRF_TOKEN
    };
    if (resolvesCommentId) {
        postData['resolves_comment_id'] = resolvesCommentId;
    }
    if (closePullRequest) {
        postData['close_pull_request'] = true;
    }

    // success handler for general comments
    var onSubmitSuccess = function (json_data) {
        if (status && self.commitId) {
            // a status change on a single commit needs a full page reload
            location.reload(true);
        } else {
            // inject newly created comments; json_data is {<comment_id>: {}}
            Rhodecode.comments.attachGeneralComment(json_data);

            self.resetCommentFormState();
            timeagoActivate();
            tooltipActivate();

            if (resolvesCommentId) {
                // visually mark the comment that was just resolved
                self.markCommentResolved(resolvesCommentId);
            }
        }

        // always run the global post-submit hook
        self.globalSubmitSuccessCallback({draft: isDraft, comment_id: comment_id});
    };

    var onSubmitFail = function (jqXHR, textStatus, errorThrown) {
        var prefix = "Error while submitting comment.\n";
        var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
        ajaxErrorSwal(message);
        // restore the form but keep what the user typed
        self.resetCommentFormState(commentText);
    };

    self.submitAjaxPOST(
        self.submitUrl, postData, onSubmitSuccess, onSubmitFail);
};
397
397
this.previewSuccessCallback = function (o) {
    // Inject the rendered markup and switch the UI into preview mode.
    $(self.previewBoxSelector).html(o);
    $(self.previewBoxSelector).removeClass('unloaded');

    // swap active tab: preview on, edit off
    $(self.previewButton).parent().addClass('active');
    $(self.editButton).parent().removeClass('active');

    // unlock the action buttons again
    self.setActionButtonsDisabled(false);
};
409
409
// Enable/disable the form action buttons.
//   state            - true disables, false enables
//   excludeCancelBtn - when true, leave the cancel button untouched
//   submitEvent      - true while an actual submit is in flight; also
//                      swaps the submit button labels for progress text
this.setActionButtonsDisabled = function (state, excludeCancelBtn, submitEvent) {
    var skipCancel = excludeCancelBtn || false;
    var submitting = submitEvent || false;

    $(this.editButton).prop('disabled', state);
    $(this.previewButton).prop('disabled', state);
    if (!skipCancel) {
        $(this.cancelButton).prop('disabled', state);
    }

    // A selected review status keeps the submit buttons usable on the
    // main form (isInline means an inline form, which stays locked).
    var submitDisabled = state;
    if (!submitting && this.getCommentStatus() && !self.isInline()) {
        submitDisabled = false;
    }
    $(this.submitButton).prop('disabled', submitDisabled);
    $(this.submitDraftButton).prop('disabled', submitDisabled);

    if (submitting) {
        if (self.getDraftState()) {
            $(this.submitDraftButton).val(_gettext('Saving Draft...'));
        } else {
            $(this.submitButton).val(_gettext('Submitting...'));
        }
    } else {
        // restore the original button captions
        $(this.submitButton).val(this.submitButtonText);
        $(this.submitDraftButton).val(this.submitDraftButtonText);
    }
};
446
446
// On load, lock the preview/edit/submit buttons but keep cancel usable.
var excludeCancelBtn = true;
this.setActionButtonsDisabled(true, excludeCancelBtn);

// anonymous users don't have access to initialized CM instance, so only
// wire the change listener when an editor exists
if (this.cm !== undefined) {
    this.cm.on('change', function (editor) {
        // empty text re-locks the buttons; any content unlocks them
        var locked = editor.getValue() === "";
        self.setActionButtonsDisabled(locked, excludeCancelBtn);
    });
}
461
461
// "Edit" tab: hide the preview pane and show the editor again.
$(this.editButton).on('click', function (e) {
    e.preventDefault();

    $(self.previewButton).parent().removeClass('active');
    $(self.previewContainer).hide();

    $(self.editButton).parent().addClass('active');
    $(self.editContainer).show();
});
472
472
// "Preview" tab: render the current text server-side and display it.
$(this.previewButton).on('click', function (e) {
    e.preventDefault();

    var text = self.cm.getValue();
    if (text === "") {
        // nothing to preview
        return;
    }

    var postData = {
        'text': text,
        'renderer': templateContext.visual.default_renderer,
        'csrf_token': CSRF_TOKEN
    };

    // lock ALL buttons while the preview request runs
    self.setActionButtonsDisabled(true);

    $(self.previewBoxSelector).addClass('unloaded');
    $(self.previewBoxSelector).html(_gettext('Loading ...'));

    $(self.editContainer).hide();
    $(self.previewContainer).show();

    // on failure, reset the form state but preserve the typed text
    var onPreviewFail = function (jqXHR, textStatus, errorThrown) {
        var prefix = "Error while preview of comment.\n";
        var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
        ajaxErrorSwal(message);

        self.resetCommentFormState(text);
    };

    self.submitAjaxPOST(
        self.previewUrl, postData, self.previewSuccessCallback,
        onPreviewFail);

    $(self.previewButton).parent().addClass('active');
    $(self.editButton).parent().removeClass('active');
});
511
511
// Intercept native form submission and route through handleFormSubmit.
$(this.submitForm).submit(function (e) {
    e.preventDefault();

    if (!self.isAllowedToSubmit()) {
        // buttons are disabled — refuse the submit
        return false;
    }
    self.handleFormSubmit();
});
521
521
522 }
522 }
523
523
524 return CommentForm;
524 return CommentForm;
525 });
525 });
526
526
527 /* selector for comment versions */
527 /* selector for comment versions */
var initVersionSelector = function (selector, initialData) {
    // Render one dropdown row for a historical comment version.
    var renderVersionRow = function (result, container, query, escapeMarkup) {
        return renderTemplate('commentVersion', {
            show_disabled: true,
            version: result.comment_version,
            user_name: result.comment_author_username,
            gravatar_url: result.comment_author_gravatar,
            size: 16,
            timeago_component: result.comment_created_on,
        });
    };

    var $versionSelect = $(selector);

    $versionSelect.select2({
        placeholder: "Edited",
        containerCssClass: "drop-menu-comment-history",
        dropdownCssClass: "drop-menu-dropdown",
        dropdownAutoWidth: true,
        minimumResultsForSearch: -1,
        data: initialData,
        formatResult: renderVersionRow,
    });

    $versionSelect.on('select2-selecting', function (e) {
        // hide the mask first, since the preventDefault() below would
        // otherwise leave it covering the page
        $("#select2-drop-mask").click();
        e.preventDefault();
        e.choice.action();
    });

    $versionSelect.on("select2-open", function () {
        // timestamps inside the freshly opened dropdown need activation
        timeagoActivate();
    });
};
563
563
564 /* comments controller */
564 /* comments controller */
565 var CommentsController = function() {
565 var CommentsController = function() {
566 var mainComment = '#text';
566 var mainComment = '#text';
567 var self = this;
567 var self = this;
568
568
this.showVersion = function (comment_id, comment_history_id) {
    // Fetch a historical version of a comment and show it in a modal.
    //
    // comment_id         - id of the comment whose history is requested
    // comment_history_id - id of the concrete history entry to render

    var historyViewUrl = pyroutes.url(
        'repo_commit_comment_history_view',
        {
            'repo_name': templateContext.repo_name,
            'commit_id': null, // We don't need to check the commit data here...
            'comment_id': comment_id,
            'comment_history_id': comment_history_id,
        }
    );
    // FIX: declare the callbacks with `var` — they were previously assigned
    // without a declaration and leaked into the global scope.
    var successRenderCommit = function (data) {
        SwalNoAnimation.fire({
            html: data,
            title: '',
        });
    };
    var failRenderCommit = function () {
        SwalNoAnimation.fire({
            html: 'Error while loading comment history',
            title: '',
        });
    };
    _submitAjaxPOST(
        historyViewUrl, {'csrf_token': CSRF_TOKEN},
        successRenderCommit,
        failRenderCommit
    );
};
597
598
this.getLineNumber = function (node) {
    // Resolve the diff line number a node belongs to: first from the
    // enclosing <td>, then (for inline comments) from the node's own data.
    var $el = $(node);
    var lineNo = $el.closest('td').attr('data-line-no');
    if (lineNo === undefined && $el.data('commentInline')) {
        lineNo = $el.data('commentLineNo');
    }
    return lineNo;
};
607
608
this.scrollToComment = function (node, offset, outdated) {
    // Select and scroll to a comment relative to `node` (or to the
    // currently selected comment when no node is given).
    //   offset   - how many comments to move by (+1 next, -1 previous)
    //   outdated - navigate outdated comments instead of current ones
    if (offset === undefined) {
        offset = 0;
    }
    outdated = outdated || false;
    var klass = outdated ? 'div.comment-outdated' : 'div.comment-current';

    if (!node) {
        node = $('.comment-selected');
        if (!node.length) {
            // FIX: was $('comment-current') — a tag selector that matches
            // nothing; the class selector is clearly intended (matching
            // the '.comment-selected' lookup above).
            node = $('.comment-current');
        }
    }

    // FIX: declare with `var` — these were implicit globals.
    var $wrapper = $(node).closest('div.comment');

    // show hidden comment when referenced.
    if (!$wrapper.is(':visible')) {
        $wrapper.show();
    }

    var $comment = $(node).closest(klass);
    var $comments = $(klass);

    $('.comment-selected').removeClass('comment-selected');

    var nextIdx = $(klass).index($comment) + offset;
    if (nextIdx >= $comments.length) {
        // wrap around past the last comment
        nextIdx = 0;
    }
    var $next = $(klass).eq(nextIdx);

    // expand the containing collapsed code block / file diff, if needed
    var $cb = $next.closest('.cb');
    $cb.removeClass('cb-collapsed');

    var $filediffCollapseState = $cb.closest('.filediff').prev();
    $filediffCollapseState.prop('checked', false);
    $next.addClass('comment-selected');
    scrollToElement($next);
    return false;
};
649
650
this.nextComment = function (node) {
    // Jump to the comment following `node`.
    return self.scrollToComment(node, 1);
};
653
654
this.prevComment = function (node) {
    // Jump to the comment preceding `node`.
    return self.scrollToComment(node, -1);
};
657
658
this.nextOutdatedComment = function (node) {
    // Jump to the outdated comment following `node`.
    return self.scrollToComment(node, 1, true);
};
661
662
this.prevOutdatedComment = function (node) {
    // Jump to the outdated comment preceding `node`.
    return self.scrollToComment(node, -1, true);
};
665
666
this.cancelComment = function (node) {
    // Dismiss an open comment form and, when cancelling an edit, re-show
    // the comment that was being edited.
    var $node = $(node);
    // NOTE(review): `this` here is the controller object, not the clicked
    // element, so `edit` looks like it is always undefined — preserved
    // as-is; confirm the intended source of the 'edit' attribute.
    var edit = $(this).attr('edit');
    var $inlineComments = $node.closest('div.inline-comments');

    if (edit) {
        var $comment;
        if ($inlineComments.length) {
            $comment = $inlineComments.find('div.comment:hidden');
        } else {
            $comment = $('#comments').parent().find('div.comment:hidden');
            // show hidden general comment form
            $('#cb-comment-general-form-placeholder').show();
        }
        $comment.show();
    }

    var $replyWrapper = $node.closest('.comment-inline-form')
        .closest('.reply-thread-container-wrapper');
    $replyWrapper.removeClass('comment-form-active');

    // an outdated thread keeps its reply wrapper hidden after cancelling
    var $lastComment = $inlineComments.find('.comment-inline').last();
    if ($lastComment.hasClass('comment-outdated')) {
        $replyWrapper.hide();
    }

    $node.closest('.comment-inline-form').remove();
    return false;
};
694
695
this._deleteComment = function (node) {
    // Delete the comment containing `node` on the server, hiding it
    // optimistically and rolling back on failure.
    var $node = $(node);
    var $comment = $node.closest('.comment');
    var comment_id = $comment.data('commentId');
    var isDraft = $comment.data('commentDraft');

    var pullRequestId = templateContext.pull_request_data.pull_request_id;
    var commitId = templateContext.commit_data.commit_id;

    // pick the delete endpoint matching the current page context
    var url;
    if (pullRequestId) {
        url = pyroutes.url('pullrequest_comment_delete', {"comment_id": comment_id, "repo_name": templateContext.repo_name, "pull_request_id": pullRequestId});
    } else if (commitId) {
        url = pyroutes.url('repo_commit_comment_delete', {"comment_id": comment_id, "repo_name": templateContext.repo_name, "commit_id": commitId});
    }

    var postData = {
        'csrf_token': CSRF_TOKEN
    };

    // hide immediately; restored if the request fails
    $comment.addClass('comment-deleting');
    $comment.hide('fast');

    var onDeleted = function (response) {
        $comment.remove();

        if (window.updateSticky !== undefined) {
            // comment removal can change the active window size, so
            // notify sticky elements
            updateSticky();
        }

        if (window.refreshAllComments !== undefined && !isDraft) {
            // if we have this handler, run it, and refresh all comment boxes
            refreshAllComments();
        } else if (window.refreshDraftComments !== undefined && isDraft) {
            // same, but for draft comments
            refreshDraftComments();
        }
        return false;
    };

    var onError = function (jqXHR, textStatus, errorThrown) {
        var prefix = "Error while deleting this comment.\n";
        var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
        ajaxErrorSwal(message);

        // roll back the optimistic hide
        $comment.show('fast');
        $comment.removeClass('comment-deleting');
        return false;
    };

    ajaxPOST(url, postData, onDeleted, onError);
};
750
751
751 this.deleteComment = function(node) {
752 this.deleteComment = function(node) {
752 var $comment = $(node).closest('.comment');
753 var $comment = $(node).closest('.comment');
753 var comment_id = $comment.attr('data-comment-id');
754 var comment_id = $comment.attr('data-comment-id');
754
755
755 SwalNoAnimation.fire({
756 SwalNoAnimation.fire({
756 title: 'Delete this comment?',
757 title: 'Delete this comment?',
757 icon: 'warning',
758 icon: 'warning',
758 showCancelButton: true,
759 showCancelButton: true,
759 confirmButtonText: _gettext('Yes, delete comment #{0}!').format(comment_id),
760 confirmButtonText: _gettext('Yes, delete comment #{0}!').format(comment_id),
760
761
761 }).then(function(result) {
762 }).then(function(result) {
762 if (result.value) {
763 if (result.value) {
763 self._deleteComment(node);
764 self._deleteComment(node);
764 }
765 }
765 })
766 })
766 };
767 };
767
768
768 this._finalizeDrafts = function(commentIds) {
769 this._finalizeDrafts = function(commentIds) {
769
770
770 var pullRequestId = templateContext.pull_request_data.pull_request_id;
771 var pullRequestId = templateContext.pull_request_data.pull_request_id;
771 var commitId = templateContext.commit_data.commit_id;
772 var commitId = templateContext.commit_data.commit_id;
772
773
773 if (pullRequestId) {
774 if (pullRequestId) {
774 var url = pyroutes.url('pullrequest_draft_comments_submit', {"repo_name": templateContext.repo_name, "pull_request_id": pullRequestId})
775 var url = pyroutes.url('pullrequest_draft_comments_submit', {"repo_name": templateContext.repo_name, "pull_request_id": pullRequestId})
775 } else if (commitId) {
776 } else if (commitId) {
776 var url = pyroutes.url('commit_draft_comments_submit', {"repo_name": templateContext.repo_name, "commit_id": commitId})
777 var url = pyroutes.url('commit_draft_comments_submit', {"repo_name": templateContext.repo_name, "commit_id": commitId})
777 }
778 }
778
779
779 // remove the drafts so we can lock them before submit.
780 // remove the drafts so we can lock them before submit.
780 $.each(commentIds, function(idx, val){
781 $.each(commentIds, function(idx, val){
781 $('#comment-{0}'.format(val)).remove();
782 $('#comment-{0}'.format(val)).remove();
782 })
783 })
783
784
784 var postData = {'comments': commentIds, 'csrf_token': CSRF_TOKEN};
785 var postData = {'comments': commentIds, 'csrf_token': CSRF_TOKEN};
785
786
786 var submitSuccessCallback = function(json_data) {
787 var submitSuccessCallback = function(json_data) {
787 self.attachInlineComment(json_data);
788 self.attachInlineComment(json_data);
788
789
789 if (window.refreshDraftComments !== undefined) {
790 if (window.refreshDraftComments !== undefined) {
790 // if we have this handler, run it, and refresh all comments boxes
791 // if we have this handler, run it, and refresh all comments boxes
791 refreshDraftComments()
792 refreshDraftComments()
792 }
793 }
793
794
794 if (window.refreshAllComments !== undefined) {
795 if (window.refreshAllComments !== undefined) {
795 // if we have this handler, run it, and refresh all comments boxes
796 // if we have this handler, run it, and refresh all comments boxes
796 refreshAllComments()
797 refreshAllComments()
797 }
798 }
798
799
799 return false;
800 return false;
800 };
801 };
801
802
802 ajaxPOST(url, postData, submitSuccessCallback)
803 ajaxPOST(url, postData, submitSuccessCallback)
803
804
804 }
805 }
805
806
806 this.finalizeDrafts = function(commentIds, callback) {
807 this.finalizeDrafts = function(commentIds, callback) {
807
808
808 SwalNoAnimation.fire({
809 SwalNoAnimation.fire({
809 title: _ngettext('Submit {0} draft comment.', 'Submit {0} draft comments.', commentIds.length).format(commentIds.length),
810 title: _ngettext('Submit {0} draft comment.', 'Submit {0} draft comments.', commentIds.length).format(commentIds.length),
810 icon: 'warning',
811 icon: 'warning',
811 showCancelButton: true,
812 showCancelButton: true,
812 confirmButtonText: _gettext('Yes'),
813 confirmButtonText: _gettext('Yes'),
813
814
814 }).then(function(result) {
815 }).then(function(result) {
815 if (result.value) {
816 if (result.value) {
816 if (callback !== undefined) {
817 if (callback !== undefined) {
817 callback(result)
818 callback(result)
818 }
819 }
819 self._finalizeDrafts(commentIds);
820 self._finalizeDrafts(commentIds);
820 }
821 }
821 })
822 })
822 };
823 };
823
824
824 this.toggleWideMode = function (node) {
825 this.toggleWideMode = function (node) {
825
826
826 if ($('#content').hasClass('wrapper')) {
827 if ($('#content').hasClass('wrapper')) {
827 $('#content').removeClass("wrapper");
828 $('#content').removeClass("wrapper");
828 $('#content').addClass("wide-mode-wrapper");
829 $('#content').addClass("wide-mode-wrapper");
829 $(node).addClass('btn-success');
830 $(node).addClass('btn-success');
830 return true
831 return true
831 } else {
832 } else {
832 $('#content').removeClass("wide-mode-wrapper");
833 $('#content').removeClass("wide-mode-wrapper");
833 $('#content').addClass("wrapper");
834 $('#content').addClass("wrapper");
834 $(node).removeClass('btn-success');
835 $(node).removeClass('btn-success');
835 return false
836 return false
836 }
837 }
837
838
838 };
839 };
839
840
840 /**
841 /**
841 * Turn off/on all comments in file diff
842 * Turn off/on all comments in file diff
842 */
843 */
843 this.toggleDiffComments = function(node) {
844 this.toggleDiffComments = function(node) {
844 // Find closes filediff container
845 // Find closes filediff container
845 var $filediff = $(node).closest('.filediff');
846 var $filediff = $(node).closest('.filediff');
846 if ($(node).hasClass('toggle-on')) {
847 if ($(node).hasClass('toggle-on')) {
847 var show = false;
848 var show = false;
848 } else if ($(node).hasClass('toggle-off')) {
849 } else if ($(node).hasClass('toggle-off')) {
849 var show = true;
850 var show = true;
850 }
851 }
851
852
852 // Toggle each individual comment block, so we can un-toggle single ones
853 // Toggle each individual comment block, so we can un-toggle single ones
853 $.each($filediff.find('.toggle-comment-action'), function(idx, val) {
854 $.each($filediff.find('.toggle-comment-action'), function(idx, val) {
854 self.toggleLineComments($(val), show)
855 self.toggleLineComments($(val), show)
855 })
856 })
856
857
857 // since we change the height of the diff container that has anchor points for upper
858 // since we change the height of the diff container that has anchor points for upper
858 // sticky header, we need to tell it to re-calculate those
859 // sticky header, we need to tell it to re-calculate those
859 if (window.updateSticky !== undefined) {
860 if (window.updateSticky !== undefined) {
860 // potentially our comments change the active window size, so we
861 // potentially our comments change the active window size, so we
861 // notify sticky elements
862 // notify sticky elements
862 updateSticky()
863 updateSticky()
863 }
864 }
864
865
865 return false;
866 return false;
866 }
867 }
867
868
868 this.toggleLineComments = function(node, show) {
869 this.toggleLineComments = function(node, show) {
869
870
870 var trElem = $(node).closest('tr')
871 var trElem = $(node).closest('tr')
871
872
872 if (show === true) {
873 if (show === true) {
873 // mark outdated comments as visible before the toggle;
874 // mark outdated comments as visible before the toggle;
874 $(trElem).find('.comment-outdated').show();
875 $(trElem).find('.comment-outdated').show();
875 $(trElem).removeClass('hide-line-comments');
876 $(trElem).removeClass('hide-line-comments');
876 } else if (show === false) {
877 } else if (show === false) {
877 $(trElem).find('.comment-outdated').hide();
878 $(trElem).find('.comment-outdated').hide();
878 $(trElem).addClass('hide-line-comments');
879 $(trElem).addClass('hide-line-comments');
879 } else {
880 } else {
880 // mark outdated comments as visible before the toggle;
881 // mark outdated comments as visible before the toggle;
881 $(trElem).find('.comment-outdated').show();
882 $(trElem).find('.comment-outdated').show();
882 $(trElem).toggleClass('hide-line-comments');
883 $(trElem).toggleClass('hide-line-comments');
883 }
884 }
884
885
885 // since we change the height of the diff container that has anchor points for upper
886 // since we change the height of the diff container that has anchor points for upper
886 // sticky header, we need to tell it to re-calculate those
887 // sticky header, we need to tell it to re-calculate those
887 if (window.updateSticky !== undefined) {
888 if (window.updateSticky !== undefined) {
888 // potentially our comments change the active window size, so we
889 // potentially our comments change the active window size, so we
889 // notify sticky elements
890 // notify sticky elements
890 updateSticky()
891 updateSticky()
891 }
892 }
892
893
893 };
894 };
894
895
895 this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId, edit, comment_id){
896 this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId, edit, comment_id){
896 var pullRequestId = templateContext.pull_request_data.pull_request_id;
897 var pullRequestId = templateContext.pull_request_data.pull_request_id;
897 var commitId = templateContext.commit_data.commit_id;
898 var commitId = templateContext.commit_data.commit_id;
898
899
899 var commentForm = new CommentForm(
900 var commentForm = new CommentForm(
900 formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId, edit, comment_id);
901 formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId, edit, comment_id);
901 var cm = commentForm.getCmInstance();
902 var cm = commentForm.getCmInstance();
902
903
903 if (resolvesCommentId){
904 if (resolvesCommentId){
904 placeholderText = _gettext('Leave a resolution comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId);
905 placeholderText = _gettext('Leave a resolution comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId);
905 }
906 }
906
907
907 setTimeout(function() {
908 setTimeout(function() {
908 // callbacks
909 // callbacks
909 if (cm !== undefined) {
910 if (cm !== undefined) {
910 commentForm.setPlaceholder(placeholderText);
911 commentForm.setPlaceholder(placeholderText);
911 if (commentForm.isInline()) {
912 if (commentForm.isInline()) {
912 cm.focus();
913 cm.focus();
913 cm.refresh();
914 cm.refresh();
914 }
915 }
915 }
916 }
916 }, 10);
917 }, 10);
917
918
918 // trigger scrolldown to the resolve comment, since it might be away
919 // trigger scrolldown to the resolve comment, since it might be away
919 // from the clicked
920 // from the clicked
920 if (resolvesCommentId){
921 if (resolvesCommentId){
921 var actionNode = $(commentForm.resolvesActionId).offset();
922 var actionNode = $(commentForm.resolvesActionId).offset();
922
923
923 setTimeout(function() {
924 setTimeout(function() {
924 if (actionNode) {
925 if (actionNode) {
925 $('body, html').animate({scrollTop: actionNode.top}, 10);
926 $('body, html').animate({scrollTop: actionNode.top}, 10);
926 }
927 }
927 }, 100);
928 }, 100);
928 }
929 }
929
930
930 // add dropzone support
931 // add dropzone support
931 var insertAttachmentText = function (cm, attachmentName, attachmentStoreUrl, isRendered) {
932 var insertAttachmentText = function (cm, attachmentName, attachmentStoreUrl, isRendered) {
932 var renderer = templateContext.visual.default_renderer;
933 var renderer = templateContext.visual.default_renderer;
933 if (renderer == 'rst') {
934 if (renderer == 'rst') {
934 var attachmentUrl = '`#{0} <{1}>`_'.format(attachmentName, attachmentStoreUrl);
935 var attachmentUrl = '`#{0} <{1}>`_'.format(attachmentName, attachmentStoreUrl);
935 if (isRendered){
936 if (isRendered){
936 attachmentUrl = '\n.. image:: {0}'.format(attachmentStoreUrl);
937 attachmentUrl = '\n.. image:: {0}'.format(attachmentStoreUrl);
937 }
938 }
938 } else if (renderer == 'markdown') {
939 } else if (renderer == 'markdown') {
939 var attachmentUrl = '[{0}]({1})'.format(attachmentName, attachmentStoreUrl);
940 var attachmentUrl = '[{0}]({1})'.format(attachmentName, attachmentStoreUrl);
940 if (isRendered){
941 if (isRendered){
941 attachmentUrl = '!' + attachmentUrl;
942 attachmentUrl = '!' + attachmentUrl;
942 }
943 }
943 } else {
944 } else {
944 var attachmentUrl = '{}'.format(attachmentStoreUrl);
945 var attachmentUrl = '{}'.format(attachmentStoreUrl);
945 }
946 }
946 cm.replaceRange(attachmentUrl+'\n', CodeMirror.Pos(cm.lastLine()));
947 cm.replaceRange(attachmentUrl+'\n', CodeMirror.Pos(cm.lastLine()));
947
948
948 return false;
949 return false;
949 };
950 };
950
951
951 //see: https://www.dropzonejs.com/#configuration
952 //see: https://www.dropzonejs.com/#configuration
952 var storeUrl = pyroutes.url('repo_commit_comment_attachment_upload',
953 var storeUrl = pyroutes.url('repo_commit_comment_attachment_upload',
953 {'repo_name': templateContext.repo_name,
954 {'repo_name': templateContext.repo_name,
954 'commit_id': templateContext.commit_data.commit_id})
955 'commit_id': templateContext.commit_data.commit_id})
955
956
956 var previewTmpl = $(formElement).find('.comment-attachment-uploader-template').get(0);
957 var previewTmpl = $(formElement).find('.comment-attachment-uploader-template').get(0);
957 if (previewTmpl !== undefined){
958 if (previewTmpl !== undefined){
958 var selectLink = $(formElement).find('.pick-attachment').get(0);
959 var selectLink = $(formElement).find('.pick-attachment').get(0);
959 $(formElement).find('.comment-attachment-uploader').dropzone({
960 $(formElement).find('.comment-attachment-uploader').dropzone({
960 url: storeUrl,
961 url: storeUrl,
961 headers: {"X-CSRF-Token": CSRF_TOKEN},
962 headers: {"X-CSRF-Token": CSRF_TOKEN},
962 paramName: function () {
963 paramName: function () {
963 return "attachment"
964 return "attachment"
964 }, // The name that will be used to transfer the file
965 }, // The name that will be used to transfer the file
965 clickable: selectLink,
966 clickable: selectLink,
966 parallelUploads: 1,
967 parallelUploads: 1,
967 maxFiles: 10,
968 maxFiles: 10,
968 maxFilesize: templateContext.attachment_store.max_file_size_mb,
969 maxFilesize: templateContext.attachment_store.max_file_size_mb,
969 uploadMultiple: false,
970 uploadMultiple: false,
970 autoProcessQueue: true, // if false queue will not be processed automatically.
971 autoProcessQueue: true, // if false queue will not be processed automatically.
971 createImageThumbnails: false,
972 createImageThumbnails: false,
972 previewTemplate: previewTmpl.innerHTML,
973 previewTemplate: previewTmpl.innerHTML,
973
974
974 accept: function (file, done) {
975 accept: function (file, done) {
975 done();
976 done();
976 },
977 },
977 init: function () {
978 init: function () {
978
979
979 this.on("sending", function (file, xhr, formData) {
980 this.on("sending", function (file, xhr, formData) {
980 $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').hide();
981 $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').hide();
981 $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').show();
982 $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').show();
982 });
983 });
983
984
984 this.on("success", function (file, response) {
985 this.on("success", function (file, response) {
985 $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').show();
986 $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').show();
986 $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide();
987 $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide();
987
988
988 var isRendered = false;
989 var isRendered = false;
989 var ext = file.name.split('.').pop();
990 var ext = file.name.split('.').pop();
990 var imageExts = templateContext.attachment_store.image_ext;
991 var imageExts = templateContext.attachment_store.image_ext;
991 if (imageExts.indexOf(ext) !== -1){
992 if (imageExts.indexOf(ext) !== -1){
992 isRendered = true;
993 isRendered = true;
993 }
994 }
994
995
995 insertAttachmentText(cm, file.name, response.repo_fqn_access_path, isRendered)
996 insertAttachmentText(cm, file.name, response.repo_fqn_access_path, isRendered)
996 });
997 });
997
998
998 this.on("error", function (file, errorMessage, xhr) {
999 this.on("error", function (file, errorMessage, xhr) {
999 $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide();
1000 $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide();
1000
1001
1001 var error = null;
1002 var error = null;
1002
1003
1003 if (xhr !== undefined){
1004 if (xhr !== undefined){
1004 var httpStatus = xhr.status + " " + xhr.statusText;
1005 var httpStatus = xhr.status + " " + xhr.statusText;
1005 if (xhr !== undefined && xhr.status >= 500) {
1006 if (xhr !== undefined && xhr.status >= 500) {
1006 error = httpStatus;
1007 error = httpStatus;
1007 }
1008 }
1008 }
1009 }
1009
1010
1010 if (error === null) {
1011 if (error === null) {
1011 error = errorMessage.error || errorMessage || httpStatus;
1012 error = errorMessage.error || errorMessage || httpStatus;
1012 }
1013 }
1013 $(file.previewElement).find('.dz-error-message').html('ERROR: {0}'.format(error));
1014 $(file.previewElement).find('.dz-error-message').html('ERROR: {0}'.format(error));
1014
1015
1015 });
1016 });
1016 }
1017 }
1017 });
1018 });
1018 }
1019 }
1019 return commentForm;
1020 return commentForm;
1020 };
1021 };
1021
1022
1022 this.createGeneralComment = function (lineNo, placeholderText, resolvesCommentId) {
1023 this.createGeneralComment = function (lineNo, placeholderText, resolvesCommentId) {
1023
1024
1024 var tmpl = $('#cb-comment-general-form-template').html();
1025 var tmpl = $('#cb-comment-general-form-template').html();
1025 tmpl = tmpl.format(null, 'general');
1026 tmpl = tmpl.format(null, 'general');
1026 var $form = $(tmpl);
1027 var $form = $(tmpl);
1027
1028
1028 var $formPlaceholder = $('#cb-comment-general-form-placeholder');
1029 var $formPlaceholder = $('#cb-comment-general-form-placeholder');
1029 var curForm = $formPlaceholder.find('form');
1030 var curForm = $formPlaceholder.find('form');
1030 if (curForm){
1031 if (curForm){
1031 curForm.remove();
1032 curForm.remove();
1032 }
1033 }
1033 $formPlaceholder.append($form);
1034 $formPlaceholder.append($form);
1034
1035
1035 var _form = $($form[0]);
1036 var _form = $($form[0]);
1036 var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo'];
1037 var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo'];
1037 var edit = false;
1038 var edit = false;
1038 var comment_id = null;
1039 var comment_id = null;
1039 var commentForm = this.createCommentForm(
1040 var commentForm = this.createCommentForm(
1040 _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId, edit, comment_id);
1041 _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId, edit, comment_id);
1041 commentForm.initStatusChangeSelector();
1042 commentForm.initStatusChangeSelector();
1042
1043
1043 return commentForm;
1044 return commentForm;
1044 };
1045 };
1045
1046
1046 this.editComment = function(node, line_no, f_path) {
1047 this.editComment = function(node, line_no, f_path) {
1047 self.edit = true;
1048 self.edit = true;
1048 var $node = $(node);
1049 var $node = $(node);
1049 var $td = $node.closest('td');
1050 var $td = $node.closest('td');
1050
1051
1051 var $comment = $(node).closest('.comment');
1052 var $comment = $(node).closest('.comment');
1052 var comment_id = $($comment).data('commentId');
1053 var comment_id = $($comment).data('commentId');
1053 var isDraft = $($comment).data('commentDraft');
1054 var isDraft = $($comment).data('commentDraft');
1054 var $editForm = null
1055 var $editForm = null
1055
1056
1056 var $comments = $node.closest('div.inline-comments');
1057 var $comments = $node.closest('div.inline-comments');
1057 var $general_comments = null;
1058 var $general_comments = null;
1058
1059
1059 if($comments.length){
1060 if($comments.length){
1060 // inline comments setup
1061 // inline comments setup
1061 $editForm = $comments.find('.comment-inline-form');
1062 $editForm = $comments.find('.comment-inline-form');
1062 line_no = self.getLineNumber(node)
1063 line_no = self.getLineNumber(node)
1063 }
1064 }
1064 else{
1065 else{
1065 // general comments setup
1066 // general comments setup
1066 $comments = $('#comments');
1067 $comments = $('#comments');
1067 $editForm = $comments.find('.comment-inline-form');
1068 $editForm = $comments.find('.comment-inline-form');
1068 line_no = $comment[0].id
1069 line_no = $comment[0].id
1069 $('#cb-comment-general-form-placeholder').hide();
1070 $('#cb-comment-general-form-placeholder').hide();
1070 }
1071 }
1071
1072
1072 if ($editForm.length === 0) {
1073 if ($editForm.length === 0) {
1073
1074
1074 // unhide all comments if they are hidden for a proper REPLY mode
1075 // unhide all comments if they are hidden for a proper REPLY mode
1075 var $filediff = $node.closest('.filediff');
1076 var $filediff = $node.closest('.filediff');
1076 $filediff.removeClass('hide-comments');
1077 $filediff.removeClass('hide-comments');
1077
1078
1078 $editForm = self.createNewFormWrapper(f_path, line_no);
1079 $editForm = self.createNewFormWrapper(f_path, line_no);
1079 if(f_path && line_no) {
1080 if(f_path && line_no) {
1080 $editForm.addClass('comment-inline-form-edit')
1081 $editForm.addClass('comment-inline-form-edit')
1081 }
1082 }
1082
1083
1083 $comment.after($editForm)
1084 $comment.after($editForm)
1084
1085
1085 var _form = $($editForm[0]).find('form');
1086 var _form = $($editForm[0]).find('form');
1086 var autocompleteActions = ['as_note',];
1087 var autocompleteActions = ['as_note',];
1087 var commentForm = this.createCommentForm(
1088 var commentForm = this.createCommentForm(
1088 _form, line_no, '', autocompleteActions, resolvesCommentId,
1089 _form, line_no, '', autocompleteActions, resolvesCommentId,
1089 this.edit, comment_id);
1090 this.edit, comment_id);
1090 var old_comment_text_binary = $comment.attr('data-comment-text');
1091 var old_comment_text_binary = $comment.attr('data-comment-text');
1091 var old_comment_text = b64DecodeUnicode(old_comment_text_binary);
1092 var old_comment_text = b64DecodeUnicode(old_comment_text_binary);
1092 commentForm.cm.setValue(old_comment_text);
1093 commentForm.cm.setValue(old_comment_text);
1093 $comment.hide();
1094 $comment.hide();
1094 tooltipActivate();
1095 tooltipActivate();
1095
1096
1096 // set a CUSTOM submit handler for inline comment edit action.
1097 // set a CUSTOM submit handler for inline comment edit action.
1097 commentForm.setHandleFormSubmit(function(o) {
1098 commentForm.setHandleFormSubmit(function(o) {
1098 var text = commentForm.cm.getValue();
1099 var text = commentForm.cm.getValue();
1099 var commentType = commentForm.getCommentType();
1100 var commentType = commentForm.getCommentType();
1100
1101
1101 if (text === "") {
1102 if (text === "") {
1102 return;
1103 return;
1103 }
1104 }
1104
1105
1105 if (old_comment_text == text) {
1106 if (old_comment_text == text) {
1106 SwalNoAnimation.fire({
1107 SwalNoAnimation.fire({
1107 title: 'Unable to edit comment',
1108 title: 'Unable to edit comment',
1108 html: _gettext('Comment body was not changed.'),
1109 html: _gettext('Comment body was not changed.'),
1109 });
1110 });
1110 return;
1111 return;
1111 }
1112 }
1112 var excludeCancelBtn = false;
1113 var excludeCancelBtn = false;
1113 var submitEvent = true;
1114 var submitEvent = true;
1114 commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
1115 commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
1115 commentForm.cm.setOption("readOnly", true);
1116 commentForm.cm.setOption("readOnly", true);
1116
1117
1117 // Read last version known
1118 // Read last version known
1118 var versionSelector = $('#comment_versions_{0}'.format(comment_id));
1119 var versionSelector = $('#comment_versions_{0}'.format(comment_id));
1119 var version = versionSelector.data('lastVersion');
1120 var version = versionSelector.data('lastVersion');
1120
1121
1121 if (!version) {
1122 if (!version) {
1122 version = 0;
1123 version = 0;
1123 }
1124 }
1124
1125
1125 var postData = {
1126 var postData = {
1126 'text': text,
1127 'text': text,
1127 'f_path': f_path,
1128 'f_path': f_path,
1128 'line': line_no,
1129 'line': line_no,
1129 'comment_type': commentType,
1130 'comment_type': commentType,
1130 'draft': isDraft,
1131 'draft': isDraft,
1131 'version': version,
1132 'version': version,
1132 'csrf_token': CSRF_TOKEN
1133 'csrf_token': CSRF_TOKEN
1133 };
1134 };
1134
1135
1135 var submitSuccessCallback = function(json_data) {
1136 var submitSuccessCallback = function(json_data) {
1136 $editForm.remove();
1137 $editForm.remove();
1137 $comment.show();
1138 $comment.show();
1138 var postData = {
1139 var postData = {
1139 'text': text,
1140 'text': text,
1140 'renderer': $comment.attr('data-comment-renderer'),
1141 'renderer': $comment.attr('data-comment-renderer'),
1141 'csrf_token': CSRF_TOKEN
1142 'csrf_token': CSRF_TOKEN
1142 };
1143 };
1143
1144
1144 /* Inject new edited version selector */
1145 /* Inject new edited version selector */
1145 var updateCommentVersionDropDown = function () {
1146 var updateCommentVersionDropDown = function () {
1146 var versionSelectId = '#comment_versions_'+comment_id;
1147 var versionSelectId = '#comment_versions_'+comment_id;
1147 var preLoadVersionData = [
1148 var preLoadVersionData = [
1148 {
1149 {
1149 id: json_data['comment_version'],
1150 id: json_data['comment_version'],
1150 text: "v{0}".format(json_data['comment_version']),
1151 text: "v{0}".format(json_data['comment_version']),
1151 action: function () {
1152 action: function () {
1152 Rhodecode.comments.showVersion(
1153 Rhodecode.comments.showVersion(
1153 json_data['comment_id'],
1154 json_data['comment_id'],
1154 json_data['comment_history_id']
1155 json_data['comment_history_id']
1155 )
1156 )
1156 },
1157 },
1157 comment_version: json_data['comment_version'],
1158 comment_version: json_data['comment_version'],
1158 comment_author_username: json_data['comment_author_username'],
1159 comment_author_username: json_data['comment_author_username'],
1159 comment_author_gravatar: json_data['comment_author_gravatar'],
1160 comment_author_gravatar: json_data['comment_author_gravatar'],
1160 comment_created_on: json_data['comment_created_on'],
1161 comment_created_on: json_data['comment_created_on'],
1161 },
1162 },
1162 ]
1163 ]
1163
1164
1164
1165
1165 if ($(versionSelectId).data('select2')) {
1166 if ($(versionSelectId).data('select2')) {
1166 var oldData = $(versionSelectId).data('select2').opts.data.results;
1167 var oldData = $(versionSelectId).data('select2').opts.data.results;
1167 $(versionSelectId).select2("destroy");
1168 $(versionSelectId).select2("destroy");
1168 preLoadVersionData = oldData.concat(preLoadVersionData)
1169 preLoadVersionData = oldData.concat(preLoadVersionData)
1169 }
1170 }
1170
1171
1171 initVersionSelector(versionSelectId, {results: preLoadVersionData});
1172 initVersionSelector(versionSelectId, {results: preLoadVersionData});
1172
1173
1173 $comment.attr('data-comment-text', utf8ToB64(text));
1174 $comment.attr('data-comment-text', utf8ToB64(text));
1174
1175
1175 var versionSelector = $('#comment_versions_'+comment_id);
1176 var versionSelector = $('#comment_versions_'+comment_id);
1176
1177
1177 // set lastVersion so we know our last edit version
1178 // set lastVersion so we know our last edit version
1178 versionSelector.data('lastVersion', json_data['comment_version'])
1179 versionSelector.data('lastVersion', json_data['comment_version'])
1179 versionSelector.parent().show();
1180 versionSelector.parent().show();
1180 }
1181 }
1181 updateCommentVersionDropDown();
1182 updateCommentVersionDropDown();
1182
1183
1183 // by default we reset state of comment preserving the text
1184 // by default we reset state of comment preserving the text
1184 var failRenderCommit = function(jqXHR, textStatus, errorThrown) {
1185 var failRenderCommit = function(jqXHR, textStatus, errorThrown) {
1185 var prefix = "Error while editing this comment.\n"
1186 var prefix = "Error while editing this comment.\n"
1186 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1187 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1187 ajaxErrorSwal(message);
1188 ajaxErrorSwal(message);
1188 };
1189 };
1189
1190
1190 var successRenderCommit = function(o){
1191 var successRenderCommit = function(o){
1191 $comment.show();
1192 $comment.show();
1192 $comment[0].lastElementChild.innerHTML = o;
1193 $comment[0].lastElementChild.innerHTML = o;
1193 };
1194 };
1194
1195
1195 var previewUrl = pyroutes.url(
1196 var previewUrl = pyroutes.url(
1196 'repo_commit_comment_preview',
1197 'repo_commit_comment_preview',
1197 {'repo_name': templateContext.repo_name,
1198 {'repo_name': templateContext.repo_name,
1198 'commit_id': templateContext.commit_data.commit_id});
1199 'commit_id': templateContext.commit_data.commit_id});
1199
1200
1200 _submitAjaxPOST(
1201 _submitAjaxPOST(
1201 previewUrl, postData, successRenderCommit, failRenderCommit
1202 previewUrl, postData, successRenderCommit, failRenderCommit
1202 );
1203 );
1203
1204
1204 try {
1205 try {
1205 var html = json_data.rendered_text;
1206 var html = json_data.rendered_text;
1206 var lineno = json_data.line_no;
1207 var lineno = json_data.line_no;
1207 var target_id = json_data.target_id;
1208 var target_id = json_data.target_id;
1208
1209
1209 $comments.find('.cb-comment-add-button').before(html);
1210 $comments.find('.cb-comment-add-button').before(html);
1210
1211
1211 // run global callback on submit
1212 // run global callback on submit
1212 commentForm.globalSubmitSuccessCallback({draft: isDraft, comment_id: comment_id});
1213 commentForm.globalSubmitSuccessCallback({draft: isDraft, comment_id: comment_id});
1213
1214
1214 } catch (e) {
1215 } catch (e) {
1215 console.error(e);
1216 console.error(e);
1216 }
1217 }
1217
1218
1218 // re trigger the linkification of next/prev navigation
1219 // re trigger the linkification of next/prev navigation
1219 linkifyComments($('.inline-comment-injected'));
1220 linkifyComments($('.inline-comment-injected'));
1220 timeagoActivate();
1221 timeagoActivate();
1221 tooltipActivate();
1222 tooltipActivate();
1222
1223
1223 if (window.updateSticky !== undefined) {
1224 if (window.updateSticky !== undefined) {
1224 // potentially our comments change the active window size, so we
1225 // potentially our comments change the active window size, so we
1225 // notify sticky elements
1226 // notify sticky elements
1226 updateSticky()
1227 updateSticky()
1227 }
1228 }
1228
1229
1229 if (window.refreshAllComments !== undefined && !isDraft) {
1230 if (window.refreshAllComments !== undefined && !isDraft) {
1230 // if we have this handler, run it, and refresh all comments boxes
1231 // if we have this handler, run it, and refresh all comments boxes
1231 refreshAllComments()
1232 refreshAllComments()
1232 }
1233 }
1233 else if (window.refreshDraftComments !== undefined && isDraft) {
1234 else if (window.refreshDraftComments !== undefined && isDraft) {
1234 // if we have this handler, run it, and refresh all comments boxes
1235 // if we have this handler, run it, and refresh all comments boxes
1235 refreshDraftComments();
1236 refreshDraftComments();
1236 }
1237 }
1237
1238
1238 commentForm.setActionButtonsDisabled(false);
1239 commentForm.setActionButtonsDisabled(false);
1239
1240
1240 };
1241 };
1241
1242
1242 var submitFailCallback = function(jqXHR, textStatus, errorThrown) {
1243 var submitFailCallback = function(jqXHR, textStatus, errorThrown) {
1243 var prefix = "Error while editing comment.\n"
1244 var prefix = "Error while editing comment.\n"
1244 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1245 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1245 if (jqXHR.status == 409){
1246 if (jqXHR.status == 409){
1246 message = 'This comment was probably changed somewhere else. Please reload the content of this comment.'
1247 message = 'This comment was probably changed somewhere else. Please reload the content of this comment.'
1247 ajaxErrorSwal(message, 'Comment version mismatch.');
1248 ajaxErrorSwal(message, 'Comment version mismatch.');
1248 } else {
1249 } else {
1249 ajaxErrorSwal(message);
1250 ajaxErrorSwal(message);
1250 }
1251 }
1251
1252
1252 commentForm.resetCommentFormState(text)
1253 commentForm.resetCommentFormState(text)
1253 };
1254 };
1254 commentForm.submitAjaxPOST(
1255 commentForm.submitAjaxPOST(
1255 commentForm.submitUrl, postData,
1256 commentForm.submitUrl, postData,
1256 submitSuccessCallback,
1257 submitSuccessCallback,
1257 submitFailCallback);
1258 submitFailCallback);
1258 });
1259 });
1259 }
1260 }
1260
1261
1261 $editForm.addClass('comment-inline-form-open');
1262 $editForm.addClass('comment-inline-form-open');
1262 };
1263 };
1263
1264
1264 this.attachComment = function(json_data) {
1265 this.attachComment = function(json_data) {
1265 var self = this;
1266 var self = this;
1266 $.each(json_data, function(idx, val) {
1267 $.each(json_data, function(idx, val) {
1267 var json_data_elem = [val]
1268 var json_data_elem = [val]
1268 var isInline = val.comment_f_path && val.comment_lineno
1269 var isInline = val.comment_f_path && val.comment_lineno
1269
1270
1270 if (isInline) {
1271 if (isInline) {
1271 self.attachInlineComment(json_data_elem)
1272 self.attachInlineComment(json_data_elem)
1272 } else {
1273 } else {
1273 self.attachGeneralComment(json_data_elem)
1274 self.attachGeneralComment(json_data_elem)
1274 }
1275 }
1275 })
1276 })
1276
1277
1277 }
1278 }
1278
1279
1279 this.attachGeneralComment = function(json_data) {
1280 this.attachGeneralComment = function(json_data) {
1280 $.each(json_data, function(idx, val) {
1281 $.each(json_data, function(idx, val) {
1281 $('#injected_page_comments').append(val.rendered_text);
1282 $('#injected_page_comments').append(val.rendered_text);
1282 })
1283 })
1283 }
1284 }
1284
1285
1285 this.attachInlineComment = function(json_data) {
1286 this.attachInlineComment = function(json_data) {
1286
1287
1287 $.each(json_data, function (idx, val) {
1288 $.each(json_data, function (idx, val) {
1288 var line_qry = '*[data-line-no="{0}"]'.format(val.line_no);
1289 var line_qry = '*[data-line-no="{0}"]'.format(val.line_no);
1289 var html = val.rendered_text;
1290 var html = val.rendered_text;
1290 var $inlineComments = $('#' + val.target_id)
1291 var $inlineComments = $('#' + val.target_id)
1291 .find(line_qry)
1292 .find(line_qry)
1292 .find('.inline-comments');
1293 .find('.inline-comments');
1293
1294
1294 var lastComment = $inlineComments.find('.comment-inline').last();
1295 var lastComment = $inlineComments.find('.comment-inline').last();
1295
1296
1296 if (lastComment.length === 0) {
1297 if (lastComment.length === 0) {
1297 // first comment, we append simply
1298 // first comment, we append simply
1298 $inlineComments.find('.reply-thread-container-wrapper').before(html);
1299 $inlineComments.find('.reply-thread-container-wrapper').before(html);
1299 } else {
1300 } else {
1300 $(lastComment).after(html)
1301 $(lastComment).after(html)
1301 }
1302 }
1302
1303
1303 })
1304 })
1304
1305
1305 };
1306 };
1306
1307
1307 this.createNewFormWrapper = function(f_path, line_no) {
1308 this.createNewFormWrapper = function(f_path, line_no) {
1308 // create a new reply HTML form from template
1309 // create a new reply HTML form from template
1309 var tmpl = $('#cb-comment-inline-form-template').html();
1310 var tmpl = $('#cb-comment-inline-form-template').html();
1310 tmpl = tmpl.format(escapeHtml(f_path), line_no);
1311 tmpl = tmpl.format(escapeHtml(f_path), line_no);
1311 return $(tmpl);
1312 return $(tmpl);
1312 }
1313 }
1313
1314
1314 this.markCommentResolved = function(commentId) {
1315 this.markCommentResolved = function(commentId) {
1315 $('#comment-label-{0}'.format(commentId)).find('.resolved').show();
1316 $('#comment-label-{0}'.format(commentId)).find('.resolved').show();
1316 $('#comment-label-{0}'.format(commentId)).find('.resolve').hide();
1317 $('#comment-label-{0}'.format(commentId)).find('.resolve').hide();
1317 };
1318 };
1318
1319
    // Open (or create) an inline comment form for the diff line that `node`
    // belongs to, wire a custom AJAX submit handler onto it, and reveal it.
    // node: DOM element inside the target table cell (button / comment elem).
    // f_path / line_no: file path and line number the comment is anchored to.
    // resolutionComment: comment id being resolved, or falsy for a new thread.
    this.createComment = function(node, f_path, line_no, resolutionComment) {
        self.edit = false;
        var $node = $(node);
        var $td = $node.closest('td');
        var resolvesCommentId = resolutionComment || null;

        var $replyForm = $td.find('.comment-inline-form');

        // if form isn't existing, we're generating a new one and injecting it.
        if ($replyForm.length === 0) {

            // unhide/expand all comments if they are hidden for a proper REPLY mode
            self.toggleLineComments($node, true);

            $replyForm = self.createNewFormWrapper(f_path, line_no);

            var $comments = $td.find('.inline-comments');

            // There aren't any comments, we init the `.inline-comments` with `reply-thread-container` first
            if ($comments.length===0) {
                var replBtn = '<button class="cb-comment-add-button" onclick="return Rhodecode.comments.createComment(this, \'{0}\', \'{1}\', null)">Reply...</button>'.format(escapeHtml(f_path), line_no)
                var $reply_container = $('#cb-comments-inline-container-template')
                $reply_container.find('button.cb-comment-add-button').replaceWith(replBtn);
                $td.append($($reply_container).html());
            }

            // default comment button exists, so we prepend the form for leaving initial comment
            $td.find('.cb-comment-add-button').before($replyForm);
            // set marker, that we have a open form
            var $replyWrapper = $td.find('.reply-thread-container-wrapper')
            $replyWrapper.addClass('comment-form-active');

            // NOTE(review): `$comments` was captured BEFORE the container above
            // may have been injected, so on a brand-new thread this is an empty
            // set and the outdated-check below is a no-op — confirm intended.
            var lastComment = $comments.find('.comment-inline').last();
            if ($(lastComment).hasClass('comment-outdated')) {
                $replyWrapper.show();
            }

            var _form = $($replyForm[0]).find('form');
            var autocompleteActions = ['as_note', 'as_todo'];
            var comment_id=null;
            var placeholderText = _gettext('Leave a comment on file {0} line {1}.').format(f_path, line_no);
            var commentForm = self.createCommentForm(
                _form, line_no, placeholderText, autocompleteActions, resolvesCommentId,
                self.edit, comment_id);

            // set a CUSTOM submit handler for inline comments.
            commentForm.setHandleFormSubmit(function(o) {
                var text = commentForm.cm.getValue();
                var commentType = commentForm.getCommentType();
                var resolvesCommentId = commentForm.getResolvesId();
                var isDraft = commentForm.getDraftState();

                // Silently ignore submits with an empty editor.
                if (text === "") {
                    return;
                }

                // Guard against a form that lost its line/file anchoring.
                if (line_no === undefined) {
                    alert('Error: unable to fetch line number for this inline comment !');
                    return;
                }

                if (f_path === undefined) {
                    alert('Error: unable to fetch file path for this inline comment !');
                    return;
                }

                // Lock the UI while the POST is in flight.
                var excludeCancelBtn = false;
                var submitEvent = true;
                commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
                commentForm.cm.setOption("readOnly", true);
                var postData = {
                    'text': text,
                    'f_path': f_path,
                    'line': line_no,
                    'comment_type': commentType,
                    'draft': isDraft,
                    'csrf_token': CSRF_TOKEN
                };
                if (resolvesCommentId){
                    postData['resolves_comment_id'] = resolvesCommentId;
                }

                // submitSuccess for inline commits
                var submitSuccessCallback = function(json_data) {

                    // The temporary form is replaced by the server-rendered comment.
                    $replyForm.remove();
                    $td.find('.reply-thread-container-wrapper').removeClass('comment-form-active');

                    try {

                        // inject newly created comments, json_data is {<comment_id>: {}}
                        self.attachInlineComment(json_data)

                        //mark visually which comment was resolved
                        if (resolvesCommentId) {
                            self.markCommentResolved(resolvesCommentId);
                        }

                        // run global callback on submit
                        commentForm.globalSubmitSuccessCallback({
                            draft: isDraft,
                            comment_id: comment_id
                        });

                    } catch (e) {
                        console.error(e);
                    }

                    if (window.updateSticky !== undefined) {
                        // potentially our comments change the active window size, so we
                        // notify sticky elements
                        updateSticky()
                    }

                    if (window.refreshAllComments !== undefined && !isDraft) {
                        // if we have this handler, run it, and refresh all comments boxes
                        refreshAllComments()
                    }
                    else if (window.refreshDraftComments !== undefined && isDraft) {
                        // if we have this handler, run it, and refresh all comments boxes
                        refreshDraftComments();
                    }

                    commentForm.setActionButtonsDisabled(false);

                    // re trigger the linkification of next/prev navigation
                    linkifyComments($('.inline-comment-injected'));
                    timeagoActivate();
                    tooltipActivate();
                };

                // On failure, surface the error and re-enable the form with
                // the user's text preserved.
                var submitFailCallback = function(jqXHR, textStatus, errorThrown) {
                    var prefix = "Error while submitting comment.\n"
                    var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
                    ajaxErrorSwal(message);
                    commentForm.resetCommentFormState(text)
                };

                commentForm.submitAjaxPOST(
                    commentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback);
            });
        }

        // Finally "open" our reply form, since we know there are comments and we have the "attached" old form
        $replyForm.addClass('comment-inline-form-open');
        tooltipActivate();
    };
1466
1467
1467 this.createResolutionComment = function(commentId){
1468 this.createResolutionComment = function(commentId){
1468 // hide the trigger text
1469 // hide the trigger text
1469 $('#resolve-comment-{0}'.format(commentId)).hide();
1470 $('#resolve-comment-{0}'.format(commentId)).hide();
1470
1471
1471 var comment = $('#comment-'+commentId);
1472 var comment = $('#comment-'+commentId);
1472 var commentData = comment.data();
1473 var commentData = comment.data();
1473
1474
1474 if (commentData.commentInline) {
1475 if (commentData.commentInline) {
1475 var f_path = commentData.commentFPath;
1476 var f_path = commentData.commentFPath;
1476 var line_no = commentData.commentLineNo;
1477 var line_no = commentData.commentLineNo;
1477 this.createComment(comment, f_path, line_no, commentId)
1478 this.createComment(comment, f_path, line_no, commentId)
1478 } else {
1479 } else {
1479 this.createGeneralComment('general', "$placeholder", commentId)
1480 this.createGeneralComment('general', "$placeholder", commentId)
1480 }
1481 }
1481
1482
1482 return false;
1483 return false;
1483 };
1484 };
1484
1485
1485 this.submitResolution = function(commentId){
1486 this.submitResolution = function(commentId){
1486 var form = $('#resolve_comment_{0}'.format(commentId)).closest('form');
1487 var form = $('#resolve_comment_{0}'.format(commentId)).closest('form');
1487 var commentForm = form.get(0).CommentForm;
1488 var commentForm = form.get(0).CommentForm;
1488
1489
1489 var cm = commentForm.getCmInstance();
1490 var cm = commentForm.getCmInstance();
1490 var renderer = templateContext.visual.default_renderer;
1491 var renderer = templateContext.visual.default_renderer;
1491 if (renderer == 'rst'){
1492 if (renderer == 'rst'){
1492 var commentUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentForm.selfUrl);
1493 var commentUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentForm.selfUrl);
1493 } else if (renderer == 'markdown') {
1494 } else if (renderer == 'markdown') {
1494 var commentUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentForm.selfUrl);
1495 var commentUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentForm.selfUrl);
1495 } else {
1496 } else {
1496 var commentUrl = '{1}#comment-{0}'.format(commentId, commentForm.selfUrl);
1497 var commentUrl = '{1}#comment-{0}'.format(commentId, commentForm.selfUrl);
1497 }
1498 }
1498
1499
1499 cm.setValue(_gettext('TODO from comment {0} was fixed.').format(commentUrl));
1500 cm.setValue(_gettext('TODO from comment {0} was fixed.').format(commentUrl));
1500 form.submit();
1501 form.submit();
1501 return false;
1502 return false;
1502 };
1503 };
1503
1504
1504 this.resolveTodo = function (elem, todoId) {
1505 this.resolveTodo = function (elem, todoId) {
1505 var commentId = todoId;
1506 var commentId = todoId;
1506
1507
1507 SwalNoAnimation.fire({
1508 SwalNoAnimation.fire({
1508 title: 'Resolve TODO {0}'.format(todoId),
1509 title: 'Resolve TODO {0}'.format(todoId),
1509 showCancelButton: true,
1510 showCancelButton: true,
1510 confirmButtonText: _gettext('Yes'),
1511 confirmButtonText: _gettext('Yes'),
1511 showLoaderOnConfirm: true,
1512 showLoaderOnConfirm: true,
1512
1513
1513 allowOutsideClick: function () {
1514 allowOutsideClick: function () {
1514 !Swal.isLoading()
1515 !Swal.isLoading()
1515 },
1516 },
1516 preConfirm: function () {
1517 preConfirm: function () {
1517 var comment = $('#comment-' + commentId);
1518 var comment = $('#comment-' + commentId);
1518 var commentData = comment.data();
1519 var commentData = comment.data();
1519
1520
1520 var f_path = null
1521 var f_path = null
1521 var line_no = null
1522 var line_no = null
1522 if (commentData.commentInline) {
1523 if (commentData.commentInline) {
1523 f_path = commentData.commentFPath;
1524 f_path = commentData.commentFPath;
1524 line_no = commentData.commentLineNo;
1525 line_no = commentData.commentLineNo;
1525 }
1526 }
1526
1527
1527 var renderer = templateContext.visual.default_renderer;
1528 var renderer = templateContext.visual.default_renderer;
1528 var commentBoxUrl = '{1}#comment-{0}'.format(commentId);
1529 var commentBoxUrl = '{1}#comment-{0}'.format(commentId);
1529
1530
1530 // Pull request case
1531 // Pull request case
1531 if (templateContext.pull_request_data.pull_request_id !== null) {
1532 if (templateContext.pull_request_data.pull_request_id !== null) {
1532 var commentUrl = pyroutes.url('pullrequest_comment_create',
1533 var commentUrl = pyroutes.url('pullrequest_comment_create',
1533 {
1534 {
1534 'repo_name': templateContext.repo_name,
1535 'repo_name': templateContext.repo_name,
1535 'pull_request_id': templateContext.pull_request_data.pull_request_id,
1536 'pull_request_id': templateContext.pull_request_data.pull_request_id,
1536 'comment_id': commentId
1537 'comment_id': commentId
1537 });
1538 });
1538 } else {
1539 } else {
1539 var commentUrl = pyroutes.url('repo_commit_comment_create',
1540 var commentUrl = pyroutes.url('repo_commit_comment_create',
1540 {
1541 {
1541 'repo_name': templateContext.repo_name,
1542 'repo_name': templateContext.repo_name,
1542 'commit_id': templateContext.commit_data.commit_id,
1543 'commit_id': templateContext.commit_data.commit_id,
1543 'comment_id': commentId
1544 'comment_id': commentId
1544 });
1545 });
1545 }
1546 }
1546
1547
1547 if (renderer === 'rst') {
1548 if (renderer === 'rst') {
1548 commentBoxUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentUrl);
1549 commentBoxUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentUrl);
1549 } else if (renderer === 'markdown') {
1550 } else if (renderer === 'markdown') {
1550 commentBoxUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentUrl);
1551 commentBoxUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentUrl);
1551 }
1552 }
1552 var resolveText = _gettext('TODO from comment {0} was fixed.').format(commentBoxUrl);
1553 var resolveText = _gettext('TODO from comment {0} was fixed.').format(commentBoxUrl);
1553
1554
1554 var postData = {
1555 var postData = {
1555 text: resolveText,
1556 text: resolveText,
1556 comment_type: 'note',
1557 comment_type: 'note',
1557 draft: false,
1558 draft: false,
1558 csrf_token: CSRF_TOKEN,
1559 csrf_token: CSRF_TOKEN,
1559 resolves_comment_id: commentId
1560 resolves_comment_id: commentId
1560 }
1561 }
1561 if (commentData.commentInline) {
1562 if (commentData.commentInline) {
1562 postData['f_path'] = f_path;
1563 postData['f_path'] = f_path;
1563 postData['line'] = line_no;
1564 postData['line'] = line_no;
1564 }
1565 }
1565
1566
1566 return new Promise(function (resolve, reject) {
1567 return new Promise(function (resolve, reject) {
1567 $.ajax({
1568 $.ajax({
1568 type: 'POST',
1569 type: 'POST',
1569 data: postData,
1570 data: postData,
1570 url: commentUrl,
1571 url: commentUrl,
1571 headers: {'X-PARTIAL-XHR': true}
1572 headers: {'X-PARTIAL-XHR': true}
1572 })
1573 })
1573 .done(function (data) {
1574 .done(function (data) {
1574 resolve(data);
1575 resolve(data);
1575 })
1576 })
1576 .fail(function (jqXHR, textStatus, errorThrown) {
1577 .fail(function (jqXHR, textStatus, errorThrown) {
1577 var prefix = "Error while resolving TODO.\n"
1578 var prefix = "Error while resolving TODO.\n"
1578 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1579 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1579 ajaxErrorSwal(message);
1580 ajaxErrorSwal(message);
1580 });
1581 });
1581 })
1582 })
1582 }
1583 }
1583
1584
1584 })
1585 })
1585 .then(function (result) {
1586 .then(function (result) {
1586 var success = function (json_data) {
1587 var success = function (json_data) {
1587 resolvesCommentId = commentId;
1588 resolvesCommentId = commentId;
1588 var commentResolved = json_data[Object.keys(json_data)[0]]
1589 var commentResolved = json_data[Object.keys(json_data)[0]]
1589
1590
1590 try {
1591 try {
1591
1592
1592 if (commentResolved.f_path) {
1593 if (commentResolved.f_path) {
1593 // inject newly created comments, json_data is {<comment_id>: {}}
1594 // inject newly created comments, json_data is {<comment_id>: {}}
1594 self.attachInlineComment(json_data)
1595 self.attachInlineComment(json_data)
1595 } else {
1596 } else {
1596 self.attachGeneralComment(json_data)
1597 self.attachGeneralComment(json_data)
1597 }
1598 }
1598
1599
1599 //mark visually which comment was resolved
1600 //mark visually which comment was resolved
1600 if (resolvesCommentId) {
1601 if (resolvesCommentId) {
1601 self.markCommentResolved(resolvesCommentId);
1602 self.markCommentResolved(resolvesCommentId);
1602 }
1603 }
1603
1604
1604 // run global callback on submit
1605 // run global callback on submit
1605 if (window.commentFormGlobalSubmitSuccessCallback !== undefined) {
1606 if (window.commentFormGlobalSubmitSuccessCallback !== undefined) {
1606 commentFormGlobalSubmitSuccessCallback({
1607 commentFormGlobalSubmitSuccessCallback({
1607 draft: false,
1608 draft: false,
1608 comment_id: commentId
1609 comment_id: commentId
1609 });
1610 });
1610 }
1611 }
1611
1612
1612 } catch (e) {
1613 } catch (e) {
1613 console.error(e);
1614 console.error(e);
1614 }
1615 }
1615
1616
1616 if (window.updateSticky !== undefined) {
1617 if (window.updateSticky !== undefined) {
1617 // potentially our comments change the active window size, so we
1618 // potentially our comments change the active window size, so we
1618 // notify sticky elements
1619 // notify sticky elements
1619 updateSticky()
1620 updateSticky()
1620 }
1621 }
1621
1622
1622 if (window.refreshAllComments !== undefined) {
1623 if (window.refreshAllComments !== undefined) {
1623 // if we have this handler, run it, and refresh all comments boxes
1624 // if we have this handler, run it, and refresh all comments boxes
1624 refreshAllComments()
1625 refreshAllComments()
1625 }
1626 }
1626 // re trigger the linkification of next/prev navigation
1627 // re trigger the linkification of next/prev navigation
1627 linkifyComments($('.inline-comment-injected'));
1628 linkifyComments($('.inline-comment-injected'));
1628 timeagoActivate();
1629 timeagoActivate();
1629 tooltipActivate();
1630 tooltipActivate();
1630 };
1631 };
1631
1632
1632 if (result.value) {
1633 if (result.value) {
1633 $(elem).remove();
1634 $(elem).remove();
1634 success(result.value)
1635 success(result.value)
1635 }
1636 }
1636 })
1637 })
1637 };
1638 };
1638
1639
1639 };
1640 };
1640
1641
// Render the comment-help hovercard template for the given renderer name.
window.commentHelp = function (renderer) {
    return renderTemplate('commentHelpHovercard', {'renderer': renderer})
}
General Comments 0
You need to be logged in to leave comments. Login now