Show More
@@ -1,619 +1,620 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Database creation, and setup module for RhodeCode Enterprise. Used for creation |
|
23 | 23 | of database as well as for migration operations |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import sys |
|
28 | 28 | import time |
|
29 | 29 | import uuid |
|
30 | 30 | import logging |
|
31 | 31 | import getpass |
|
32 | 32 | from os.path import dirname as dn, join as jn |
|
33 | 33 | |
|
34 | 34 | from sqlalchemy.engine import create_engine |
|
35 | 35 | |
|
36 | 36 | from rhodecode import __dbversion__ |
|
37 | 37 | from rhodecode.model import init_model |
|
38 | 38 | from rhodecode.model.user import UserModel |
|
39 | 39 | from rhodecode.model.db import ( |
|
40 | 40 | User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm, |
|
41 | 41 | DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository) |
|
42 | 42 | from rhodecode.model.meta import Session, Base |
|
43 | 43 | from rhodecode.model.permission import PermissionModel |
|
44 | 44 | from rhodecode.model.repo import RepoModel |
|
45 | 45 | from rhodecode.model.repo_group import RepoGroupModel |
|
46 | 46 | from rhodecode.model.settings import SettingsModel |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
def notify(msg):
    """
    Print a migration notification banner: the message uppercased,
    framed above and below by a line of '*' characters.

    :param msg: text of the notification
    """
    border = '*' * (len(msg) + (4 * 2))  # 4 chars of framing on each side
    # NOTE: the original code called .upper() on the *return value* of
    # print() -- a Python 2 statement-print quirk that raises
    # AttributeError under Python 3. Uppercase the string before printing.
    print(('\n%s\n*** %s ***\n%s' % (border, msg, border)).upper())
|
58 | 58 | |
|
59 | 59 | |
|
class DbManage(object):
    """Database creation, setup and migration driver for RhodeCode."""

    def __init__(self, log_sql, dbconf, root, tests=False,
                 SESSION=None, cli_args=None):
        """
        :param log_sql: echo executed SQL statements to the log
        :param dbconf: database connection URI; last path segment is
            taken as the database name
        :param root: application root path
        :param tests: True when running under the test suite (skips
            interactive confirmations)
        :param SESSION: optional pre-built SQLAlchemy session to reuse
        :param cli_args: dict of command-line overrides (username,
            password, force_ask, ...); defaults to an empty dict
        """
        self.dbname = dbconf.split('/')[-1]
        self.tests = tests
        self.root = root
        self.dburi = dbconf
        self.log_sql = log_sql
        self.db_exists = False
        # NOTE: the former default was a mutable dict literal ({}),
        # shared across every instance created without cli_args; use a
        # None sentinel to give each instance its own dict.
        self.cli_args = cli_args or {}
        self.init_db(SESSION=SESSION)
        self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
|
73 | 73 | |
|
74 | 74 | def get_ask_ok_func(self, param): |
|
75 | 75 | if param not in [None]: |
|
76 | 76 | # return a function lambda that has a default set to param |
|
77 | 77 | return lambda *args, **kwargs: param |
|
78 | 78 | else: |
|
79 | 79 | from rhodecode.lib.utils import ask_ok |
|
80 | 80 | return ask_ok |
|
81 | 81 | |
|
82 | 82 | def init_db(self, SESSION=None): |
|
83 | 83 | if SESSION: |
|
84 | 84 | self.sa = SESSION |
|
85 | 85 | else: |
|
86 | 86 | # init new sessions |
|
87 | 87 | engine = create_engine(self.dburi, echo=self.log_sql) |
|
88 | 88 | init_model(engine) |
|
89 | 89 | self.sa = Session() |
|
90 | 90 | |
|
91 | 91 | def create_tables(self, override=False): |
|
92 | 92 | """ |
|
93 | 93 | Create a auth database |
|
94 | 94 | """ |
|
95 | 95 | |
|
96 | 96 | log.info("Existing database with the same name is going to be destroyed.") |
|
97 | 97 | log.info("Setup command will run DROP ALL command on that database.") |
|
98 | 98 | if self.tests: |
|
99 | 99 | destroy = True |
|
100 | 100 | else: |
|
101 | 101 | destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]') |
|
102 | 102 | if not destroy: |
|
103 | 103 | log.info('Nothing done.') |
|
104 | 104 | sys.exit(0) |
|
105 | 105 | if destroy: |
|
106 | 106 | Base.metadata.drop_all() |
|
107 | 107 | |
|
108 | 108 | checkfirst = not override |
|
109 | 109 | Base.metadata.create_all(checkfirst=checkfirst) |
|
110 | 110 | log.info('Created tables for %s' % self.dbname) |
|
111 | 111 | |
|
112 | 112 | def set_db_version(self): |
|
113 | 113 | ver = DbMigrateVersion() |
|
114 | 114 | ver.version = __dbversion__ |
|
115 | 115 | ver.repository_id = 'rhodecode_db_migrations' |
|
116 | 116 | ver.repository_path = 'versions' |
|
117 | 117 | self.sa.add(ver) |
|
118 | 118 | log.info('db version set to: %s' % __dbversion__) |
|
119 | 119 | |
|
120 | 120 | def run_pre_migration_tasks(self): |
|
121 | 121 | """ |
|
122 | 122 | Run various tasks before actually doing migrations |
|
123 | 123 | """ |
|
124 | 124 | # delete cache keys on each upgrade |
|
125 | 125 | total = CacheKey.query().count() |
|
126 | 126 | log.info("Deleting (%s) cache keys now...", total) |
|
127 | 127 | CacheKey.delete_all_cache() |
|
128 | 128 | |
|
    def upgrade(self):
        """
        Upgrades given database schema to given revision following
        all needed steps, to perform the upgrade

        Confirms with the user, puts an unversioned database under
        version control at version 1, then applies each migration step
        in order up to ``__dbversion__``. Exits the process when there
        is nothing to do or the user declines.
        """

        from rhodecode.lib.dbmigrate.migrate.versioning import api
        from rhodecode.lib.dbmigrate.migrate.exceptions import \
            DatabaseNotControlledError

        # sqlite older than 3.7 is known to fail on some migration steps
        if 'sqlite' in self.dburi:
            print (
                '********************** WARNING **********************\n'
                'Make sure your version of sqlite is at least 3.7.X. \n'
                'Earlier versions are known to fail on some migrations\n'
                '*****************************************************\n')

        upgrade = self.ask_ok(
            'You are about to perform a database upgrade. Make '
            'sure you have backed up your database. '
            'Continue ? [y/n]')
        if not upgrade:
            log.info('No upgrade performed')
            sys.exit(0)

        # migration repository ships inside the rhodecode package tree
        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                             'rhodecode/lib/dbmigrate')
        db_uri = self.dburi

        try:
            curr_version = api.db_version(db_uri, repository_path)
            msg = ('Found current database under version '
                   'control with version %s' % curr_version)

        except (RuntimeError, DatabaseNotControlledError):
            # database was never stamped -- put it under version control
            # starting at version 1
            curr_version = 1
            msg = ('Current database is not under version control. Setting '
                   'as version %s' % curr_version)
            api.version_control(db_uri, repository_path, curr_version)

        notify(msg)

        self.run_pre_migration_tasks()

        if curr_version == __dbversion__:
            log.info('This database is already at the newest version')
            sys.exit(0)

        # every missing step between current and target, applied in order
        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
        notify('attempting to upgrade database from '
               'version %s to version %s' % (curr_version, __dbversion__))

        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
        _step = None
        for step in upgrade_steps:
            notify('performing upgrade step %s' % step)
            time.sleep(0.5)

            api.upgrade(db_uri, repository_path, step)
            # discard any session state left over from the schema change
            # performed outside of this session
            self.sa.rollback()
            notify('schema upgrade for step %s completed' % (step,))

            _step = step

        notify('upgrade to version %s successful' % _step)
|
195 | 195 | |
|
196 | 196 | def fix_repo_paths(self): |
|
197 | 197 | """ |
|
198 | 198 | Fixes an old RhodeCode version path into new one without a '*' |
|
199 | 199 | """ |
|
200 | 200 | |
|
201 | 201 | paths = self.sa.query(RhodeCodeUi)\ |
|
202 | 202 | .filter(RhodeCodeUi.ui_key == '/')\ |
|
203 | 203 | .scalar() |
|
204 | 204 | |
|
205 | 205 | paths.ui_value = paths.ui_value.replace('*', '') |
|
206 | 206 | |
|
207 | 207 | try: |
|
208 | 208 | self.sa.add(paths) |
|
209 | 209 | self.sa.commit() |
|
210 | 210 | except Exception: |
|
211 | 211 | self.sa.rollback() |
|
212 | 212 | raise |
|
213 | 213 | |
|
214 | 214 | def fix_default_user(self): |
|
215 | 215 | """ |
|
216 | 216 | Fixes an old default user with some 'nicer' default values, |
|
217 | 217 | used mostly for anonymous access |
|
218 | 218 | """ |
|
219 | 219 | def_user = self.sa.query(User)\ |
|
220 | 220 | .filter(User.username == User.DEFAULT_USER)\ |
|
221 | 221 | .one() |
|
222 | 222 | |
|
223 | 223 | def_user.name = 'Anonymous' |
|
224 | 224 | def_user.lastname = 'User' |
|
225 | 225 | def_user.email = User.DEFAULT_USER_EMAIL |
|
226 | 226 | |
|
227 | 227 | try: |
|
228 | 228 | self.sa.add(def_user) |
|
229 | 229 | self.sa.commit() |
|
230 | 230 | except Exception: |
|
231 | 231 | self.sa.rollback() |
|
232 | 232 | raise |
|
233 | 233 | |
|
234 | 234 | def fix_settings(self): |
|
235 | 235 | """ |
|
236 | 236 | Fixes rhodecode settings and adds ga_code key for google analytics |
|
237 | 237 | """ |
|
238 | 238 | |
|
239 | 239 | hgsettings3 = RhodeCodeSetting('ga_code', '') |
|
240 | 240 | |
|
241 | 241 | try: |
|
242 | 242 | self.sa.add(hgsettings3) |
|
243 | 243 | self.sa.commit() |
|
244 | 244 | except Exception: |
|
245 | 245 | self.sa.rollback() |
|
246 | 246 | raise |
|
247 | 247 | |
|
    def create_admin_and_prompt(self):
        """
        Create the initial admin account.

        Username/password/email are taken from ``cli_args`` when given,
        otherwise prompted for interactively (password gets two attempts
        before the process exits).
        """

        # defaults
        defaults = self.cli_args
        username = defaults.get('username')
        password = defaults.get('password')
        email = defaults.get('email')

        if username is None:
            username = raw_input('Specify admin username:')
        if password is None:
            password = self._get_admin_password()
            if not password:
                # second try
                password = self._get_admin_password()
            if not password:
                # two failed attempts -- abort setup
                sys.exit()
        if email is None:
            email = raw_input('Specify admin email:')
        api_key = self.cli_args.get('api_key')
        # strict_creation_check=False allows otherwise-reserved values
        # for this bootstrap account
        self.create_user(username, password, email, True,
                         strict_creation_check=False,
                         api_key=api_key)
|
271 | 271 | |
|
272 | 272 | def _get_admin_password(self): |
|
273 | 273 | password = getpass.getpass('Specify admin password ' |
|
274 | 274 | '(min 6 chars):') |
|
275 | 275 | confirm = getpass.getpass('Confirm password:') |
|
276 | 276 | |
|
277 | 277 | if password != confirm: |
|
278 | 278 | log.error('passwords mismatch') |
|
279 | 279 | return False |
|
280 | 280 | if len(password) < 6: |
|
281 | 281 | log.error('password is too short - use at least 6 characters') |
|
282 | 282 | return False |
|
283 | 283 | |
|
284 | 284 | return password |
|
285 | 285 | |
|
286 | 286 | def create_test_admin_and_users(self): |
|
287 | 287 | log.info('creating admin and regular test users') |
|
288 | 288 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \ |
|
289 | 289 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ |
|
290 | 290 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ |
|
291 | 291 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ |
|
292 | 292 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL |
|
293 | 293 | |
|
294 | 294 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, |
|
295 | 295 | TEST_USER_ADMIN_EMAIL, True, api_key=True) |
|
296 | 296 | |
|
297 | 297 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, |
|
298 | 298 | TEST_USER_REGULAR_EMAIL, False, api_key=True) |
|
299 | 299 | |
|
300 | 300 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, |
|
301 | 301 | TEST_USER_REGULAR2_EMAIL, False, api_key=True) |
|
302 | 302 | |
|
    def create_ui_settings(self, repo_store_path):
        """
        Creates ui settings, fills out hooks
        and disables dotencode

        :param repo_store_path: base path of the repository store; the
            largefiles and git-lfs cache locations are derived from it
        """
        settings_model = SettingsModel(sa=self.sa)
        from rhodecode.lib.vcs.backends.hg import largefiles_store
        from rhodecode.lib.vcs.backends.git import lfs_store

        # Build HOOKS
        hooks = [
            (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),

            # HG
            (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
            (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
            (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
            (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
            (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
            (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),

        ]

        # upsert each hook: reuse an existing ui row when present,
        # otherwise create a fresh one
        for key, value in hooks:
            hook_obj = settings_model.get_ui_by_key(key)
            hooks2 = hook_obj if hook_obj else RhodeCodeUi()
            hooks2.ui_section = 'hooks'
            hooks2.ui_key = key
            hooks2.ui_value = value
            self.sa.add(hooks2)

        # enable largefiles
        largefiles = RhodeCodeUi()
        largefiles.ui_section = 'extensions'
        largefiles.ui_key = 'largefiles'
        largefiles.ui_value = ''
        self.sa.add(largefiles)

        # set default largefiles cache dir, defaults to
        # /repo_store_location/.cache/largefiles
        largefiles = RhodeCodeUi()
        largefiles.ui_section = 'largefiles'
        largefiles.ui_key = 'usercache'
        largefiles.ui_value = largefiles_store(repo_store_path)

        self.sa.add(largefiles)

        # set default lfs cache dir, defaults to
        # /repo_store_location/.cache/lfs_store
        lfsstore = RhodeCodeUi()
        lfsstore.ui_section = 'vcs_git_lfs'
        lfsstore.ui_key = 'store_location'
        lfsstore.ui_value = lfs_store(repo_store_path)

        self.sa.add(lfsstore)

        # enable hgsubversion disabled by default
        hgsubversion = RhodeCodeUi()
        hgsubversion.ui_section = 'extensions'
        hgsubversion.ui_key = 'hgsubversion'
        hgsubversion.ui_value = ''
        hgsubversion.ui_active = False
        self.sa.add(hgsubversion)

        # enable hgevolve disabled by default
        hgevolve = RhodeCodeUi()
        hgevolve.ui_section = 'extensions'
        hgevolve.ui_key = 'evolve'
        hgevolve.ui_value = ''
        hgevolve.ui_active = False
        self.sa.add(hgevolve)

        # enable hggit disabled by default
        hggit = RhodeCodeUi()
        hggit.ui_section = 'extensions'
        hggit.ui_key = 'hggit'
        hggit.ui_value = ''
        hggit.ui_active = False
        self.sa.add(hggit)

        # set svn branch defaults
        branches = ["/branches/*", "/trunk"]
        tags = ["/tags/*"]

        for branch in branches:
            settings_model.create_ui_section_value(
                RhodeCodeUi.SVN_BRANCH_ID, branch)

        for tag in tags:
            settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
|
392 | 393 | |
|
393 | 394 | def create_auth_plugin_options(self, skip_existing=False): |
|
394 | 395 | """ |
|
395 | 396 | Create default auth plugin settings, and make it active |
|
396 | 397 | |
|
397 | 398 | :param skip_existing: |
|
398 | 399 | """ |
|
399 | 400 | |
|
400 | 401 | for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'), |
|
401 | 402 | ('auth_rhodecode_enabled', 'True', 'bool')]: |
|
402 | 403 | if (skip_existing and |
|
403 | 404 | SettingsModel().get_setting_by_name(k) is not None): |
|
404 | 405 | log.debug('Skipping option %s' % k) |
|
405 | 406 | continue |
|
406 | 407 | setting = RhodeCodeSetting(k, v, t) |
|
407 | 408 | self.sa.add(setting) |
|
408 | 409 | |
|
409 | 410 | def create_default_options(self, skip_existing=False): |
|
410 | 411 | """Creates default settings""" |
|
411 | 412 | |
|
412 | 413 | for k, v, t in [ |
|
413 | 414 | ('default_repo_enable_locking', False, 'bool'), |
|
414 | 415 | ('default_repo_enable_downloads', False, 'bool'), |
|
415 | 416 | ('default_repo_enable_statistics', False, 'bool'), |
|
416 | 417 | ('default_repo_private', False, 'bool'), |
|
417 | 418 | ('default_repo_type', 'hg', 'unicode')]: |
|
418 | 419 | |
|
419 | 420 | if (skip_existing and |
|
420 | 421 | SettingsModel().get_setting_by_name(k) is not None): |
|
421 | 422 | log.debug('Skipping option %s' % k) |
|
422 | 423 | continue |
|
423 | 424 | setting = RhodeCodeSetting(k, v, t) |
|
424 | 425 | self.sa.add(setting) |
|
425 | 426 | |
|
426 | 427 | def fixup_groups(self): |
|
427 | 428 | def_usr = User.get_default_user() |
|
428 | 429 | for g in RepoGroup.query().all(): |
|
429 | 430 | g.group_name = g.get_new_name(g.name) |
|
430 | 431 | self.sa.add(g) |
|
431 | 432 | # get default perm |
|
432 | 433 | default = UserRepoGroupToPerm.query()\ |
|
433 | 434 | .filter(UserRepoGroupToPerm.group == g)\ |
|
434 | 435 | .filter(UserRepoGroupToPerm.user == def_usr)\ |
|
435 | 436 | .scalar() |
|
436 | 437 | |
|
437 | 438 | if default is None: |
|
438 | 439 | log.debug('missing default permission for group %s adding' % g) |
|
439 | 440 | perm_obj = RepoGroupModel()._create_default_perms(g) |
|
440 | 441 | self.sa.add(perm_obj) |
|
441 | 442 | |
|
    def reset_permissions(self, username):
        """
        Resets permissions to default state, useful when old systems had
        bad permissions, we must clean them up

        :param username: login name of the user whose permissions get reset
        :return: None when the user does not exist, otherwise True if
            stale permissions were removed and re-populated, False if
            nothing needed fixing
        """
        default_user = User.get_by_username(username)
        if not default_user:
            return

        u2p = UserToPerm.query()\
            .filter(UserToPerm.user == default_user).all()
        fixed = False
        # rebuild only when the stored count diverges from the expected
        # default permission set
        if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
            for p in u2p:
                # NOTE(review): deletes go through Session() while the
                # rest of this class uses self.sa -- presumably the same
                # scoped session; confirm before unifying
                Session().delete(p)
            fixed = True
            self.populate_default_permissions()
        return fixed
|
462 | 463 | |
|
    def update_repo_info(self):
        # refresh the stored repository info fields for all repositories
        RepoModel.update_repoinfo()
|
465 | 466 | |
|
    def config_prompt(self, test_repo_path='', retries=3):
        """
        Determine and validate the repository store path, prompting the
        user interactively when it was not supplied via ``cli_args`` or
        test settings. Re-prompts (recursively) up to *retries* times on
        invalid input, then aborts the process.

        :param test_repo_path: path to use when running under tests
        :param retries: remaining attempts before giving up
        :return: validated, symlink-resolved absolute path
        """
        defaults = self.cli_args
        _path = defaults.get('repos_location')
        if retries == 3:
            # first attempt -- announce the step only once
            log.info('Setting up repositories config')

        if _path is not None:
            path = _path
        elif not self.tests and not test_repo_path:
            path = raw_input(
                'Enter a valid absolute path to store repositories. '
                'All repositories in that path will be added automatically:'
            )
        else:
            path = test_repo_path
        path_ok = True

        # check proper dir
        if not os.path.isdir(path):
            path_ok = False
            log.error('Given path %s is not a valid directory' % (path,))

        elif not os.path.isabs(path):
            path_ok = False
            log.error('Given path %s is not an absolute path' % (path,))

        # check if path is at least readable.
        if not os.access(path, os.R_OK):
            path_ok = False
            log.error('Given path %s is not readable' % (path,))

        # check write access, warn user about non writeable paths
        elif not os.access(path, os.W_OK) and path_ok:
            log.warning('No write permission to given path %s' % (path,))

            q = ('Given path %s is not writeable, do you want to '
                 'continue with read only mode ? [y/n]' % (path,))
            if not self.ask_ok(q):
                log.error('Canceled by user')
                sys.exit(-1)

        if retries == 0:
            sys.exit('max retries reached')
        if not path_ok:
            # invalid path: recurse with one fewer retry remaining
            retries -= 1
            return self.config_prompt(test_repo_path, retries)

        real_path = os.path.normpath(os.path.realpath(path))

        if real_path != os.path.normpath(path):
            # resolve symlinks only with the user's explicit consent
            q = ('Path looks like a symlink, RhodeCode Enterprise will store '
                 'given path as %s ? [y/n]') % (real_path,)
            if not self.ask_ok(q):
                log.error('Canceled by user')
                sys.exit(-1)

        return real_path
|
523 | 524 | |
|
524 | 525 | def create_settings(self, path): |
|
525 | 526 | |
|
526 | 527 | self.create_ui_settings(path) |
|
527 | 528 | |
|
528 | 529 | ui_config = [ |
|
529 | 530 | ('web', 'push_ssl', 'False'), |
|
530 | 531 | ('web', 'allow_archive', 'gz zip bz2'), |
|
531 | 532 | ('web', 'allow_push', '*'), |
|
532 | 533 | ('web', 'baseurl', '/'), |
|
533 | 534 | ('paths', '/', path), |
|
534 | 535 | ('phases', 'publish', 'True') |
|
535 | 536 | ] |
|
536 | 537 | for section, key, value in ui_config: |
|
537 | 538 | ui_conf = RhodeCodeUi() |
|
538 | 539 | setattr(ui_conf, 'ui_section', section) |
|
539 | 540 | setattr(ui_conf, 'ui_key', key) |
|
540 | 541 | setattr(ui_conf, 'ui_value', value) |
|
541 | 542 | self.sa.add(ui_conf) |
|
542 | 543 | |
|
543 | 544 | # rhodecode app settings |
|
544 | 545 | settings = [ |
|
545 | 546 | ('realm', 'RhodeCode', 'unicode'), |
|
546 | 547 | ('title', '', 'unicode'), |
|
547 | 548 | ('pre_code', '', 'unicode'), |
|
548 | 549 | ('post_code', '', 'unicode'), |
|
549 | 550 | ('show_public_icon', True, 'bool'), |
|
550 | 551 | ('show_private_icon', True, 'bool'), |
|
551 | 552 | ('stylify_metatags', False, 'bool'), |
|
552 | 553 | ('dashboard_items', 100, 'int'), |
|
553 | 554 | ('admin_grid_items', 25, 'int'), |
|
554 | 555 | ('show_version', True, 'bool'), |
|
555 | 556 | ('use_gravatar', False, 'bool'), |
|
556 | 557 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), |
|
557 | 558 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), |
|
558 | 559 | ('support_url', '', 'unicode'), |
|
559 | 560 | ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'), |
|
560 | 561 | ('show_revision_number', True, 'bool'), |
|
561 | 562 | ('show_sha_length', 12, 'int'), |
|
562 | 563 | ] |
|
563 | 564 | |
|
564 | 565 | for key, val, type_ in settings: |
|
565 | 566 | sett = RhodeCodeSetting(key, val, type_) |
|
566 | 567 | self.sa.add(sett) |
|
567 | 568 | |
|
568 | 569 | self.create_auth_plugin_options() |
|
569 | 570 | self.create_default_options() |
|
570 | 571 | |
|
571 | 572 | log.info('created ui config') |
|
572 | 573 | |
|
573 | 574 | def create_user(self, username, password, email='', admin=False, |
|
574 | 575 | strict_creation_check=True, api_key=None): |
|
575 | 576 | log.info('creating user %s' % username) |
|
576 | 577 | user = UserModel().create_or_update( |
|
577 | 578 | username, password, email, firstname='RhodeCode', lastname='Admin', |
|
578 | 579 | active=True, admin=admin, extern_type="rhodecode", |
|
579 | 580 | strict_creation_check=strict_creation_check) |
|
580 | 581 | |
|
581 | 582 | if api_key: |
|
582 | 583 | log.info('setting a provided api key for the user %s', username) |
|
583 | 584 | from rhodecode.model.auth_token import AuthTokenModel |
|
584 | 585 | AuthTokenModel().create( |
|
585 | 586 | user=user, description='BUILTIN TOKEN') |
|
586 | 587 | |
|
    def create_default_user(self):
        """
        Create (or update) the built-in default user that carries the
        default permissions and controls anonymous access.
        """
        log.info('creating default user')
        # create default user for handling default permissions.
        user = UserModel().create_or_update(username=User.DEFAULT_USER,
                                            password=str(uuid.uuid1())[:20],
                                            email=User.DEFAULT_USER_EMAIL,
                                            firstname='Anonymous',
                                            lastname='User',
                                            strict_creation_check=False)
        # based on configuration options activate/deactivate this user,
        # which controls anonymous access
        if self.cli_args.get('public_access') is False:
            log.info('Public access disabled')
            user.active = False
            Session().add(user)
            Session().commit()
|
603 | 604 | |
|
    def create_permissions(self):
        """
        Creates all permissions defined in the system; delegates to
        ``PermissionModel.create_permissions``.
        """
        # module.(access|create|change|delete)_[name]
        # module.(none|read|write|admin)
        log.info('creating permissions')
        PermissionModel(self.sa).create_permissions()
|
612 | 613 | |
|
    def populate_default_permissions(self):
        """
        Populate default permissions. It will create only the default
        permissions that are missing, and not alter already defined ones
        """
        log.info('creating default user permissions')
        # seeded for the built-in default (anonymous) user
        PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
@@ -1,413 +1,425 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2013-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Set of hooks run by RhodeCode Enterprise |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import collections |
|
28 | 28 | import logging |
|
29 | 29 | |
|
30 | 30 | import rhodecode |
|
31 | 31 | from rhodecode import events |
|
32 | 32 | from rhodecode.lib import helpers as h |
|
33 | 33 | from rhodecode.lib import audit_logger |
|
34 | 34 | from rhodecode.lib.utils2 import safe_str |
|
35 | 35 | from rhodecode.lib.exceptions import HTTPLockedRC, UserCreationError |
|
36 | 36 | from rhodecode.model.db import Repository, User |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | HookResponse = collections.namedtuple('HookResponse', ('status', 'output')) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | def is_shadow_repo(extras): |
|
45 | 45 | """ |
|
46 | 46 | Returns ``True`` if this is an action executed against a shadow repository. |
|
47 | 47 | """ |
|
48 | 48 | return extras['is_shadow_repo'] |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | def _get_scm_size(alias, root_path): |
|
52 | 52 | |
|
53 | 53 | if not alias.startswith('.'): |
|
54 | 54 | alias += '.' |
|
55 | 55 | |
|
56 | 56 | size_scm, size_root = 0, 0 |
|
57 | 57 | for path, unused_dirs, files in os.walk(safe_str(root_path)): |
|
58 | 58 | if path.find(alias) != -1: |
|
59 | 59 | for f in files: |
|
60 | 60 | try: |
|
61 | 61 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
62 | 62 | except OSError: |
|
63 | 63 | pass |
|
64 | 64 | else: |
|
65 | 65 | for f in files: |
|
66 | 66 | try: |
|
67 | 67 | size_root += os.path.getsize(os.path.join(path, f)) |
|
68 | 68 | except OSError: |
|
69 | 69 | pass |
|
70 | 70 | |
|
71 | 71 | size_scm_f = h.format_byte_size_binary(size_scm) |
|
72 | 72 | size_root_f = h.format_byte_size_binary(size_root) |
|
73 | 73 | size_total_f = h.format_byte_size_binary(size_root + size_scm) |
|
74 | 74 | |
|
75 | 75 | return size_scm_f, size_root_f, size_total_f |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | # actual hooks called by Mercurial internally, and GIT by our Python Hooks |
|
79 | 79 | def repo_size(extras): |
|
80 | 80 | """Present size of repository after push.""" |
|
81 | 81 | repo = Repository.get_by_repo_name(extras.repository) |
|
82 | 82 | vcs_part = safe_str(u'.%s' % repo.repo_type) |
|
83 | 83 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, |
|
84 | 84 | repo.repo_full_path) |
|
85 | 85 | msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n' |
|
86 | 86 | % (repo.repo_name, vcs_part, size_vcs, size_root, size_total)) |
|
87 | 87 | return HookResponse(0, msg) |
|
88 | 88 | |
|
89 | 89 | |
|
90 | 90 | def pre_push(extras): |
|
91 | 91 | """ |
|
92 | 92 | Hook executed before pushing code. |
|
93 | 93 | |
|
94 | 94 | It bans pushing when the repository is locked. |
|
95 | 95 | """ |
|
96 | 96 | |
|
97 | 97 | usr = User.get_by_username(extras.username) |
|
98 | 98 | output = '' |
|
99 | 99 | if extras.locked_by[0] and usr.user_id != int(extras.locked_by[0]): |
|
100 | 100 | locked_by = User.get(extras.locked_by[0]).username |
|
101 | 101 | reason = extras.locked_by[2] |
|
102 | 102 | # this exception is interpreted in git/hg middlewares and based |
|
103 | 103 | # on that proper return code is server to client |
|
104 | 104 | _http_ret = HTTPLockedRC( |
|
105 | 105 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
106 | 106 | if str(_http_ret.code).startswith('2'): |
|
107 | 107 | # 2xx Codes don't raise exceptions |
|
108 | 108 | output = _http_ret.title |
|
109 | 109 | else: |
|
110 | 110 | raise _http_ret |
|
111 | 111 | |
|
112 | 112 | # Propagate to external components. This is done after checking the |
|
113 | 113 | # lock, for consistent behavior. |
|
114 | 114 | if not is_shadow_repo(extras): |
|
115 | 115 | pre_push_extension(repo_store_path=Repository.base_path(), **extras) |
|
116 | 116 | events.trigger(events.RepoPrePushEvent( |
|
117 | 117 | repo_name=extras.repository, extras=extras)) |
|
118 | 118 | |
|
119 | 119 | return HookResponse(0, output) |
|
120 | 120 | |
|
121 | 121 | |
|
122 | 122 | def pre_pull(extras): |
|
123 | 123 | """ |
|
124 | 124 | Hook executed before pulling the code. |
|
125 | 125 | |
|
126 | 126 | It bans pulling when the repository is locked. |
|
127 | 127 | """ |
|
128 | 128 | |
|
129 | 129 | output = '' |
|
130 | 130 | if extras.locked_by[0]: |
|
131 | 131 | locked_by = User.get(extras.locked_by[0]).username |
|
132 | 132 | reason = extras.locked_by[2] |
|
133 | 133 | # this exception is interpreted in git/hg middlewares and based |
|
134 | 134 | # on that proper return code is server to client |
|
135 | 135 | _http_ret = HTTPLockedRC( |
|
136 | 136 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
137 | 137 | if str(_http_ret.code).startswith('2'): |
|
138 | 138 | # 2xx Codes don't raise exceptions |
|
139 | 139 | output = _http_ret.title |
|
140 | 140 | else: |
|
141 | 141 | raise _http_ret |
|
142 | 142 | |
|
143 | 143 | # Propagate to external components. This is done after checking the |
|
144 | 144 | # lock, for consistent behavior. |
|
145 | 145 | if not is_shadow_repo(extras): |
|
146 | 146 | pre_pull_extension(**extras) |
|
147 | 147 | events.trigger(events.RepoPrePullEvent( |
|
148 | 148 | repo_name=extras.repository, extras=extras)) |
|
149 | 149 | |
|
150 | 150 | return HookResponse(0, output) |
|
151 | 151 | |
|
152 | 152 | |
|
153 | 153 | def post_pull(extras): |
|
154 | 154 | """Hook executed after client pulls the code.""" |
|
155 | 155 | |
|
156 | 156 | audit_user = audit_logger.UserWrap( |
|
157 | 157 | username=extras.username, |
|
158 | 158 | ip_addr=extras.ip) |
|
159 | 159 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
160 | 160 | audit_logger.store( |
|
161 | 161 | action='user.pull', action_data={ |
|
162 | 162 | 'user_agent': extras.user_agent}, |
|
163 | 163 | user=audit_user, repo=repo, commit=True) |
|
164 | 164 | |
|
165 | 165 | # Propagate to external components. |
|
166 | 166 | if not is_shadow_repo(extras): |
|
167 | 167 | post_pull_extension(**extras) |
|
168 | 168 | events.trigger(events.RepoPullEvent( |
|
169 | 169 | repo_name=extras.repository, extras=extras)) |
|
170 | 170 | |
|
171 | 171 | output = '' |
|
172 | 172 | # make lock is a tri state False, True, None. We only make lock on True |
|
173 | 173 | if extras.make_lock is True and not is_shadow_repo(extras): |
|
174 | 174 | user = User.get_by_username(extras.username) |
|
175 | 175 | Repository.lock(Repository.get_by_repo_name(extras.repository), |
|
176 | 176 | user.user_id, |
|
177 | 177 | lock_reason=Repository.LOCK_PULL) |
|
178 | 178 | msg = 'Made lock on repo `%s`' % (extras.repository,) |
|
179 | 179 | output += msg |
|
180 | 180 | |
|
181 | 181 | if extras.locked_by[0]: |
|
182 | 182 | locked_by = User.get(extras.locked_by[0]).username |
|
183 | 183 | reason = extras.locked_by[2] |
|
184 | 184 | _http_ret = HTTPLockedRC( |
|
185 | 185 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
186 | 186 | if str(_http_ret.code).startswith('2'): |
|
187 | 187 | # 2xx Codes don't raise exceptions |
|
188 | 188 | output += _http_ret.title |
|
189 | 189 | |
|
190 | 190 | return HookResponse(0, output) |
|
191 | 191 | |
|
192 | 192 | |
|
193 | 193 | def post_push(extras): |
|
194 | 194 | """Hook executed after user pushes to the repository.""" |
|
195 | 195 | commit_ids = extras.commit_ids |
|
196 | 196 | |
|
197 | 197 | # log the push call |
|
198 | 198 | audit_user = audit_logger.UserWrap( |
|
199 | 199 | username=extras.username, ip_addr=extras.ip) |
|
200 | 200 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
201 | 201 | audit_logger.store( |
|
202 | 202 | action='user.push', action_data={ |
|
203 | 203 | 'user_agent': extras.user_agent, |
|
204 | 204 | 'commit_ids': commit_ids[:10000]}, |
|
205 | 205 | user=audit_user, repo=repo, commit=True) |
|
206 | 206 | |
|
207 | 207 | # Propagate to external components. |
|
208 | 208 | if not is_shadow_repo(extras): |
|
209 | 209 | post_push_extension( |
|
210 | 210 | repo_store_path=Repository.base_path(), |
|
211 | 211 | pushed_revs=commit_ids, |
|
212 | 212 | **extras) |
|
213 | 213 | events.trigger(events.RepoPushEvent( |
|
214 | 214 | repo_name=extras.repository, |
|
215 | 215 | pushed_commit_ids=commit_ids, |
|
216 | 216 | extras=extras)) |
|
217 | 217 | |
|
218 | 218 | output = '' |
|
219 | 219 | # make lock is a tri state False, True, None. We only release lock on False |
|
220 | 220 | if extras.make_lock is False and not is_shadow_repo(extras): |
|
221 | 221 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) |
|
222 | 222 | msg = 'Released lock on repo `%s`\n' % extras.repository |
|
223 | 223 | output += msg |
|
224 | 224 | |
|
225 | 225 | if extras.locked_by[0]: |
|
226 | 226 | locked_by = User.get(extras.locked_by[0]).username |
|
227 | 227 | reason = extras.locked_by[2] |
|
228 | 228 | _http_ret = HTTPLockedRC( |
|
229 | 229 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
230 | 230 | # TODO: johbo: if not? |
|
231 | 231 | if str(_http_ret.code).startswith('2'): |
|
232 | 232 | # 2xx Codes don't raise exceptions |
|
233 | 233 | output += _http_ret.title |
|
234 | 234 | |
|
235 | if extras.new_refs: | |
|
236 | tmpl = \ | |
|
237 | extras.server_url + '/' + \ | |
|
238 | extras.repository + \ | |
|
239 | "/pull-request/new?{ref_type}={ref_name}" | |
|
240 | for branch_name in extras.new_refs['branches']: | |
|
241 | output += 'RhodeCode: open pull request link: {}\n'.format( | |
|
242 | tmpl.format(ref_type='branch', ref_name=branch_name)) | |
|
243 | ||
|
244 | for book_name in extras.new_refs['bookmarks']: | |
|
245 | output += 'RhodeCode: open pull request link: {}\n'.format( | |
|
246 | tmpl.format(ref_type='bookmark', ref_name=book_name)) | |
|
247 | ||
|
235 | 248 | output += 'RhodeCode: push completed\n' |
|
236 | ||
|
237 | 249 | return HookResponse(0, output) |
|
238 | 250 | |
|
239 | 251 | |
|
240 | 252 | def _locked_by_explanation(repo_name, user_name, reason): |
|
241 | 253 | message = ( |
|
242 | 254 | 'Repository `%s` locked by user `%s`. Reason:`%s`' |
|
243 | 255 | % (repo_name, user_name, reason)) |
|
244 | 256 | return message |
|
245 | 257 | |
|
246 | 258 | |
|
247 | 259 | def check_allowed_create_user(user_dict, created_by, **kwargs): |
|
248 | 260 | # pre create hooks |
|
249 | 261 | if pre_create_user.is_active(): |
|
250 | 262 | allowed, reason = pre_create_user(created_by=created_by, **user_dict) |
|
251 | 263 | if not allowed: |
|
252 | 264 | raise UserCreationError(reason) |
|
253 | 265 | |
|
254 | 266 | |
|
255 | 267 | class ExtensionCallback(object): |
|
256 | 268 | """ |
|
257 | 269 | Forwards a given call to rcextensions, sanitizes keyword arguments. |
|
258 | 270 | |
|
259 | 271 | Does check if there is an extension active for that hook. If it is |
|
260 | 272 | there, it will forward all `kwargs_keys` keyword arguments to the |
|
261 | 273 | extension callback. |
|
262 | 274 | """ |
|
263 | 275 | |
|
264 | 276 | def __init__(self, hook_name, kwargs_keys): |
|
265 | 277 | self._hook_name = hook_name |
|
266 | 278 | self._kwargs_keys = set(kwargs_keys) |
|
267 | 279 | |
|
268 | 280 | def __call__(self, *args, **kwargs): |
|
269 | 281 | log.debug('Calling extension callback for %s', self._hook_name) |
|
270 | 282 | |
|
271 | 283 | kwargs_to_pass = dict((key, kwargs[key]) for key in self._kwargs_keys) |
|
272 | 284 | # backward compat for removed api_key for old hooks. THis was it works |
|
273 | 285 | # with older rcextensions that require api_key present |
|
274 | 286 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: |
|
275 | 287 | kwargs_to_pass['api_key'] = '_DEPRECATED_' |
|
276 | 288 | |
|
277 | 289 | callback = self._get_callback() |
|
278 | 290 | if callback: |
|
279 | 291 | return callback(**kwargs_to_pass) |
|
280 | 292 | else: |
|
281 | 293 | log.debug('extensions callback not found skipping...') |
|
282 | 294 | |
|
283 | 295 | def is_active(self): |
|
284 | 296 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) |
|
285 | 297 | |
|
286 | 298 | def _get_callback(self): |
|
287 | 299 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) |
|
288 | 300 | |
|
289 | 301 | |
|
290 | 302 | pre_pull_extension = ExtensionCallback( |
|
291 | 303 | hook_name='PRE_PULL_HOOK', |
|
292 | 304 | kwargs_keys=( |
|
293 | 305 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
294 | 306 | 'repository')) |
|
295 | 307 | |
|
296 | 308 | |
|
297 | 309 | post_pull_extension = ExtensionCallback( |
|
298 | 310 | hook_name='PULL_HOOK', |
|
299 | 311 | kwargs_keys=( |
|
300 | 312 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
301 | 313 | 'repository')) |
|
302 | 314 | |
|
303 | 315 | |
|
304 | 316 | pre_push_extension = ExtensionCallback( |
|
305 | 317 | hook_name='PRE_PUSH_HOOK', |
|
306 | 318 | kwargs_keys=( |
|
307 | 319 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
308 | 320 | 'repository', 'repo_store_path', 'commit_ids')) |
|
309 | 321 | |
|
310 | 322 | |
|
311 | 323 | post_push_extension = ExtensionCallback( |
|
312 | 324 | hook_name='PUSH_HOOK', |
|
313 | 325 | kwargs_keys=( |
|
314 | 326 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
315 | 327 | 'repository', 'repo_store_path', 'pushed_revs')) |
|
316 | 328 | |
|
317 | 329 | |
|
318 | 330 | pre_create_user = ExtensionCallback( |
|
319 | 331 | hook_name='PRE_CREATE_USER_HOOK', |
|
320 | 332 | kwargs_keys=( |
|
321 | 333 | 'username', 'password', 'email', 'firstname', 'lastname', 'active', |
|
322 | 334 | 'admin', 'created_by')) |
|
323 | 335 | |
|
324 | 336 | |
|
325 | 337 | log_create_pull_request = ExtensionCallback( |
|
326 | 338 | hook_name='CREATE_PULL_REQUEST', |
|
327 | 339 | kwargs_keys=( |
|
328 | 340 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
329 | 341 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
330 | 342 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
331 | 343 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
332 | 344 | |
|
333 | 345 | |
|
334 | 346 | log_merge_pull_request = ExtensionCallback( |
|
335 | 347 | hook_name='MERGE_PULL_REQUEST', |
|
336 | 348 | kwargs_keys=( |
|
337 | 349 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
338 | 350 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
339 | 351 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
340 | 352 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
341 | 353 | |
|
342 | 354 | |
|
343 | 355 | log_close_pull_request = ExtensionCallback( |
|
344 | 356 | hook_name='CLOSE_PULL_REQUEST', |
|
345 | 357 | kwargs_keys=( |
|
346 | 358 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
347 | 359 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
348 | 360 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
349 | 361 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
350 | 362 | |
|
351 | 363 | |
|
352 | 364 | log_review_pull_request = ExtensionCallback( |
|
353 | 365 | hook_name='REVIEW_PULL_REQUEST', |
|
354 | 366 | kwargs_keys=( |
|
355 | 367 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
356 | 368 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
357 | 369 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
358 | 370 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
359 | 371 | |
|
360 | 372 | |
|
361 | 373 | log_update_pull_request = ExtensionCallback( |
|
362 | 374 | hook_name='UPDATE_PULL_REQUEST', |
|
363 | 375 | kwargs_keys=( |
|
364 | 376 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
365 | 377 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
366 | 378 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
367 | 379 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
368 | 380 | |
|
369 | 381 | |
|
370 | 382 | log_create_user = ExtensionCallback( |
|
371 | 383 | hook_name='CREATE_USER_HOOK', |
|
372 | 384 | kwargs_keys=( |
|
373 | 385 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
374 | 386 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
375 | 387 | 'ip_addresses', 'extern_type', 'extern_name', |
|
376 | 388 | 'email', 'api_keys', 'last_login', |
|
377 | 389 | 'full_name', 'active', 'password', 'emails', |
|
378 | 390 | 'inherit_default_permissions', 'created_by', 'created_on')) |
|
379 | 391 | |
|
380 | 392 | |
|
381 | 393 | log_delete_user = ExtensionCallback( |
|
382 | 394 | hook_name='DELETE_USER_HOOK', |
|
383 | 395 | kwargs_keys=( |
|
384 | 396 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
385 | 397 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
386 | 398 | 'ip_addresses', |
|
387 | 399 | 'email', 'last_login', |
|
388 | 400 | 'full_name', 'active', 'password', 'emails', |
|
389 | 401 | 'inherit_default_permissions', 'deleted_by')) |
|
390 | 402 | |
|
391 | 403 | |
|
392 | 404 | log_create_repository = ExtensionCallback( |
|
393 | 405 | hook_name='CREATE_REPO_HOOK', |
|
394 | 406 | kwargs_keys=( |
|
395 | 407 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
396 | 408 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
397 | 409 | 'clone_uri', 'fork_id', 'group_id', 'created_by')) |
|
398 | 410 | |
|
399 | 411 | |
|
400 | 412 | log_delete_repository = ExtensionCallback( |
|
401 | 413 | hook_name='DELETE_REPO_HOOK', |
|
402 | 414 | kwargs_keys=( |
|
403 | 415 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
404 | 416 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
405 | 417 | 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) |
|
406 | 418 | |
|
407 | 419 | |
|
408 | 420 | log_create_repository_group = ExtensionCallback( |
|
409 | 421 | hook_name='CREATE_REPO_GROUP_HOOK', |
|
410 | 422 | kwargs_keys=( |
|
411 | 423 | 'group_name', 'group_parent_id', 'group_description', |
|
412 | 424 | 'group_id', 'user_id', 'created_by', 'created_on', |
|
413 | 425 | 'enable_locking')) |
@@ -1,1042 +1,1043 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Utilities library for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import datetime |
|
26 | 26 | import decorator |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | import re |
|
31 | 31 | import shutil |
|
32 | 32 | import tempfile |
|
33 | 33 | import traceback |
|
34 | 34 | import tarfile |
|
35 | 35 | import warnings |
|
36 | 36 | import hashlib |
|
37 | 37 | from os.path import join as jn |
|
38 | 38 | |
|
39 | 39 | import paste |
|
40 | 40 | import pkg_resources |
|
41 | 41 | from paste.script.command import Command, BadCommand |
|
42 | 42 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
43 | 43 | from mako import exceptions |
|
44 | 44 | from pyramid.threadlocal import get_current_registry |
|
45 | 45 | from pyramid.request import Request |
|
46 | 46 | |
|
47 | 47 | from rhodecode.lib.fakemod import create_module |
|
48 | 48 | from rhodecode.lib.vcs.backends.base import Config |
|
49 | 49 | from rhodecode.lib.vcs.exceptions import VCSError |
|
50 | 50 | from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend |
|
51 | 51 | from rhodecode.lib.utils2 import ( |
|
52 | 52 | safe_str, safe_unicode, get_current_rhodecode_user, md5) |
|
53 | 53 | from rhodecode.model import meta |
|
54 | 54 | from rhodecode.model.db import ( |
|
55 | 55 | Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup) |
|
56 | 56 | from rhodecode.model.meta import Session |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') |
|
62 | 62 | |
|
63 | 63 | # String which contains characters that are not allowed in slug names for |
|
64 | 64 | # repositories or repository groups. It is properly escaped to use it in |
|
65 | 65 | # regular expressions. |
|
66 | 66 | SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:') |
|
67 | 67 | |
|
68 | 68 | # Regex that matches forbidden characters in repo/group slugs. |
|
69 | 69 | SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS)) |
|
70 | 70 | |
|
71 | 71 | # Regex that matches allowed characters in repo/group slugs. |
|
72 | 72 | SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS)) |
|
73 | 73 | |
|
74 | 74 | # Regex that matches whole repo/group slugs. |
|
75 | 75 | SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS)) |
|
76 | 76 | |
|
77 | 77 | _license_cache = None |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | def repo_name_slug(value): |
|
81 | 81 | """ |
|
82 | 82 | Return slug of name of repository |
|
83 | 83 | This function is called on each creation/modification |
|
84 | 84 | of repository to prevent bad names in repo |
|
85 | 85 | """ |
|
86 | 86 | replacement_char = '-' |
|
87 | 87 | |
|
88 | 88 | slug = remove_formatting(value) |
|
89 | 89 | slug = SLUG_BAD_CHAR_RE.sub('', slug) |
|
90 | 90 | slug = re.sub('[\s]+', '-', slug) |
|
91 | 91 | slug = collapse(slug, replacement_char) |
|
92 | 92 | return slug |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | #============================================================================== |
|
96 | 96 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
97 | 97 | #============================================================================== |
|
98 | 98 | def get_repo_slug(request): |
|
99 | 99 | if isinstance(request, Request) and getattr(request, 'matchdict', None): |
|
100 | 100 | # pyramid |
|
101 | 101 | _repo = request.matchdict.get('repo_name') |
|
102 | 102 | else: |
|
103 | 103 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
104 | 104 | |
|
105 | 105 | if _repo: |
|
106 | 106 | _repo = _repo.rstrip('/') |
|
107 | 107 | return _repo |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | def get_repo_group_slug(request): |
|
111 | 111 | if isinstance(request, Request) and getattr(request, 'matchdict', None): |
|
112 | 112 | # pyramid |
|
113 | 113 | _group = request.matchdict.get('group_name') |
|
114 | 114 | else: |
|
115 | 115 | _group = request.environ['pylons.routes_dict'].get('group_name') |
|
116 | 116 | |
|
117 | 117 | if _group: |
|
118 | 118 | _group = _group.rstrip('/') |
|
119 | 119 | return _group |
|
120 | 120 | |
|
121 | 121 | |
|
122 | 122 | def get_user_group_slug(request): |
|
123 | 123 | if isinstance(request, Request) and getattr(request, 'matchdict', None): |
|
124 | 124 | # pyramid |
|
125 | 125 | _group = request.matchdict.get('user_group_id') |
|
126 | 126 | else: |
|
127 | 127 | _group = request.environ['pylons.routes_dict'].get('user_group_id') |
|
128 | 128 | |
|
129 | 129 | try: |
|
130 | 130 | _group = UserGroup.get(_group) |
|
131 | 131 | if _group: |
|
132 | 132 | _group = _group.users_group_name |
|
133 | 133 | except Exception: |
|
134 | 134 | log.debug(traceback.format_exc()) |
|
135 | 135 | # catch all failures here |
|
136 | 136 | pass |
|
137 | 137 | |
|
138 | 138 | return _group |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): |
|
142 | 142 | """ |
|
143 | 143 | Action logger for various actions made by users |
|
144 | 144 | |
|
145 | 145 | :param user: user that made this action, can be a unique username string or |
|
146 | 146 | object containing user_id attribute |
|
147 | 147 | :param action: action to log, should be on of predefined unique actions for |
|
148 | 148 | easy translations |
|
149 | 149 | :param repo: string name of repository or object containing repo_id, |
|
150 | 150 | that action was made on |
|
151 | 151 | :param ipaddr: optional ip address from what the action was made |
|
152 | 152 | :param sa: optional sqlalchemy session |
|
153 | 153 | |
|
154 | 154 | """ |
|
155 | 155 | |
|
156 | 156 | if not sa: |
|
157 | 157 | sa = meta.Session() |
|
158 | 158 | # if we don't get explicit IP address try to get one from registered user |
|
159 | 159 | # in tmpl context var |
|
160 | 160 | if not ipaddr: |
|
161 | 161 | ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '') |
|
162 | 162 | |
|
163 | 163 | try: |
|
164 | 164 | if getattr(user, 'user_id', None): |
|
165 | 165 | user_obj = User.get(user.user_id) |
|
166 | 166 | elif isinstance(user, basestring): |
|
167 | 167 | user_obj = User.get_by_username(user) |
|
168 | 168 | else: |
|
169 | 169 | raise Exception('You have to provide a user object or a username') |
|
170 | 170 | |
|
171 | 171 | if getattr(repo, 'repo_id', None): |
|
172 | 172 | repo_obj = Repository.get(repo.repo_id) |
|
173 | 173 | repo_name = repo_obj.repo_name |
|
174 | 174 | elif isinstance(repo, basestring): |
|
175 | 175 | repo_name = repo.lstrip('/') |
|
176 | 176 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
177 | 177 | else: |
|
178 | 178 | repo_obj = None |
|
179 | 179 | repo_name = '' |
|
180 | 180 | |
|
181 | 181 | user_log = UserLog() |
|
182 | 182 | user_log.user_id = user_obj.user_id |
|
183 | 183 | user_log.username = user_obj.username |
|
184 | 184 | action = safe_unicode(action) |
|
185 | 185 | user_log.action = action[:1200000] |
|
186 | 186 | |
|
187 | 187 | user_log.repository = repo_obj |
|
188 | 188 | user_log.repository_name = repo_name |
|
189 | 189 | |
|
190 | 190 | user_log.action_date = datetime.datetime.now() |
|
191 | 191 | user_log.user_ip = ipaddr |
|
192 | 192 | sa.add(user_log) |
|
193 | 193 | |
|
194 | 194 | log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s', |
|
195 | 195 | action, safe_unicode(repo), user_obj, ipaddr) |
|
196 | 196 | if commit: |
|
197 | 197 | sa.commit() |
|
198 | 198 | except Exception: |
|
199 | 199 | log.error(traceback.format_exc()) |
|
200 | 200 | raise |
|
201 | 201 | |
|
202 | 202 | |
|
203 | 203 | def get_filesystem_repos(path, recursive=False, skip_removed_repos=True): |
|
204 | 204 | """ |
|
205 | 205 | Scans given path for repos and return (name,(type,path)) tuple |
|
206 | 206 | |
|
207 | 207 | :param path: path to scan for repositories |
|
208 | 208 | :param recursive: recursive search and return names with subdirs in front |
|
209 | 209 | """ |
|
210 | 210 | |
|
211 | 211 | # remove ending slash for better results |
|
212 | 212 | path = path.rstrip(os.sep) |
|
213 | 213 | log.debug('now scanning in %s location recursive:%s...', path, recursive) |
|
214 | 214 | |
|
215 | 215 | def _get_repos(p): |
|
216 | 216 | dirpaths = _get_dirpaths(p) |
|
217 | 217 | if not _is_dir_writable(p): |
|
218 | 218 | log.warning('repo path without write access: %s', p) |
|
219 | 219 | |
|
220 | 220 | for dirpath in dirpaths: |
|
221 | 221 | if os.path.isfile(os.path.join(p, dirpath)): |
|
222 | 222 | continue |
|
223 | 223 | cur_path = os.path.join(p, dirpath) |
|
224 | 224 | |
|
225 | 225 | # skip removed repos |
|
226 | 226 | if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath): |
|
227 | 227 | continue |
|
228 | 228 | |
|
229 | 229 | #skip .<somethin> dirs |
|
230 | 230 | if dirpath.startswith('.'): |
|
231 | 231 | continue |
|
232 | 232 | |
|
233 | 233 | try: |
|
234 | 234 | scm_info = get_scm(cur_path) |
|
235 | 235 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
236 | 236 | except VCSError: |
|
237 | 237 | if not recursive: |
|
238 | 238 | continue |
|
239 | 239 | #check if this dir containts other repos for recursive scan |
|
240 | 240 | rec_path = os.path.join(p, dirpath) |
|
241 | 241 | if os.path.isdir(rec_path): |
|
242 | 242 | for inner_scm in _get_repos(rec_path): |
|
243 | 243 | yield inner_scm |
|
244 | 244 | |
|
245 | 245 | return _get_repos(path) |
|
246 | 246 | |
|
247 | 247 | |
|
248 | 248 | def _get_dirpaths(p): |
|
249 | 249 | try: |
|
250 | 250 | # OS-independable way of checking if we have at least read-only |
|
251 | 251 | # access or not. |
|
252 | 252 | dirpaths = os.listdir(p) |
|
253 | 253 | except OSError: |
|
254 | 254 | log.warning('ignoring repo path without read access: %s', p) |
|
255 | 255 | return [] |
|
256 | 256 | |
|
257 | 257 | # os.listpath has a tweak: If a unicode is passed into it, then it tries to |
|
258 | 258 | # decode paths and suddenly returns unicode objects itself. The items it |
|
259 | 259 | # cannot decode are returned as strings and cause issues. |
|
260 | 260 | # |
|
261 | 261 | # Those paths are ignored here until a solid solution for path handling has |
|
262 | 262 | # been built. |
|
263 | 263 | expected_type = type(p) |
|
264 | 264 | |
|
265 | 265 | def _has_correct_type(item): |
|
266 | 266 | if type(item) is not expected_type: |
|
267 | 267 | log.error( |
|
268 | 268 | u"Ignoring path %s since it cannot be decoded into unicode.", |
|
269 | 269 | # Using "repr" to make sure that we see the byte value in case |
|
270 | 270 | # of support. |
|
271 | 271 | repr(item)) |
|
272 | 272 | return False |
|
273 | 273 | return True |
|
274 | 274 | |
|
275 | 275 | dirpaths = [item for item in dirpaths if _has_correct_type(item)] |
|
276 | 276 | |
|
277 | 277 | return dirpaths |
|
278 | 278 | |
|
279 | 279 | |
|
280 | 280 | def _is_dir_writable(path): |
|
281 | 281 | """ |
|
282 | 282 | Probe if `path` is writable. |
|
283 | 283 | |
|
284 | 284 | Due to trouble on Cygwin / Windows, this is actually probing if it is |
|
285 | 285 | possible to create a file inside of `path`, stat does not produce reliable |
|
286 | 286 | results in this case. |
|
287 | 287 | """ |
|
288 | 288 | try: |
|
289 | 289 | with tempfile.TemporaryFile(dir=path): |
|
290 | 290 | pass |
|
291 | 291 | except OSError: |
|
292 | 292 | return False |
|
293 | 293 | return True |
|
294 | 294 | |
|
295 | 295 | |
|
296 | 296 | def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None): |
|
297 | 297 | """ |
|
298 | 298 | Returns True if given path is a valid repository False otherwise. |
|
299 | 299 | If expect_scm param is given also, compare if given scm is the same |
|
300 | 300 | as expected from scm parameter. If explicit_scm is given don't try to |
|
301 | 301 | detect the scm, just use the given one to check if repo is valid |
|
302 | 302 | |
|
303 | 303 | :param repo_name: |
|
304 | 304 | :param base_path: |
|
305 | 305 | :param expect_scm: |
|
306 | 306 | :param explicit_scm: |
|
307 | 307 | |
|
308 | 308 | :return True: if given path is a valid repository |
|
309 | 309 | """ |
|
310 | 310 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
311 | 311 | log.debug('Checking if `%s` is a valid path for repository. ' |
|
312 | 312 | 'Explicit type: %s', repo_name, explicit_scm) |
|
313 | 313 | |
|
314 | 314 | try: |
|
315 | 315 | if explicit_scm: |
|
316 | 316 | detected_scms = [get_scm_backend(explicit_scm)] |
|
317 | 317 | else: |
|
318 | 318 | detected_scms = get_scm(full_path) |
|
319 | 319 | |
|
320 | 320 | if expect_scm: |
|
321 | 321 | return detected_scms[0] == expect_scm |
|
322 | 322 | log.debug('path: %s is an vcs object:%s', full_path, detected_scms) |
|
323 | 323 | return True |
|
324 | 324 | except VCSError: |
|
325 | 325 | log.debug('path: %s is not a valid repo !', full_path) |
|
326 | 326 | return False |
|
327 | 327 | |
|
328 | 328 | |
|
329 | 329 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
330 | 330 | """ |
|
331 | 331 | Returns True if given path is a repository group, False otherwise |
|
332 | 332 | |
|
333 | 333 | :param repo_name: |
|
334 | 334 | :param base_path: |
|
335 | 335 | """ |
|
336 | 336 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
337 | 337 | log.debug('Checking if `%s` is a valid path for repository group', |
|
338 | 338 | repo_group_name) |
|
339 | 339 | |
|
340 | 340 | # check if it's not a repo |
|
341 | 341 | if is_valid_repo(repo_group_name, base_path): |
|
342 | 342 | log.debug('Repo called %s exist, it is not a valid ' |
|
343 | 343 | 'repo group' % repo_group_name) |
|
344 | 344 | return False |
|
345 | 345 | |
|
346 | 346 | try: |
|
347 | 347 | # we need to check bare git repos at higher level |
|
348 | 348 | # since we might match branches/hooks/info/objects or possible |
|
349 | 349 | # other things inside bare git repo |
|
350 | 350 | scm_ = get_scm(os.path.dirname(full_path)) |
|
351 | 351 | log.debug('path: %s is a vcs object:%s, not valid ' |
|
352 | 352 | 'repo group' % (full_path, scm_)) |
|
353 | 353 | return False |
|
354 | 354 | except VCSError: |
|
355 | 355 | pass |
|
356 | 356 | |
|
357 | 357 | # check if it's a valid path |
|
358 | 358 | if skip_path_check or os.path.isdir(full_path): |
|
359 | 359 | log.debug('path: %s is a valid repo group !', full_path) |
|
360 | 360 | return True |
|
361 | 361 | |
|
362 | 362 | log.debug('path: %s is not a valid repo group !', full_path) |
|
363 | 363 | return False |
|
364 | 364 | |
|
365 | 365 | |
|
366 | 366 | def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'): |
|
367 | 367 | while True: |
|
368 | 368 | ok = raw_input(prompt) |
|
369 | 369 | if ok.lower() in ('y', 'ye', 'yes'): |
|
370 | 370 | return True |
|
371 | 371 | if ok.lower() in ('n', 'no', 'nop', 'nope'): |
|
372 | 372 | return False |
|
373 | 373 | retries = retries - 1 |
|
374 | 374 | if retries < 0: |
|
375 | 375 | raise IOError |
|
376 | 376 | print(complaint) |
|
377 | 377 | |
|
378 | 378 | # propagated from mercurial documentation |
|
379 | 379 | ui_sections = [ |
|
380 | 380 | 'alias', 'auth', |
|
381 | 381 | 'decode/encode', 'defaults', |
|
382 | 382 | 'diff', 'email', |
|
383 | 383 | 'extensions', 'format', |
|
384 | 384 | 'merge-patterns', 'merge-tools', |
|
385 | 385 | 'hooks', 'http_proxy', |
|
386 | 386 | 'smtp', 'patch', |
|
387 | 387 | 'paths', 'profiling', |
|
388 | 388 | 'server', 'trusted', |
|
389 | 389 | 'ui', 'web', ] |
|
390 | 390 | |
|
391 | 391 | |
|
392 | 392 | def config_data_from_db(clear_session=True, repo=None): |
|
393 | 393 | """ |
|
394 | 394 | Read the configuration data from the database and return configuration |
|
395 | 395 | tuples. |
|
396 | 396 | """ |
|
397 | 397 | from rhodecode.model.settings import VcsSettingsModel |
|
398 | 398 | |
|
399 | 399 | config = [] |
|
400 | 400 | |
|
401 | 401 | sa = meta.Session() |
|
402 | 402 | settings_model = VcsSettingsModel(repo=repo, sa=sa) |
|
403 | 403 | |
|
404 | 404 | ui_settings = settings_model.get_ui_settings() |
|
405 | 405 | |
|
406 | 406 | for setting in ui_settings: |
|
407 | 407 | if setting.active: |
|
408 | 408 | log.debug( |
|
409 | 409 | 'settings ui from db: [%s] %s=%s', |
|
410 | 410 | setting.section, setting.key, setting.value) |
|
411 | 411 | config.append(( |
|
412 | 412 | safe_str(setting.section), safe_str(setting.key), |
|
413 | 413 | safe_str(setting.value))) |
|
414 | 414 | if setting.key == 'push_ssl': |
|
415 | 415 | # force set push_ssl requirement to False, rhodecode |
|
416 | 416 | # handles that |
|
417 | 417 | config.append(( |
|
418 | 418 | safe_str(setting.section), safe_str(setting.key), False)) |
|
419 | 419 | if clear_session: |
|
420 | 420 | meta.Session.remove() |
|
421 | 421 | |
|
422 | 422 | # TODO: mikhail: probably it makes no sense to re-read hooks information. |
|
423 | 423 | # It's already there and activated/deactivated |
|
424 | 424 | skip_entries = [] |
|
425 | 425 | enabled_hook_classes = get_enabled_hook_classes(ui_settings) |
|
426 | 426 | if 'pull' not in enabled_hook_classes: |
|
427 | 427 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL)) |
|
428 | 428 | if 'push' not in enabled_hook_classes: |
|
429 | 429 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH)) |
|
430 | 430 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH)) |
|
431 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY)) | |
|
431 | 432 | |
|
432 | 433 | config = [entry for entry in config if entry[:2] not in skip_entries] |
|
433 | 434 | |
|
434 | 435 | return config |
|
435 | 436 | |
|
436 | 437 | |
|
437 | 438 | def make_db_config(clear_session=True, repo=None): |
|
438 | 439 | """ |
|
439 | 440 | Create a :class:`Config` instance based on the values in the database. |
|
440 | 441 | """ |
|
441 | 442 | config = Config() |
|
442 | 443 | config_data = config_data_from_db(clear_session=clear_session, repo=repo) |
|
443 | 444 | for section, option, value in config_data: |
|
444 | 445 | config.set(section, option, value) |
|
445 | 446 | return config |
|
446 | 447 | |
|
447 | 448 | |
|
448 | 449 | def get_enabled_hook_classes(ui_settings): |
|
449 | 450 | """ |
|
450 | 451 | Return the enabled hook classes. |
|
451 | 452 | |
|
452 | 453 | :param ui_settings: List of ui_settings as returned |
|
453 | 454 | by :meth:`VcsSettingsModel.get_ui_settings` |
|
454 | 455 | |
|
455 | 456 | :return: a list with the enabled hook classes. The order is not guaranteed. |
|
456 | 457 | :rtype: list |
|
457 | 458 | """ |
|
458 | 459 | enabled_hooks = [] |
|
459 | 460 | active_hook_keys = [ |
|
460 | 461 | key for section, key, value, active in ui_settings |
|
461 | 462 | if section == 'hooks' and active] |
|
462 | 463 | |
|
463 | 464 | hook_names = { |
|
464 | 465 | RhodeCodeUi.HOOK_PUSH: 'push', |
|
465 | 466 | RhodeCodeUi.HOOK_PULL: 'pull', |
|
466 | 467 | RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size' |
|
467 | 468 | } |
|
468 | 469 | |
|
469 | 470 | for key in active_hook_keys: |
|
470 | 471 | hook = hook_names.get(key) |
|
471 | 472 | if hook: |
|
472 | 473 | enabled_hooks.append(hook) |
|
473 | 474 | |
|
474 | 475 | return enabled_hooks |
|
475 | 476 | |
|
476 | 477 | |
|
477 | 478 | def set_rhodecode_config(config): |
|
478 | 479 | """ |
|
479 | 480 | Updates pylons config with new settings from database |
|
480 | 481 | |
|
481 | 482 | :param config: |
|
482 | 483 | """ |
|
483 | 484 | from rhodecode.model.settings import SettingsModel |
|
484 | 485 | app_settings = SettingsModel().get_all_settings() |
|
485 | 486 | |
|
486 | 487 | for k, v in app_settings.items(): |
|
487 | 488 | config[k] = v |
|
488 | 489 | |
|
489 | 490 | |
|
490 | 491 | def get_rhodecode_realm(): |
|
491 | 492 | """ |
|
492 | 493 | Return the rhodecode realm from database. |
|
493 | 494 | """ |
|
494 | 495 | from rhodecode.model.settings import SettingsModel |
|
495 | 496 | realm = SettingsModel().get_setting_by_name('realm') |
|
496 | 497 | return safe_str(realm.app_settings_value) |
|
497 | 498 | |
|
498 | 499 | |
|
499 | 500 | def get_rhodecode_base_path(): |
|
500 | 501 | """ |
|
501 | 502 | Returns the base path. The base path is the filesystem path which points |
|
502 | 503 | to the repository store. |
|
503 | 504 | """ |
|
504 | 505 | from rhodecode.model.settings import SettingsModel |
|
505 | 506 | paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/') |
|
506 | 507 | return safe_str(paths_ui.ui_value) |
|
507 | 508 | |
|
508 | 509 | |
|
509 | 510 | def map_groups(path): |
|
510 | 511 | """ |
|
511 | 512 | Given a full path to a repository, create all nested groups that this |
|
512 | 513 | repo is inside. This function creates parent-child relationships between |
|
513 | 514 | groups and creates default perms for all new groups. |
|
514 | 515 | |
|
515 | 516 | :param paths: full path to repository |
|
516 | 517 | """ |
|
517 | 518 | from rhodecode.model.repo_group import RepoGroupModel |
|
518 | 519 | sa = meta.Session() |
|
519 | 520 | groups = path.split(Repository.NAME_SEP) |
|
520 | 521 | parent = None |
|
521 | 522 | group = None |
|
522 | 523 | |
|
523 | 524 | # last element is repo in nested groups structure |
|
524 | 525 | groups = groups[:-1] |
|
525 | 526 | rgm = RepoGroupModel(sa) |
|
526 | 527 | owner = User.get_first_super_admin() |
|
527 | 528 | for lvl, group_name in enumerate(groups): |
|
528 | 529 | group_name = '/'.join(groups[:lvl] + [group_name]) |
|
529 | 530 | group = RepoGroup.get_by_group_name(group_name) |
|
530 | 531 | desc = '%s group' % group_name |
|
531 | 532 | |
|
532 | 533 | # skip folders that are now removed repos |
|
533 | 534 | if REMOVED_REPO_PAT.match(group_name): |
|
534 | 535 | break |
|
535 | 536 | |
|
536 | 537 | if group is None: |
|
537 | 538 | log.debug('creating group level: %s group_name: %s', |
|
538 | 539 | lvl, group_name) |
|
539 | 540 | group = RepoGroup(group_name, parent) |
|
540 | 541 | group.group_description = desc |
|
541 | 542 | group.user = owner |
|
542 | 543 | sa.add(group) |
|
543 | 544 | perm_obj = rgm._create_default_perms(group) |
|
544 | 545 | sa.add(perm_obj) |
|
545 | 546 | sa.flush() |
|
546 | 547 | |
|
547 | 548 | parent = group |
|
548 | 549 | return group |
|
549 | 550 | |
|
550 | 551 | |
|
551 | 552 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
552 | 553 | """ |
|
553 | 554 | maps all repos given in initial_repo_list, non existing repositories |
|
554 | 555 | are created, if remove_obsolete is True it also checks for db entries |
|
555 | 556 | that are not in initial_repo_list and removes them. |
|
556 | 557 | |
|
557 | 558 | :param initial_repo_list: list of repositories found by scanning methods |
|
558 | 559 | :param remove_obsolete: check for obsolete entries in database |
|
559 | 560 | """ |
|
560 | 561 | from rhodecode.model.repo import RepoModel |
|
561 | 562 | from rhodecode.model.scm import ScmModel |
|
562 | 563 | from rhodecode.model.repo_group import RepoGroupModel |
|
563 | 564 | from rhodecode.model.settings import SettingsModel |
|
564 | 565 | |
|
565 | 566 | sa = meta.Session() |
|
566 | 567 | repo_model = RepoModel() |
|
567 | 568 | user = User.get_first_super_admin() |
|
568 | 569 | added = [] |
|
569 | 570 | |
|
570 | 571 | # creation defaults |
|
571 | 572 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
572 | 573 | enable_statistics = defs.get('repo_enable_statistics') |
|
573 | 574 | enable_locking = defs.get('repo_enable_locking') |
|
574 | 575 | enable_downloads = defs.get('repo_enable_downloads') |
|
575 | 576 | private = defs.get('repo_private') |
|
576 | 577 | |
|
577 | 578 | for name, repo in initial_repo_list.items(): |
|
578 | 579 | group = map_groups(name) |
|
579 | 580 | unicode_name = safe_unicode(name) |
|
580 | 581 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
581 | 582 | # found repo that is on filesystem not in RhodeCode database |
|
582 | 583 | if not db_repo: |
|
583 | 584 | log.info('repository %s not found, creating now', name) |
|
584 | 585 | added.append(name) |
|
585 | 586 | desc = (repo.description |
|
586 | 587 | if repo.description != 'unknown' |
|
587 | 588 | else '%s repository' % name) |
|
588 | 589 | |
|
589 | 590 | db_repo = repo_model._create_repo( |
|
590 | 591 | repo_name=name, |
|
591 | 592 | repo_type=repo.alias, |
|
592 | 593 | description=desc, |
|
593 | 594 | repo_group=getattr(group, 'group_id', None), |
|
594 | 595 | owner=user, |
|
595 | 596 | enable_locking=enable_locking, |
|
596 | 597 | enable_downloads=enable_downloads, |
|
597 | 598 | enable_statistics=enable_statistics, |
|
598 | 599 | private=private, |
|
599 | 600 | state=Repository.STATE_CREATED |
|
600 | 601 | ) |
|
601 | 602 | sa.commit() |
|
602 | 603 | # we added that repo just now, and make sure we updated server info |
|
603 | 604 | if db_repo.repo_type == 'git': |
|
604 | 605 | git_repo = db_repo.scm_instance() |
|
605 | 606 | # update repository server-info |
|
606 | 607 | log.debug('Running update server info') |
|
607 | 608 | git_repo._update_server_info() |
|
608 | 609 | |
|
609 | 610 | db_repo.update_commit_cache() |
|
610 | 611 | |
|
611 | 612 | config = db_repo._config |
|
612 | 613 | config.set('extensions', 'largefiles', '') |
|
613 | 614 | ScmModel().install_hooks( |
|
614 | 615 | db_repo.scm_instance(config=config), |
|
615 | 616 | repo_type=db_repo.repo_type) |
|
616 | 617 | |
|
617 | 618 | removed = [] |
|
618 | 619 | if remove_obsolete: |
|
619 | 620 | # remove from database those repositories that are not in the filesystem |
|
620 | 621 | for repo in sa.query(Repository).all(): |
|
621 | 622 | if repo.repo_name not in initial_repo_list.keys(): |
|
622 | 623 | log.debug("Removing non-existing repository found in db `%s`", |
|
623 | 624 | repo.repo_name) |
|
624 | 625 | try: |
|
625 | 626 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) |
|
626 | 627 | sa.commit() |
|
627 | 628 | removed.append(repo.repo_name) |
|
628 | 629 | except Exception: |
|
629 | 630 | # don't hold further removals on error |
|
630 | 631 | log.error(traceback.format_exc()) |
|
631 | 632 | sa.rollback() |
|
632 | 633 | |
|
633 | 634 | def splitter(full_repo_name): |
|
634 | 635 | _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1) |
|
635 | 636 | gr_name = None |
|
636 | 637 | if len(_parts) == 2: |
|
637 | 638 | gr_name = _parts[0] |
|
638 | 639 | return gr_name |
|
639 | 640 | |
|
640 | 641 | initial_repo_group_list = [splitter(x) for x in |
|
641 | 642 | initial_repo_list.keys() if splitter(x)] |
|
642 | 643 | |
|
643 | 644 | # remove from database those repository groups that are not in the |
|
644 | 645 | # filesystem due to parent child relationships we need to delete them |
|
645 | 646 | # in a specific order of most nested first |
|
646 | 647 | all_groups = [x.group_name for x in sa.query(RepoGroup).all()] |
|
647 | 648 | nested_sort = lambda gr: len(gr.split('/')) |
|
648 | 649 | for group_name in sorted(all_groups, key=nested_sort, reverse=True): |
|
649 | 650 | if group_name not in initial_repo_group_list: |
|
650 | 651 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
651 | 652 | if (repo_group.children.all() or |
|
652 | 653 | not RepoGroupModel().check_exist_filesystem( |
|
653 | 654 | group_name=group_name, exc_on_failure=False)): |
|
654 | 655 | continue |
|
655 | 656 | |
|
656 | 657 | log.info( |
|
657 | 658 | 'Removing non-existing repository group found in db `%s`', |
|
658 | 659 | group_name) |
|
659 | 660 | try: |
|
660 | 661 | RepoGroupModel(sa).delete(group_name, fs_remove=False) |
|
661 | 662 | sa.commit() |
|
662 | 663 | removed.append(group_name) |
|
663 | 664 | except Exception: |
|
664 | 665 | # don't hold further removals on error |
|
665 | 666 | log.exception( |
|
666 | 667 | 'Unable to remove repository group `%s`', |
|
667 | 668 | group_name) |
|
668 | 669 | sa.rollback() |
|
669 | 670 | raise |
|
670 | 671 | |
|
671 | 672 | return added, removed |
|
672 | 673 | |
|
673 | 674 | |
|
674 | 675 | def get_default_cache_settings(settings): |
|
675 | 676 | cache_settings = {} |
|
676 | 677 | for key in settings.keys(): |
|
677 | 678 | for prefix in ['beaker.cache.', 'cache.']: |
|
678 | 679 | if key.startswith(prefix): |
|
679 | 680 | name = key.split(prefix)[1].strip() |
|
680 | 681 | cache_settings[name] = settings[key].strip() |
|
681 | 682 | return cache_settings |
|
682 | 683 | |
|
683 | 684 | |
|
684 | 685 | # set cache regions for beaker so celery can utilise it |
|
685 | 686 | def add_cache(settings): |
|
686 | 687 | from rhodecode.lib import caches |
|
687 | 688 | cache_settings = {'regions': None} |
|
688 | 689 | # main cache settings used as default ... |
|
689 | 690 | cache_settings.update(get_default_cache_settings(settings)) |
|
690 | 691 | |
|
691 | 692 | if cache_settings['regions']: |
|
692 | 693 | for region in cache_settings['regions'].split(','): |
|
693 | 694 | region = region.strip() |
|
694 | 695 | region_settings = {} |
|
695 | 696 | for key, value in cache_settings.items(): |
|
696 | 697 | if key.startswith(region): |
|
697 | 698 | region_settings[key.split('.')[1]] = value |
|
698 | 699 | |
|
699 | 700 | caches.configure_cache_region( |
|
700 | 701 | region, region_settings, cache_settings) |
|
701 | 702 | |
|
702 | 703 | |
|
703 | 704 | def load_rcextensions(root_path): |
|
704 | 705 | import rhodecode |
|
705 | 706 | from rhodecode.config import conf |
|
706 | 707 | |
|
707 | 708 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
708 | 709 | if os.path.isfile(path): |
|
709 | 710 | rcext = create_module('rc', path) |
|
710 | 711 | EXT = rhodecode.EXTENSIONS = rcext |
|
711 | 712 | log.debug('Found rcextensions now loading %s...', rcext) |
|
712 | 713 | |
|
713 | 714 | # Additional mappings that are not present in the pygments lexers |
|
714 | 715 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
715 | 716 | |
|
716 | 717 | # auto check if the module is not missing any data, set to default if is |
|
717 | 718 | # this will help autoupdate new feature of rcext module |
|
718 | 719 | #from rhodecode.config import rcextensions |
|
719 | 720 | #for k in dir(rcextensions): |
|
720 | 721 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
721 | 722 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
722 | 723 | |
|
723 | 724 | |
|
724 | 725 | def get_custom_lexer(extension): |
|
725 | 726 | """ |
|
726 | 727 | returns a custom lexer if it is defined in rcextensions module, or None |
|
727 | 728 | if there's no custom lexer defined |
|
728 | 729 | """ |
|
729 | 730 | import rhodecode |
|
730 | 731 | from pygments import lexers |
|
731 | 732 | |
|
732 | 733 | # custom override made by RhodeCode |
|
733 | 734 | if extension in ['mako']: |
|
734 | 735 | return lexers.get_lexer_by_name('html+mako') |
|
735 | 736 | |
|
736 | 737 | # check if we didn't define this extension as other lexer |
|
737 | 738 | extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None) |
|
738 | 739 | if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS: |
|
739 | 740 | _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension] |
|
740 | 741 | return lexers.get_lexer_by_name(_lexer_name) |
|
741 | 742 | |
|
742 | 743 | |
|
743 | 744 | #============================================================================== |
|
744 | 745 | # TEST FUNCTIONS AND CREATORS |
|
745 | 746 | #============================================================================== |
|
746 | 747 | def create_test_index(repo_location, config): |
|
747 | 748 | """ |
|
748 | 749 | Makes default test index. |
|
749 | 750 | """ |
|
750 | 751 | import rc_testdata |
|
751 | 752 | |
|
752 | 753 | rc_testdata.extract_search_index( |
|
753 | 754 | 'vcs_search_index', os.path.dirname(config['search.location'])) |
|
754 | 755 | |
|
755 | 756 | |
|
756 | 757 | def create_test_directory(test_path): |
|
757 | 758 | """ |
|
758 | 759 | Create test directory if it doesn't exist. |
|
759 | 760 | """ |
|
760 | 761 | if not os.path.isdir(test_path): |
|
761 | 762 | log.debug('Creating testdir %s', test_path) |
|
762 | 763 | os.makedirs(test_path) |
|
763 | 764 | |
|
764 | 765 | |
|
765 | 766 | def create_test_database(test_path, config): |
|
766 | 767 | """ |
|
767 | 768 | Makes a fresh database. |
|
768 | 769 | """ |
|
769 | 770 | from rhodecode.lib.db_manage import DbManage |
|
770 | 771 | |
|
771 | 772 | # PART ONE create db |
|
772 | 773 | dbconf = config['sqlalchemy.db1.url'] |
|
773 | 774 | log.debug('making test db %s', dbconf) |
|
774 | 775 | |
|
775 | 776 | dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'], |
|
776 | 777 | tests=True, cli_args={'force_ask': True}) |
|
777 | 778 | dbmanage.create_tables(override=True) |
|
778 | 779 | dbmanage.set_db_version() |
|
779 | 780 | # for tests dynamically set new root paths based on generated content |
|
780 | 781 | dbmanage.create_settings(dbmanage.config_prompt(test_path)) |
|
781 | 782 | dbmanage.create_default_user() |
|
782 | 783 | dbmanage.create_test_admin_and_users() |
|
783 | 784 | dbmanage.create_permissions() |
|
784 | 785 | dbmanage.populate_default_permissions() |
|
785 | 786 | Session().commit() |
|
786 | 787 | |
|
787 | 788 | |
|
788 | 789 | def create_test_repositories(test_path, config): |
|
789 | 790 | """ |
|
790 | 791 | Creates test repositories in the temporary directory. Repositories are |
|
791 | 792 | extracted from archives within the rc_testdata package. |
|
792 | 793 | """ |
|
793 | 794 | import rc_testdata |
|
794 | 795 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO |
|
795 | 796 | |
|
796 | 797 | log.debug('making test vcs repositories') |
|
797 | 798 | |
|
798 | 799 | idx_path = config['search.location'] |
|
799 | 800 | data_path = config['cache_dir'] |
|
800 | 801 | |
|
801 | 802 | # clean index and data |
|
802 | 803 | if idx_path and os.path.exists(idx_path): |
|
803 | 804 | log.debug('remove %s', idx_path) |
|
804 | 805 | shutil.rmtree(idx_path) |
|
805 | 806 | |
|
806 | 807 | if data_path and os.path.exists(data_path): |
|
807 | 808 | log.debug('remove %s', data_path) |
|
808 | 809 | shutil.rmtree(data_path) |
|
809 | 810 | |
|
810 | 811 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) |
|
811 | 812 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
|
812 | 813 | |
|
813 | 814 | # Note: Subversion is in the process of being integrated with the system, |
|
814 | 815 | # until we have a properly packed version of the test svn repository, this |
|
815 | 816 | # tries to copy over the repo from a package "rc_testdata" |
|
816 | 817 | svn_repo_path = rc_testdata.get_svn_repo_archive() |
|
817 | 818 | with tarfile.open(svn_repo_path) as tar: |
|
818 | 819 | tar.extractall(jn(test_path, SVN_REPO)) |
|
819 | 820 | |
|
820 | 821 | |
|
821 | 822 | #============================================================================== |
|
822 | 823 | # PASTER COMMANDS |
|
823 | 824 | #============================================================================== |
|
824 | 825 | class BasePasterCommand(Command): |
|
825 | 826 | """ |
|
826 | 827 | Abstract Base Class for paster commands. |
|
827 | 828 | |
|
828 | 829 | The celery commands are somewhat aggressive about loading |
|
829 | 830 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
830 | 831 | environment variable to our loader, we have to bootstrap a bit and |
|
831 | 832 | make sure we've had a chance to load the pylons config off of the |
|
832 | 833 | command line, otherwise everything fails. |
|
833 | 834 | """ |
|
834 | 835 | min_args = 1 |
|
835 | 836 | min_args_error = "Please provide a paster config file as an argument." |
|
836 | 837 | takes_config_file = 1 |
|
837 | 838 | requires_config_file = True |
|
838 | 839 | |
|
839 | 840 | def notify_msg(self, msg, log=False): |
|
840 | 841 | """Make a notification to user, additionally if logger is passed |
|
841 | 842 | it logs this action using given logger |
|
842 | 843 | |
|
843 | 844 | :param msg: message that will be printed to user |
|
844 | 845 | :param log: logging instance, to use to additionally log this message |
|
845 | 846 | |
|
846 | 847 | """ |
|
847 | 848 | if log and isinstance(log, logging): |
|
848 | 849 | log(msg) |
|
849 | 850 | |
|
850 | 851 | def run(self, args): |
|
851 | 852 | """ |
|
852 | 853 | Overrides Command.run |
|
853 | 854 | |
|
854 | 855 | Checks for a config file argument and loads it. |
|
855 | 856 | """ |
|
856 | 857 | if len(args) < self.min_args: |
|
857 | 858 | raise BadCommand( |
|
858 | 859 | self.min_args_error % {'min_args': self.min_args, |
|
859 | 860 | 'actual_args': len(args)}) |
|
860 | 861 | |
|
861 | 862 | # Decrement because we're going to lob off the first argument. |
|
862 | 863 | # @@ This is hacky |
|
863 | 864 | self.min_args -= 1 |
|
864 | 865 | self.bootstrap_config(args[0]) |
|
865 | 866 | self.update_parser() |
|
866 | 867 | return super(BasePasterCommand, self).run(args[1:]) |
|
867 | 868 | |
|
868 | 869 | def update_parser(self): |
|
869 | 870 | """ |
|
870 | 871 | Abstract method. Allows for the class' parser to be updated |
|
871 | 872 | before the superclass' `run` method is called. Necessary to |
|
872 | 873 | allow options/arguments to be passed through to the underlying |
|
873 | 874 | celery command. |
|
874 | 875 | """ |
|
875 | 876 | raise NotImplementedError("Abstract Method.") |
|
876 | 877 | |
|
877 | 878 | def bootstrap_config(self, conf): |
|
878 | 879 | """ |
|
879 | 880 | Loads the pylons configuration. |
|
880 | 881 | """ |
|
881 | 882 | from pylons import config as pylonsconfig |
|
882 | 883 | |
|
883 | 884 | self.path_to_ini_file = os.path.realpath(conf) |
|
884 | 885 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) |
|
885 | 886 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
|
886 | 887 | |
|
887 | 888 | def _init_session(self): |
|
888 | 889 | """ |
|
889 | 890 | Inits SqlAlchemy Session |
|
890 | 891 | """ |
|
891 | 892 | logging.config.fileConfig(self.path_to_ini_file) |
|
892 | 893 | from pylons import config |
|
893 | 894 | from rhodecode.config.utils import initialize_database |
|
894 | 895 | |
|
895 | 896 | # get to remove repos !! |
|
896 | 897 | add_cache(config) |
|
897 | 898 | initialize_database(config) |
|
898 | 899 | |
|
899 | 900 | |
|
900 | 901 | @decorator.decorator |
|
901 | 902 | def jsonify(func, *args, **kwargs): |
|
902 | 903 | """Action decorator that formats output for JSON |
|
903 | 904 | |
|
904 | 905 | Given a function that will return content, this decorator will turn |
|
905 | 906 | the result into JSON, with a content-type of 'application/json' and |
|
906 | 907 | output it. |
|
907 | 908 | |
|
908 | 909 | """ |
|
909 | 910 | from pylons.decorators.util import get_pylons |
|
910 | 911 | from rhodecode.lib.ext_json import json |
|
911 | 912 | pylons = get_pylons(args) |
|
912 | 913 | pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8' |
|
913 | 914 | data = func(*args, **kwargs) |
|
914 | 915 | if isinstance(data, (list, tuple)): |
|
915 | 916 | msg = "JSON responses with Array envelopes are susceptible to " \ |
|
916 | 917 | "cross-site data leak attacks, see " \ |
|
917 | 918 | "http://wiki.pylonshq.com/display/pylonsfaq/Warnings" |
|
918 | 919 | warnings.warn(msg, Warning, 2) |
|
919 | 920 | log.warning(msg) |
|
920 | 921 | log.debug("Returning JSON wrapped action output") |
|
921 | 922 | return json.dumps(data, encoding='utf-8') |
|
922 | 923 | |
|
923 | 924 | |
|
924 | 925 | class PartialRenderer(object): |
|
925 | 926 | """ |
|
926 | 927 | Partial renderer used to render chunks of html used in datagrids |
|
927 | 928 | use like:: |
|
928 | 929 | |
|
929 | 930 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
930 | 931 | _render('quick_menu', args, kwargs) |
|
931 | 932 | PartialRenderer.h, |
|
932 | 933 | c, |
|
933 | 934 | _, |
|
934 | 935 | ungettext |
|
935 | 936 | are the template stuff initialized inside and can be re-used later |
|
936 | 937 | |
|
937 | 938 | :param tmpl_name: template path relate to /templates/ dir |
|
938 | 939 | """ |
|
939 | 940 | |
|
940 | 941 | def __init__(self, tmpl_name): |
|
941 | 942 | import rhodecode |
|
942 | 943 | from pylons import request, tmpl_context as c |
|
943 | 944 | from pylons.i18n.translation import _, ungettext |
|
944 | 945 | from rhodecode.lib import helpers as h |
|
945 | 946 | |
|
946 | 947 | self.tmpl_name = tmpl_name |
|
947 | 948 | self.rhodecode = rhodecode |
|
948 | 949 | self.c = c |
|
949 | 950 | self._ = _ |
|
950 | 951 | self.ungettext = ungettext |
|
951 | 952 | self.h = h |
|
952 | 953 | self.request = request |
|
953 | 954 | |
|
954 | 955 | def _mako_lookup(self): |
|
955 | 956 | _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup |
|
956 | 957 | return _tmpl_lookup.get_template(self.tmpl_name) |
|
957 | 958 | |
|
958 | 959 | def _update_kwargs_for_render(self, kwargs): |
|
959 | 960 | """ |
|
960 | 961 | Inject params required for Mako rendering |
|
961 | 962 | """ |
|
962 | 963 | _kwargs = { |
|
963 | 964 | '_': self._, |
|
964 | 965 | 'h': self.h, |
|
965 | 966 | 'c': self.c, |
|
966 | 967 | 'request': self.request, |
|
967 | 968 | 'ungettext': self.ungettext, |
|
968 | 969 | } |
|
969 | 970 | _kwargs.update(kwargs) |
|
970 | 971 | return _kwargs |
|
971 | 972 | |
|
972 | 973 | def _render_with_exc(self, render_func, args, kwargs): |
|
973 | 974 | try: |
|
974 | 975 | return render_func.render(*args, **kwargs) |
|
975 | 976 | except: |
|
976 | 977 | log.error(exceptions.text_error_template().render()) |
|
977 | 978 | raise |
|
978 | 979 | |
|
979 | 980 | def _get_template(self, template_obj, def_name): |
|
980 | 981 | if def_name: |
|
981 | 982 | tmpl = template_obj.get_def(def_name) |
|
982 | 983 | else: |
|
983 | 984 | tmpl = template_obj |
|
984 | 985 | return tmpl |
|
985 | 986 | |
|
986 | 987 | def render(self, def_name, *args, **kwargs): |
|
987 | 988 | lookup_obj = self._mako_lookup() |
|
988 | 989 | tmpl = self._get_template(lookup_obj, def_name=def_name) |
|
989 | 990 | kwargs = self._update_kwargs_for_render(kwargs) |
|
990 | 991 | return self._render_with_exc(tmpl, args, kwargs) |
|
991 | 992 | |
|
992 | 993 | def __call__(self, tmpl, *args, **kwargs): |
|
993 | 994 | return self.render(tmpl, *args, **kwargs) |
|
994 | 995 | |
|
995 | 996 | |
|
996 | 997 | def password_changed(auth_user, session): |
|
997 | 998 | # Never report password change in case of default user or anonymous user. |
|
998 | 999 | if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None: |
|
999 | 1000 | return False |
|
1000 | 1001 | |
|
1001 | 1002 | password_hash = md5(auth_user.password) if auth_user.password else None |
|
1002 | 1003 | rhodecode_user = session.get('rhodecode_user', {}) |
|
1003 | 1004 | session_password_hash = rhodecode_user.get('password', '') |
|
1004 | 1005 | return password_hash != session_password_hash |
|
1005 | 1006 | |
|
1006 | 1007 | |
|
1007 | 1008 | def read_opensource_licenses(): |
|
1008 | 1009 | global _license_cache |
|
1009 | 1010 | |
|
1010 | 1011 | if not _license_cache: |
|
1011 | 1012 | licenses = pkg_resources.resource_string( |
|
1012 | 1013 | 'rhodecode', 'config/licenses.json') |
|
1013 | 1014 | _license_cache = json.loads(licenses) |
|
1014 | 1015 | |
|
1015 | 1016 | return _license_cache |
|
1016 | 1017 | |
|
1017 | 1018 | |
|
1018 | 1019 | def get_registry(request): |
|
1019 | 1020 | """ |
|
1020 | 1021 | Utility to get the pyramid registry from a request. During migration to |
|
1021 | 1022 | pyramid we sometimes want to use the pyramid registry from pylons context. |
|
1022 | 1023 | Therefore this utility returns `request.registry` for pyramid requests and |
|
1023 | 1024 | uses `get_current_registry()` for pylons requests. |
|
1024 | 1025 | """ |
|
1025 | 1026 | try: |
|
1026 | 1027 | return request.registry |
|
1027 | 1028 | except AttributeError: |
|
1028 | 1029 | return get_current_registry() |
|
1029 | 1030 | |
|
1030 | 1031 | |
|
1031 | 1032 | def generate_platform_uuid(): |
|
1032 | 1033 | """ |
|
1033 | 1034 | Generates platform UUID based on it's name |
|
1034 | 1035 | """ |
|
1035 | 1036 | import platform |
|
1036 | 1037 | |
|
1037 | 1038 | try: |
|
1038 | 1039 | uuid_list = [platform.platform()] |
|
1039 | 1040 | return hashlib.sha256(':'.join(uuid_list)).hexdigest() |
|
1040 | 1041 | except Exception as e: |
|
1041 | 1042 | log.error('Failed to generate host uuid: %s' % e) |
|
1042 | 1043 | return 'UNDEFINED' |
@@ -1,810 +1,811 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import hashlib |
|
23 | 23 | import logging |
|
24 | 24 | from collections import namedtuple |
|
25 | 25 | from functools import wraps |
|
26 | 26 | |
|
27 | 27 | from rhodecode.lib import caches |
|
28 | 28 | from rhodecode.lib.utils2 import ( |
|
29 | 29 | Optional, AttributeDict, safe_str, remove_prefix, str2bool) |
|
30 | 30 | from rhodecode.lib.vcs.backends import base |
|
31 | 31 | from rhodecode.model import BaseModel |
|
32 | 32 | from rhodecode.model.db import ( |
|
33 | 33 | RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting) |
|
34 | 34 | from rhodecode.model.meta import Session |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
# lightweight read-only view of a ui settings row; returned by
# SettingsModel.get_ui()
UiSetting = namedtuple(
    'UiSetting', ['section', 'key', 'value', 'active'])

# names of the supported social auth plugins; used to build the
# 'rhodecode_auth_<name>_enabled' setting keys
SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
|
44 | 44 | |
|
45 | 45 | |
|
class SettingNotFound(Exception):
    """Raised when a requested ui setting row does not exist (delete_ui)."""
    def __init__(self):
        super(SettingNotFound, self).__init__('Setting is not found')
|
49 | 49 | |
|
50 | 50 | |
|
class SettingsModel(BaseModel):
    """
    Read/write access to ui- and application settings, either global
    (``RhodeCodeUi``/``RhodeCodeSetting``) or scoped to one repository
    (``RepoRhodeCodeUi``/``RepoRhodeCodeSetting``) when ``repo`` is given
    to the constructor.
    """

    # hooks shipped with RhodeCode; anything else found in the 'hooks'
    # ui section is considered a custom hook (see get_custom_hooks)
    BUILTIN_HOOKS = (
        RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
        RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
        RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
        RhodeCodeUi.HOOK_PUSH_KEY,)
    HOOKS_SECTION = 'hooks'

    def __init__(self, sa=None, repo=None):
        # when a repo is passed, all queries go against the repo-scoped
        # Ui/Settings tables instead of the global ones
        self.repo = repo
        self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
        self.SettingsDbModel = (
            RepoRhodeCodeSetting if repo else RhodeCodeSetting)
        super(SettingsModel, self).__init__(sa)

    def get_ui_by_key(self, key):
        """Return the single ui row matching *key*, or None."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_key == key)
        q = self._filter_by_repo(RepoRhodeCodeUi, q)
        return q.scalar()

    def get_ui_by_section(self, section):
        """Return all ui rows of *section*."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_section == section)
        q = self._filter_by_repo(RepoRhodeCodeUi, q)
        return q.all()

    def get_ui_by_section_and_key(self, section, key):
        """Return the single ui row matching *section* and *key*, or None."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_section == section)
        q = q.filter(self.UiDbModel.ui_key == key)
        q = self._filter_by_repo(RepoRhodeCodeUi, q)
        return q.scalar()

    def get_ui(self, section=None, key=None):
        """
        Return matching ui rows as a list of ``UiSetting`` namedtuples;
        both filters are optional.
        """
        q = self.UiDbModel.query()
        q = self._filter_by_repo(RepoRhodeCodeUi, q)

        if section:
            q = q.filter(self.UiDbModel.ui_section == section)
        if key:
            q = q.filter(self.UiDbModel.ui_key == key)

        # TODO: mikhail: add caching
        result = [
            UiSetting(
                section=safe_str(r.ui_section), key=safe_str(r.ui_key),
                value=safe_str(r.ui_value), active=r.ui_active
            )
            for r in q.all()
        ]
        return result

    def get_builtin_hooks(self):
        """Return hook ui rows shipped with RhodeCode (BUILTIN_HOOKS)."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
        return self._get_hooks(q)

    def get_custom_hooks(self):
        """Return user-defined hook ui rows (everything not built-in)."""
        q = self.UiDbModel.query()
        q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
        return self._get_hooks(q)

    def create_ui_section_value(self, section, val, key=None, active=True):
        """
        Add a new ui row to the session (no commit). When *key* is not
        given, a unique sha1 key is derived from section/value (and the
        repo id if repo-scoped).
        """
        new_ui = self.UiDbModel()
        new_ui.ui_section = section
        new_ui.ui_value = val
        new_ui.ui_active = active

        if self.repo:
            repo = self._get_repo(self.repo)
            repository_id = repo.repo_id
            new_ui.repository_id = repository_id

        if not key:
            # keys are unique so they need appended info
            if self.repo:
                key = hashlib.sha1(
                    '{}{}{}'.format(section, val, repository_id)).hexdigest()
            else:
                key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()

        new_ui.ui_key = key

        Session().add(new_ui)
        return new_ui

    def create_or_update_hook(self, key, value):
        """Upsert an (active) entry in the hooks ui section; no commit."""
        ui = (
            self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
            self.UiDbModel())
        ui.ui_section = self.HOOKS_SECTION
        ui.ui_active = True
        ui.ui_key = key
        ui.ui_value = value

        if self.repo:
            repo = self._get_repo(self.repo)
            repository_id = repo.repo_id
            ui.repository_id = repository_id

        Session().add(ui)
        return ui

    def delete_ui(self, id_):
        """Delete the ui row with primary key *id_*.

        :raises SettingNotFound: if no such row exists
        """
        ui = self.UiDbModel.get(id_)
        if not ui:
            raise SettingNotFound()
        Session().delete(ui)

    def get_setting_by_name(self, name):
        """Return the application setting row called *name*, or None."""
        q = self._get_settings_query()
        q = q.filter(self.SettingsDbModel.app_settings_name == name)
        return q.scalar()

    def create_or_update_setting(
            self, name, val=Optional(''), type_=Optional('unicode')):
        """
        Creates or updates a RhodeCode setting. On update, only parameters
        that are explicitly set are written; values left as ``Optional``
        instances are skipped.

        :param name: setting name
        :param val: new value, or keep the ``Optional`` default to leave an
            existing value unchanged
        :param type_: value type, or keep the ``Optional`` default to leave
            an existing type unchanged
        :return: the created/updated row (added to session, not committed)
        """

        res = self.get_setting_by_name(name)
        repo = self._get_repo(self.repo) if self.repo else None

        if not res:
            val = Optional.extract(val)
            type_ = Optional.extract(type_)

            # repo-scoped rows take the repo id as first constructor arg
            args = (
                (repo.repo_id, name, val, type_)
                if repo else (name, val, type_))
            res = self.SettingsDbModel(*args)

        else:
            if self.repo:
                res.repository_id = repo.repo_id

            res.app_settings_name = name
            if not isinstance(type_, Optional):
                # update if set
                res.app_settings_type = type_
            if not isinstance(val, Optional):
                # update if set
                res.app_settings_value = val

        Session().add(res)
        return res

    def invalidate_settings_cache(self):
        """Drop the cached results used by get_all_settings(cache=True)."""
        namespace = 'rhodecode_settings'
        cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
        caches.clear_cache_manager(cache_manager)

    def get_all_settings(self, cache=False):
        """
        Return all settings as a ``{'rhodecode_<name>': value}`` dict,
        optionally served from the 'rhodecode_settings' cache region.
        """

        def _compute():
            q = self._get_settings_query()
            if not q:
                raise Exception('Could not get application settings !')

            settings = {
                'rhodecode_' + result.app_settings_name: result.app_settings_value
                for result in q
            }
            return settings

        if cache:
            log.debug('Fetching app settings using cache')
            repo = self._get_repo(self.repo) if self.repo else None
            namespace = 'rhodecode_settings'
            cache_manager = caches.get_cache_manager(
                'sql_cache_short', namespace)
            # one cache entry per repo, plus one for the global scope
            _cache_key = (
                "get_repo_{}_settings".format(repo.repo_id)
                if repo else "get_app_settings")

            return cache_manager.get(_cache_key, createfunc=_compute)

        else:
            return _compute()

    def get_auth_settings(self):
        """Return all 'auth_*' settings as a plain name -> value dict."""
        q = self._get_settings_query()
        q = q.filter(
            self.SettingsDbModel.app_settings_name.startswith('auth_'))
        rows = q.all()
        auth_settings = {
            row.app_settings_name: row.app_settings_value for row in rows}
        return auth_settings

    def get_auth_plugins(self):
        """Return the value of the 'auth_plugins' setting."""
        auth_plugins = self.get_setting_by_name("auth_plugins")
        return auth_plugins.app_settings_value

    def get_default_repo_settings(self, strip_prefix=False):
        """
        Return all 'default_*' settings; with *strip_prefix* the
        'default_' part is removed from the returned keys.
        """
        q = self._get_settings_query()
        q = q.filter(
            self.SettingsDbModel.app_settings_name.startswith('default_'))
        rows = q.all()

        result = {}
        for row in rows:
            key = row.app_settings_name
            if strip_prefix:
                key = remove_prefix(key, prefix='default_')
            result.update({key: row.app_settings_value})
        return result

    def get_repo(self):
        """Return the repo this model is bound to; raise when not found."""
        repo = self._get_repo(self.repo)
        if not repo:
            raise Exception(
                'Repository `{}` cannot be found inside the database'.format(
                    self.repo))
        return repo

    def _filter_by_repo(self, model, query):
        # narrow a query to this model's repo when repo-scoped
        if self.repo:
            repo = self.get_repo()
            query = query.filter(model.repository_id == repo.repo_id)
        return query

    def _get_hooks(self, query):
        # restrict an arbitrary ui query to the hooks section
        query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
        query = self._filter_by_repo(RepoRhodeCodeUi, query)
        return query.all()

    def _get_settings_query(self):
        q = self.SettingsDbModel.query()
        return self._filter_by_repo(RepoRhodeCodeSetting, q)

    def list_enabled_social_plugins(self, settings):
        """Return names of social auth plugins enabled in *settings*."""
        enabled = []
        for plug in SOCIAL_PLUGINS_LIST:
            if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
                                     )):
                enabled.append(plug)
        return enabled
|
295 | 296 | |
|
296 | 297 | |
|
def assert_repo_settings(func):
    """Decorator guarding methods that need per-repository settings;
    raises when the model has no ``repo_settings`` bound."""
    @wraps(func)
    def _wrapper(self, *args, **kwargs):
        if self.repo_settings:
            return func(self, *args, **kwargs)
        raise Exception('Repository is not specified')
    return _wrapper
|
304 | 305 | |
|
305 | 306 | |
|
class IssueTrackerSettingsModel(object):
    """
    Access to the issue-tracker pattern settings, globally or scoped to a
    repository (when *repo* is given), with optional inheritance of the
    global configuration.
    """

    INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
    SETTINGS_PREFIX = 'issuetracker_'

    def __init__(self, sa=None, repo=None):
        self.global_settings = SettingsModel(sa=sa)
        # repo-scoped settings model, or None when operating globally
        self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None

    @property
    def inherit_global_settings(self):
        """Whether the bound repo falls back to the global issue-tracker
        config; always True when no repo is bound or the flag is unset."""
        if not self.repo_settings:
            return True
        setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
        return setting.app_settings_value if setting else True

    @inherit_global_settings.setter
    def inherit_global_settings(self, value):
        # only repos store the inherit flag; a no-op for the global model
        if self.repo_settings:
            settings = self.repo_settings.create_or_update_setting(
                self.INHERIT_SETTINGS, value, type_='bool')
            Session().add(settings)

    def _get_keyname(self, key, uid, prefix=''):
        # e.g. ('pat', '1', 'rhodecode_') -> 'rhodecode_issuetracker_pat_1'
        return '{0}{1}{2}_{3}'.format(
            prefix, self.SETTINGS_PREFIX, key, uid)

    def _make_dict_for_settings(self, qs):
        """Group flat 'rhodecode_issuetracker_*_<uid>' entries of *qs*
        into a {uid: AttributeDict(pat/url/pref/desc)} mapping."""
        prefix_match = self._get_keyname('pat', '', 'rhodecode_')

        issuetracker_entries = {}
        # create keys
        for k, v in qs.items():
            if k.startswith(prefix_match):
                uid = k[len(prefix_match):]
                issuetracker_entries[uid] = None

        # populate
        for uid in issuetracker_entries:
            issuetracker_entries[uid] = AttributeDict({
                'pat': qs.get(self._get_keyname('pat', uid, 'rhodecode_')),
                'url': qs.get(self._get_keyname('url', uid, 'rhodecode_')),
                'pref': qs.get(self._get_keyname('pref', uid, 'rhodecode_')),
                'desc': qs.get(self._get_keyname('desc', uid, 'rhodecode_')),
            })
        return issuetracker_entries

    def get_global_settings(self, cache=False):
        """
        Returns list of global issue tracker settings
        """
        defaults = self.global_settings.get_all_settings(cache=cache)
        settings = self._make_dict_for_settings(defaults)
        return settings

    def get_repo_settings(self, cache=False):
        """
        Returns list of issue tracker settings per repository
        """
        if not self.repo_settings:
            raise Exception('Repository is not specified')
        all_settings = self.repo_settings.get_all_settings(cache=cache)
        settings = self._make_dict_for_settings(all_settings)
        return settings

    def get_settings(self, cache=False):
        """Return the effective settings, honouring the inherit flag."""
        if self.inherit_global_settings:
            return self.get_global_settings(cache=cache)
        else:
            return self.get_repo_settings(cache=cache)

    def delete_entries(self, uid):
        """Delete all stored keys (pat/url/pref/desc) of the tracker entry
        *uid* and commit; an unknown *uid* is a harmless no-op."""
        if self.repo_settings:
            all_patterns = self.get_repo_settings()
            settings_model = self.repo_settings
        else:
            all_patterns = self.get_global_settings()
            settings_model = self.global_settings
        # fix: unknown uid previously yielded None here, and iterating it
        # below raised TypeError; now it simply deletes nothing
        entries = all_patterns.get(uid) or {}

        for del_key in entries:
            setting_name = self._get_keyname(del_key, uid)
            entry = settings_model.get_setting_by_name(setting_name)
            if entry:
                Session().delete(entry)

        Session().commit()

    def create_or_update_setting(
            self, name, val=Optional(''), type_=Optional('unicode')):
        """Upsert *name* on the repo scope when bound, else globally."""
        if self.repo_settings:
            setting = self.repo_settings.create_or_update_setting(
                name, val, type_)
        else:
            setting = self.global_settings.create_or_update_setting(
                name, val, type_)
        return setting
|
402 | 403 | |
|
403 | 404 | |
|
class VcsSettingsModel(object):
    """
    Facade over the vcs-related ui- and general settings, handling the
    global scope and the optional per-repository scope (with inheritance)
    in one place.
    """

    INHERIT_SETTINGS = 'inherit_vcs_settings'
    # plain (non-ui) settings, stored under a 'rhodecode_' prefix
    GENERAL_SETTINGS = (
        'use_outdated_comments',
        'pr_merge_enabled',
        'hg_use_rebase_for_merging')

    # (section, key) ui pairs managed by this model, per concern/scope
    HOOKS_SETTINGS = (
        ('hooks', 'changegroup.repo_size'),
        ('hooks', 'changegroup.push_logger'),
        ('hooks', 'outgoing.pull_logger'),)
    HG_SETTINGS = (
        ('extensions', 'largefiles'),
        ('phases', 'publish'),
        ('extensions', 'evolve'),)
    GIT_SETTINGS = (
        ('vcs_git_lfs', 'enabled'),)
    GLOBAL_HG_SETTINGS = (
        ('extensions', 'largefiles'),
        ('largefiles', 'usercache'),
        ('phases', 'publish'),
        ('extensions', 'hgsubversion'),
        ('extensions', 'evolve'),)
    GLOBAL_GIT_SETTINGS = (
        ('vcs_git_lfs', 'enabled'),
        ('vcs_git_lfs', 'store_location'))
    GLOBAL_SVN_SETTINGS = (
        ('vcs_svn_proxy', 'http_requests_enabled'),
        ('vcs_svn_proxy', 'http_server_url'))

    # ui sections that hold the svn branch/tag patterns
    SVN_BRANCH_SECTION = 'vcs_svn_branch'
    SVN_TAG_SECTION = 'vcs_svn_tag'
    SSL_SETTING = ('web', 'push_ssl')
    PATH_SETTING = ('paths', '/')
|
439 | 440 | |
|
    def __init__(self, sa=None, repo=None):
        self.global_settings = SettingsModel(sa=sa)
        # repo-scoped settings model, or None when operating globally
        self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
        # all ui settings that may be overridden per repository
        self._ui_settings = (
            self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
        self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)

    @property
    @assert_repo_settings
    def inherit_global_settings(self):
        """Whether the bound repo inherits the global vcs settings
        (defaults to True when the flag was never stored)."""
        setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
        return setting.app_settings_value if setting else True

    @inherit_global_settings.setter
    @assert_repo_settings
    def inherit_global_settings(self, value):
        self.repo_settings.create_or_update_setting(
            self.INHERIT_SETTINGS, value, type_='bool')

    def get_global_svn_branch_patterns(self):
        """Return the globally configured svn branch patterns."""
        return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)

    @assert_repo_settings
    def get_repo_svn_branch_patterns(self):
        """Return the repo-scoped svn branch patterns."""
        return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)

    def get_global_svn_tag_patterns(self):
        """Return the globally configured svn tag patterns."""
        return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)

    @assert_repo_settings
    def get_repo_svn_tag_patterns(self):
        """Return the repo-scoped svn tag patterns."""
        return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)

    def get_global_settings(self):
        """Collect all managed settings from the global scope."""
        return self._collect_all_settings(global_=True)

    @assert_repo_settings
    def get_repo_settings(self):
        """Collect all managed settings from the repo scope."""
        return self._collect_all_settings(global_=False)
|
479 | 480 | |
|
    @assert_repo_settings
    def create_or_update_repo_settings(
            self, data, inherit_global_settings=False):
        """
        Persist repo-scoped settings from form *data*; when inheriting,
        only the inherit flag is stored. Always invalidates the repo's
        vcs caches afterwards.
        """
        from rhodecode.model.scm import ScmModel

        self.inherit_global_settings = inherit_global_settings

        repo = self.repo_settings.get_repo()
        if not inherit_global_settings:
            if repo.repo_type == 'svn':
                self.create_repo_svn_settings(data)
            else:
                self.create_or_update_repo_hook_settings(data)
                self.create_or_update_repo_pr_settings(data)

            if repo.repo_type == 'hg':
                self.create_or_update_repo_hg_settings(data)

            if repo.repo_type == 'git':
                self.create_or_update_repo_git_settings(data)

        ScmModel().mark_for_invalidation(repo.repo_name, delete=True)

    @assert_repo_settings
    def create_or_update_repo_hook_settings(self, data):
        """Upsert the repo-scoped hook activation flags from form *data*.

        :raises ValueError: when a required form key is missing
        """
        for section, key in self.HOOKS_SETTINGS:
            data_key = self._get_form_ui_key(section, key)
            if data_key not in data:
                raise ValueError(
                    'The given data does not contain {} key'.format(data_key))

            active = data.get(data_key)
            repo_setting = self.repo_settings.get_ui_by_section_and_key(
                section, key)
            if not repo_setting:
                # seed the new repo override with the global hook value
                global_setting = self.global_settings.\
                    get_ui_by_section_and_key(section, key)
                self.repo_settings.create_ui_section_value(
                    section, global_setting.ui_value, key=key, active=active)
            else:
                repo_setting.ui_active = active
                Session().add(repo_setting)

    def update_global_hook_settings(self, data):
        """Update the global hook activation flags from form *data*.

        :raises ValueError: when a required form key is missing
        """
        for section, key in self.HOOKS_SETTINGS:
            data_key = self._get_form_ui_key(section, key)
            if data_key not in data:
                raise ValueError(
                    'The given data does not contain {} key'.format(data_key))
            active = data.get(data_key)
            repo_setting = self.global_settings.get_ui_by_section_and_key(
                section, key)
            repo_setting.ui_active = active
            Session().add(repo_setting)

    @assert_repo_settings
    def create_or_update_repo_pr_settings(self, data):
        """Upsert the repo-scoped pull-request (general) settings."""
        return self._create_or_update_general_settings(
            self.repo_settings, data)

    def create_or_update_global_pr_settings(self, data):
        """Upsert the global pull-request (general) settings."""
        return self._create_or_update_general_settings(
            self.global_settings, data)

    @assert_repo_settings
    def create_repo_svn_settings(self, data):
        """Store the repo-scoped svn branch/tag patterns from *data*."""
        return self._create_svn_settings(self.repo_settings, data)
|
547 | 548 | |
|
    @assert_repo_settings
    def create_or_update_repo_hg_settings(self, data):
        """Upsert the repo-scoped mercurial ui settings from form *data*."""
        largefiles, phases, evolve = \
            self.HG_SETTINGS
        largefiles_key, phases_key, evolve_key = \
            self._get_settings_keys(self.HG_SETTINGS, data)

        # largefiles/evolve are on/off switches, phases carries a value
        self._create_or_update_ui(
            self.repo_settings, *largefiles, value='',
            active=data[largefiles_key])
        self._create_or_update_ui(
            self.repo_settings, *evolve, value='',
            active=data[evolve_key])
        self._create_or_update_ui(
            self.repo_settings, *phases, value=safe_str(data[phases_key]))

    def create_or_update_global_hg_settings(self, data):
        """Upsert the global mercurial ui settings from form *data*."""
        largefiles, largefiles_store, phases, hgsubversion, evolve \
            = self.GLOBAL_HG_SETTINGS
        largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
            = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)

        self._create_or_update_ui(
            self.global_settings, *largefiles, value='',
            active=data[largefiles_key])
        self._create_or_update_ui(
            self.global_settings, *largefiles_store,
            value=data[largefiles_store_key])
        self._create_or_update_ui(
            self.global_settings, *phases, value=safe_str(data[phases_key]))
        self._create_or_update_ui(
            self.global_settings, *hgsubversion, active=data[subversion_key])
        self._create_or_update_ui(
            self.global_settings, *evolve, value='',
            active=data[evolve_key])

    def create_or_update_repo_git_settings(self, data):
        """Upsert the repo-scoped git ui settings (lfs) from form *data*."""
        # NOTE(marcink): # comma make unpack work properly
        lfs_enabled, \
            = self.GIT_SETTINGS

        lfs_enabled_key, \
            = self._get_settings_keys(self.GIT_SETTINGS, data)

        self._create_or_update_ui(
            self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
            active=data[lfs_enabled_key])

    def create_or_update_global_git_settings(self, data):
        """Upsert the global git ui settings (lfs) from form *data*."""
        lfs_enabled, lfs_store_location \
            = self.GLOBAL_GIT_SETTINGS
        lfs_enabled_key, lfs_store_location_key \
            = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)

        self._create_or_update_ui(
            self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
            active=data[lfs_enabled_key])
        self._create_or_update_ui(
            self.global_settings, *lfs_store_location,
            value=data[lfs_store_location_key])

    def create_or_update_global_svn_settings(self, data):
        """Upsert the global svn proxy settings and branch/tag patterns."""
        # branch/tags patterns
        self._create_svn_settings(self.global_settings, data)

        http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
        http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
            self.GLOBAL_SVN_SETTINGS, data)

        self._create_or_update_ui(
            self.global_settings, *http_requests_enabled,
            value=safe_str(data[http_requests_enabled_key]))
        self._create_or_update_ui(
            self.global_settings, *http_server_url,
            value=data[http_server_url_key])
|
623 | 624 | |
|
    def update_global_ssl_setting(self, value):
        """Set the global 'web.push_ssl' ui value."""
        self._create_or_update_ui(
            self.global_settings, *self.SSL_SETTING, value=value)

    def update_global_path_setting(self, value):
        """Set the global repository storage path ('paths./')."""
        self._create_or_update_ui(
            self.global_settings, *self.PATH_SETTING, value=value)

    @assert_repo_settings
    def delete_repo_svn_pattern(self, id_):
        """Delete a repo-scoped svn branch/tag pattern by its ui id."""
        self.repo_settings.delete_ui(id_)

    def delete_global_svn_pattern(self, id_):
        """Delete a global svn branch/tag pattern by its ui id."""
        self.global_settings.delete_ui(id_)

    @assert_repo_settings
    def get_repo_ui_settings(self, section=None, key=None):
        """
        Merge global and repo-scoped ui settings: repo overrides win for
        the managed settings, and svn sections always come from the repo.
        """
        global_uis = self.global_settings.get_ui(section, key)
        repo_uis = self.repo_settings.get_ui(section, key)
        filtered_repo_uis = self._filter_ui_settings(repo_uis)
        filtered_repo_uis_keys = [
            (s.section, s.key) for s in filtered_repo_uis]

        def _is_global_ui_filtered(ui):
            # drop global entries shadowed by a repo override or living
            # in the svn pattern sections
            return (
                (ui.section, ui.key) in filtered_repo_uis_keys
                or ui.section in self._svn_sections)

        filtered_global_uis = [
            ui for ui in global_uis if not _is_global_ui_filtered(ui)]

        return filtered_global_uis + filtered_repo_uis

    def get_global_ui_settings(self, section=None, key=None):
        """Return the global ui settings (optionally filtered)."""
        return self.global_settings.get_ui(section, key)

    def get_ui_settings_as_config_obj(self, section=None, key=None):
        """Pack the effective ui settings into a vcs ``base.Config``."""
        config = base.Config()

        ui_settings = self.get_ui_settings(section=section, key=key)

        for entry in ui_settings:
            config.set(entry.section, entry.key, entry.value)

        return config
|
669 | 670 | |
|
670 | 671 | def get_ui_settings(self, section=None, key=None): |
|
671 | 672 | if not self.repo_settings or self.inherit_global_settings: |
|
672 | 673 | return self.get_global_ui_settings(section, key) |
|
673 | 674 | else: |
|
674 | 675 | return self.get_repo_ui_settings(section, key) |
|
675 | 676 | |
|
    def get_svn_patterns(self, section=None):
        """Return svn patterns of *section*, repo-scoped when possible."""
        if not self.repo_settings:
            return self.get_global_ui_settings(section)
        else:
            return self.get_repo_ui_settings(section)

    @assert_repo_settings
    def get_repo_general_settings(self):
        """Return the general settings with repo overrides applied on top
        of the global values."""
        global_settings = self.global_settings.get_all_settings()
        repo_settings = self.repo_settings.get_all_settings()
        filtered_repo_settings = self._filter_general_settings(repo_settings)
        global_settings.update(filtered_repo_settings)
        return global_settings

    def get_global_general_settings(self):
        """Return the global general settings."""
        return self.global_settings.get_all_settings()

    def get_general_settings(self):
        """Return the effective general settings, honouring inheritance."""
        if not self.repo_settings or self.inherit_global_settings:
            return self.get_global_general_settings()
        else:
            return self.get_repo_general_settings()

    def get_repos_location(self):
        """Return the filesystem path where repositories are stored
        (the global 'paths./' ui value)."""
        return self.global_settings.get_ui_by_key('/').ui_value
|
701 | 702 | |
|
702 | 703 | def _filter_ui_settings(self, settings): |
|
703 | 704 | filtered_settings = [ |
|
704 | 705 | s for s in settings if self._should_keep_setting(s)] |
|
705 | 706 | return filtered_settings |
|
706 | 707 | |
|
707 | 708 | def _should_keep_setting(self, setting): |
|
708 | 709 | keep = ( |
|
709 | 710 | (setting.section, setting.key) in self._ui_settings or |
|
710 | 711 | setting.section in self._svn_sections) |
|
711 | 712 | return keep |
|
712 | 713 | |
|
713 | 714 | def _filter_general_settings(self, settings): |
|
714 | 715 | keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS] |
|
715 | 716 | return { |
|
716 | 717 | k: settings[k] |
|
717 | 718 | for k in settings if k in keys} |
|
718 | 719 | |
|
    def _collect_all_settings(self, global_=False):
        """
        Gather all managed ui- and general settings of one scope into a
        flat form-style dict. For hooks/extensions and the git-lfs switch
        the active flag is reported instead of the stored value.
        """
        settings = self.global_settings if global_ else self.repo_settings
        result = {}

        for section, key in self._ui_settings:
            ui = settings.get_ui_by_section_and_key(section, key)
            result_key = self._get_form_ui_key(section, key)

            if ui:
                if section in ('hooks', 'extensions'):
                    result[result_key] = ui.ui_active
                elif result_key in ['vcs_git_lfs_enabled']:
                    result[result_key] = ui.ui_active
                else:
                    result[result_key] = ui.ui_value

        for name in self.GENERAL_SETTINGS:
            setting = settings.get_setting_by_name(name)
            if setting:
                result_key = 'rhodecode_{}'.format(name)
                result[result_key] = setting.app_settings_value

        return result
|
742 | 743 | |
|
743 | 744 | def _get_form_ui_key(self, section, key): |
|
744 | 745 | return '{section}_{key}'.format( |
|
745 | 746 | section=section, key=key.replace('.', '_')) |
|
746 | 747 | |
|
747 | 748 | def _create_or_update_ui( |
|
748 | 749 | self, settings, section, key, value=None, active=None): |
|
749 | 750 | ui = settings.get_ui_by_section_and_key(section, key) |
|
750 | 751 | if not ui: |
|
751 | 752 | active = True if active is None else active |
|
752 | 753 | settings.create_ui_section_value( |
|
753 | 754 | section, value, key=key, active=active) |
|
754 | 755 | else: |
|
755 | 756 | if active is not None: |
|
756 | 757 | ui.ui_active = active |
|
757 | 758 | if value is not None: |
|
758 | 759 | ui.ui_value = value |
|
759 | 760 | Session().add(ui) |
|
760 | 761 | |
|
761 | 762 | def _create_svn_settings(self, settings, data): |
|
762 | 763 | svn_settings = { |
|
763 | 764 | 'new_svn_branch': self.SVN_BRANCH_SECTION, |
|
764 | 765 | 'new_svn_tag': self.SVN_TAG_SECTION |
|
765 | 766 | } |
|
766 | 767 | for key in svn_settings: |
|
767 | 768 | if data.get(key): |
|
768 | 769 | settings.create_ui_section_value(svn_settings[key], data[key]) |
|
769 | 770 | |
|
770 | 771 | def _create_or_update_general_settings(self, settings, data): |
|
771 | 772 | for name in self.GENERAL_SETTINGS: |
|
772 | 773 | data_key = 'rhodecode_{}'.format(name) |
|
773 | 774 | if data_key not in data: |
|
774 | 775 | raise ValueError( |
|
775 | 776 | 'The given data does not contain {} key'.format(data_key)) |
|
776 | 777 | setting = settings.create_or_update_setting( |
|
777 | 778 | name, data[data_key], 'bool') |
|
778 | 779 | Session().add(setting) |
|
779 | 780 | |
|
780 | 781 | def _get_settings_keys(self, settings, data): |
|
781 | 782 | data_keys = [self._get_form_ui_key(*s) for s in settings] |
|
782 | 783 | for data_key in data_keys: |
|
783 | 784 | if data_key not in data: |
|
784 | 785 | raise ValueError( |
|
785 | 786 | 'The given data does not contain {} key'.format(data_key)) |
|
786 | 787 | return data_keys |
|
787 | 788 | |
|
788 | 789 | def create_largeobjects_dirs_if_needed(self, repo_store_path): |
|
789 | 790 | """ |
|
790 | 791 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It |
|
791 | 792 | does a repository scan if enabled in the settings. |
|
792 | 793 | """ |
|
793 | 794 | |
|
794 | 795 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
795 | 796 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
796 | 797 | |
|
797 | 798 | paths = [ |
|
798 | 799 | largefiles_store(repo_store_path), |
|
799 | 800 | lfs_store(repo_store_path)] |
|
800 | 801 | |
|
801 | 802 | for path in paths: |
|
802 | 803 | if os.path.isdir(path): |
|
803 | 804 | continue |
|
804 | 805 | if os.path.isfile(path): |
|
805 | 806 | continue |
|
806 | 807 | # not a file nor dir, we try to create it |
|
807 | 808 | try: |
|
808 | 809 | os.makedirs(path) |
|
809 | 810 | except Exception: |
|
810 | 811 | log.warning('Failed to create largefiles dir:%s', path) |
@@ -1,471 +1,473 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | import multiprocessing |
|
23 | 23 | import os |
|
24 | 24 | |
|
25 | 25 | import mock |
|
26 | 26 | import py |
|
27 | 27 | import pytest |
|
28 | 28 | |
|
29 | 29 | from rhodecode.lib import caching_query |
|
30 | 30 | from rhodecode.lib import utils |
|
31 | 31 | from rhodecode.lib.utils2 import md5 |
|
32 | 32 | from rhodecode.model import settings |
|
33 | 33 | from rhodecode.model import db |
|
34 | 34 | from rhodecode.model import meta |
|
35 | 35 | from rhodecode.model.repo import RepoModel |
|
36 | 36 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | 37 | from rhodecode.model.scm import ScmModel |
|
38 | 38 | from rhodecode.model.settings import UiSetting, SettingsModel |
|
39 | 39 | from rhodecode.tests.fixture import Fixture |
|
40 | 40 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | fixture = Fixture() |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | def extract_hooks(config): |
|
47 | 47 | """Return a dictionary with the hook entries of the given config.""" |
|
48 | 48 | hooks = {} |
|
49 | 49 | config_items = config.serialize() |
|
50 | 50 | for section, name, value in config_items: |
|
51 | 51 | if section != 'hooks': |
|
52 | 52 | continue |
|
53 | 53 | hooks[name] = value |
|
54 | 54 | |
|
55 | 55 | return hooks |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | def disable_hooks(request, hooks): |
|
59 | 59 | """Disables the given hooks from the UI settings.""" |
|
60 | 60 | session = meta.Session() |
|
61 | 61 | |
|
62 | 62 | model = SettingsModel() |
|
63 | 63 | for hook_key in hooks: |
|
64 | 64 | sett = model.get_ui_by_key(hook_key) |
|
65 | 65 | sett.ui_active = False |
|
66 | 66 | session.add(sett) |
|
67 | 67 | |
|
68 | 68 | # Invalidate cache |
|
69 | 69 | ui_settings = session.query(db.RhodeCodeUi).options( |
|
70 | 70 | caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings')) |
|
71 | 71 | ui_settings.invalidate() |
|
72 | 72 | |
|
73 | 73 | ui_settings = session.query(db.RhodeCodeUi).options( |
|
74 | 74 | caching_query.FromCache( |
|
75 | 75 | 'sql_cache_short', 'get_hook_settings', 'get_hook_settings')) |
|
76 | 76 | ui_settings.invalidate() |
|
77 | 77 | |
|
78 | 78 | @request.addfinalizer |
|
79 | 79 | def rollback(): |
|
80 | 80 | session.rollback() |
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH |
|
84 | 84 | HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH |
|
85 | 85 | HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH |
|
86 | 86 | HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL |
|
87 | 87 | HOOK_PULL = db.RhodeCodeUi.HOOK_PULL |
|
88 | 88 | HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE |
|
89 | HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY | |
|
89 | 90 | |
|
90 | 91 | HG_HOOKS = frozenset( |
|
91 | 92 | (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, |
|
92 | HOOK_REPO_SIZE)) | |
|
93 | HOOK_REPO_SIZE, HOOK_PUSH_KEY)) | |
|
93 | 94 | |
|
94 | 95 | |
|
95 | 96 | @pytest.mark.parametrize('disabled_hooks,expected_hooks', [ |
|
96 | 97 | ([], HG_HOOKS), |
|
97 | 98 | (HG_HOOKS, []), |
|
98 | 99 | |
|
99 | ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]), | |
|
100 | ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]), | |
|
100 | 101 | |
|
101 | 102 | # When a pull/push hook is disabled, its pre-pull/push counterpart should |
|
102 | 103 | # be disabled too. |
|
103 | 104 | ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]), |
|
104 |
([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE |
|
|
105 | ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE, | |
|
106 | HOOK_PUSH_KEY]), | |
|
105 | 107 | ]) |
|
106 | 108 | def test_make_db_config_hg_hooks(pylonsapp, request, disabled_hooks, |
|
107 | 109 | expected_hooks): |
|
108 | 110 | disable_hooks(request, disabled_hooks) |
|
109 | 111 | |
|
110 | 112 | config = utils.make_db_config() |
|
111 | 113 | hooks = extract_hooks(config) |
|
112 | 114 | |
|
113 | 115 | assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks) |
|
114 | 116 | |
|
115 | 117 | |
|
116 | 118 | @pytest.mark.parametrize('disabled_hooks,expected_hooks', [ |
|
117 | 119 | ([], ['pull', 'push']), |
|
118 | 120 | ([HOOK_PUSH], ['pull']), |
|
119 | 121 | ([HOOK_PULL], ['push']), |
|
120 | 122 | ([HOOK_PULL, HOOK_PUSH], []), |
|
121 | 123 | ]) |
|
122 | 124 | def test_get_enabled_hook_classes(disabled_hooks, expected_hooks): |
|
123 | 125 | hook_keys = (HOOK_PUSH, HOOK_PULL) |
|
124 | 126 | ui_settings = [ |
|
125 | 127 | ('hooks', key, 'some value', key not in disabled_hooks) |
|
126 | 128 | for key in hook_keys] |
|
127 | 129 | |
|
128 | 130 | result = utils.get_enabled_hook_classes(ui_settings) |
|
129 | 131 | assert sorted(result) == expected_hooks |
|
130 | 132 | |
|
131 | 133 | |
|
132 | 134 | def test_get_filesystem_repos_finds_repos(tmpdir, pylonsapp): |
|
133 | 135 | _stub_git_repo(tmpdir.ensure('repo', dir=True)) |
|
134 | 136 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
135 | 137 | assert repos == [('repo', ('git', tmpdir.join('repo')))] |
|
136 | 138 | |
|
137 | 139 | |
|
138 | 140 | def test_get_filesystem_repos_skips_directories(tmpdir, pylonsapp): |
|
139 | 141 | tmpdir.ensure('not-a-repo', dir=True) |
|
140 | 142 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
141 | 143 | assert repos == [] |
|
142 | 144 | |
|
143 | 145 | |
|
144 | 146 | def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, pylonsapp): |
|
145 | 147 | _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True)) |
|
146 | 148 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
147 | 149 | assert repos == [] |
|
148 | 150 | |
|
149 | 151 | |
|
150 | 152 | def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, pylonsapp): |
|
151 | 153 | _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True)) |
|
152 | 154 | repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True)) |
|
153 | 155 | assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))] |
|
154 | 156 | |
|
155 | 157 | |
|
156 | 158 | def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir): |
|
157 | 159 | _stub_git_repo(tmpdir.ensure('.repo', dir=True)) |
|
158 | 160 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
159 | 161 | assert repos == [] |
|
160 | 162 | |
|
161 | 163 | |
|
162 | 164 | def test_get_filesystem_repos_skips_files(tmpdir): |
|
163 | 165 | tmpdir.ensure('test-file') |
|
164 | 166 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
165 | 167 | assert repos == [] |
|
166 | 168 | |
|
167 | 169 | |
|
168 | 170 | def test_get_filesystem_repos_skips_removed_repositories(tmpdir): |
|
169 | 171 | removed_repo_name = 'rm__00000000_000000_000000__.stub' |
|
170 | 172 | assert utils.REMOVED_REPO_PAT.match(removed_repo_name) |
|
171 | 173 | _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True)) |
|
172 | 174 | repos = list(utils.get_filesystem_repos(str(tmpdir))) |
|
173 | 175 | assert repos == [] |
|
174 | 176 | |
|
175 | 177 | |
|
176 | 178 | def _stub_git_repo(repo_path): |
|
177 | 179 | """ |
|
178 | 180 | Make `repo_path` look like a Git repository. |
|
179 | 181 | """ |
|
180 | 182 | repo_path.ensure('.git', dir=True) |
|
181 | 183 | |
|
182 | 184 | |
|
183 | 185 | @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode']) |
|
184 | 186 | def test_get_dirpaths_returns_all_paths(tmpdir, str_class): |
|
185 | 187 | tmpdir.ensure('test-file') |
|
186 | 188 | dirpaths = utils._get_dirpaths(str_class(tmpdir)) |
|
187 | 189 | assert dirpaths == ['test-file'] |
|
188 | 190 | |
|
189 | 191 | |
|
190 | 192 | def test_get_dirpaths_returns_all_paths_bytes( |
|
191 | 193 | tmpdir, platform_encodes_filenames): |
|
192 | 194 | if platform_encodes_filenames: |
|
193 | 195 | pytest.skip("This platform seems to encode filenames.") |
|
194 | 196 | tmpdir.ensure('repo-a-umlaut-\xe4') |
|
195 | 197 | dirpaths = utils._get_dirpaths(str(tmpdir)) |
|
196 | 198 | assert dirpaths == ['repo-a-umlaut-\xe4'] |
|
197 | 199 | |
|
198 | 200 | |
|
199 | 201 | def test_get_dirpaths_skips_paths_it_cannot_decode( |
|
200 | 202 | tmpdir, platform_encodes_filenames): |
|
201 | 203 | if platform_encodes_filenames: |
|
202 | 204 | pytest.skip("This platform seems to encode filenames.") |
|
203 | 205 | path_with_latin1 = 'repo-a-umlaut-\xe4' |
|
204 | 206 | tmpdir.ensure(path_with_latin1) |
|
205 | 207 | dirpaths = utils._get_dirpaths(unicode(tmpdir)) |
|
206 | 208 | assert dirpaths == [] |
|
207 | 209 | |
|
208 | 210 | |
|
209 | 211 | @pytest.fixture(scope='session') |
|
210 | 212 | def platform_encodes_filenames(): |
|
211 | 213 | """ |
|
212 | 214 | Boolean indicator if the current platform changes filename encodings. |
|
213 | 215 | """ |
|
214 | 216 | path_with_latin1 = 'repo-a-umlaut-\xe4' |
|
215 | 217 | tmpdir = py.path.local.mkdtemp() |
|
216 | 218 | tmpdir.ensure(path_with_latin1) |
|
217 | 219 | read_path = tmpdir.listdir()[0].basename |
|
218 | 220 | tmpdir.remove() |
|
219 | 221 | return path_with_latin1 != read_path |
|
220 | 222 | |
|
221 | 223 | |
|
222 | 224 | def test_action_logger_action_size(pylonsapp, test_repo): |
|
223 | 225 | action = 'x' * 1200001 |
|
224 | 226 | utils.action_logger(TEST_USER_ADMIN_LOGIN, action, test_repo, commit=True) |
|
225 | 227 | |
|
226 | 228 | |
|
227 | 229 | @pytest.fixture |
|
228 | 230 | def repo_groups(request): |
|
229 | 231 | session = meta.Session() |
|
230 | 232 | zombie_group = fixture.create_repo_group('zombie') |
|
231 | 233 | parent_group = fixture.create_repo_group('parent') |
|
232 | 234 | child_group = fixture.create_repo_group('parent/child') |
|
233 | 235 | groups_in_db = session.query(db.RepoGroup).all() |
|
234 | 236 | assert len(groups_in_db) == 3 |
|
235 | 237 | assert child_group.group_parent_id == parent_group.group_id |
|
236 | 238 | |
|
237 | 239 | @request.addfinalizer |
|
238 | 240 | def cleanup(): |
|
239 | 241 | fixture.destroy_repo_group(zombie_group) |
|
240 | 242 | fixture.destroy_repo_group(child_group) |
|
241 | 243 | fixture.destroy_repo_group(parent_group) |
|
242 | 244 | |
|
243 | 245 | return (zombie_group, parent_group, child_group) |
|
244 | 246 | |
|
245 | 247 | |
|
246 | 248 | def test_repo2db_mapper_groups(repo_groups): |
|
247 | 249 | session = meta.Session() |
|
248 | 250 | zombie_group, parent_group, child_group = repo_groups |
|
249 | 251 | zombie_path = os.path.join( |
|
250 | 252 | RepoGroupModel().repos_path, zombie_group.full_path) |
|
251 | 253 | os.rmdir(zombie_path) |
|
252 | 254 | |
|
253 | 255 | # Avoid removing test repos when calling repo2db_mapper |
|
254 | 256 | repo_list = { |
|
255 | 257 | repo.repo_name: 'test' for repo in session.query(db.Repository).all() |
|
256 | 258 | } |
|
257 | 259 | utils.repo2db_mapper(repo_list, remove_obsolete=True) |
|
258 | 260 | |
|
259 | 261 | groups_in_db = session.query(db.RepoGroup).all() |
|
260 | 262 | assert child_group in groups_in_db |
|
261 | 263 | assert parent_group in groups_in_db |
|
262 | 264 | assert zombie_path not in groups_in_db |
|
263 | 265 | |
|
264 | 266 | |
|
265 | 267 | def test_repo2db_mapper_enables_largefiles(backend): |
|
266 | 268 | repo = backend.create_repo() |
|
267 | 269 | repo_list = {repo.repo_name: 'test'} |
|
268 | 270 | with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock: |
|
269 | 271 | with mock.patch.multiple('rhodecode.model.scm.ScmModel', |
|
270 | 272 | install_git_hook=mock.DEFAULT, |
|
271 | 273 | install_svn_hooks=mock.DEFAULT): |
|
272 | 274 | utils.repo2db_mapper(repo_list, remove_obsolete=False) |
|
273 | 275 | _, kwargs = scm_mock.call_args |
|
274 | 276 | assert kwargs['config'].get('extensions', 'largefiles') == '' |
|
275 | 277 | |
|
276 | 278 | |
|
277 | 279 | @pytest.mark.backends("git", "svn") |
|
278 | 280 | def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend): |
|
279 | 281 | repo = backend.create_repo() |
|
280 | 282 | repo_list = {repo.repo_name: 'test'} |
|
281 | 283 | with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock: |
|
282 | 284 | utils.repo2db_mapper(repo_list, remove_obsolete=False) |
|
283 | 285 | install_hooks_mock.assert_called_once_with( |
|
284 | 286 | repo.scm_instance(), repo_type=backend.alias) |
|
285 | 287 | |
|
286 | 288 | |
|
287 | 289 | @pytest.mark.backends("git", "svn") |
|
288 | 290 | def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend): |
|
289 | 291 | repo = backend.create_repo() |
|
290 | 292 | RepoModel().delete(repo, fs_remove=False) |
|
291 | 293 | meta.Session().commit() |
|
292 | 294 | repo_list = {repo.repo_name: repo.scm_instance()} |
|
293 | 295 | with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock: |
|
294 | 296 | utils.repo2db_mapper(repo_list, remove_obsolete=False) |
|
295 | 297 | assert install_hooks_mock.call_count == 1 |
|
296 | 298 | install_hooks_args, _ = install_hooks_mock.call_args |
|
297 | 299 | assert install_hooks_args[0].name == repo.repo_name |
|
298 | 300 | |
|
299 | 301 | |
|
300 | 302 | class TestPasswordChanged(object): |
|
301 | 303 | def setup(self): |
|
302 | 304 | self.session = { |
|
303 | 305 | 'rhodecode_user': { |
|
304 | 306 | 'password': '0cc175b9c0f1b6a831c399e269772661' |
|
305 | 307 | } |
|
306 | 308 | } |
|
307 | 309 | self.auth_user = mock.Mock() |
|
308 | 310 | self.auth_user.userame = 'test' |
|
309 | 311 | self.auth_user.password = 'abc123' |
|
310 | 312 | |
|
311 | 313 | def test_returns_false_for_default_user(self): |
|
312 | 314 | self.auth_user.username = db.User.DEFAULT_USER |
|
313 | 315 | result = utils.password_changed(self.auth_user, self.session) |
|
314 | 316 | assert result is False |
|
315 | 317 | |
|
316 | 318 | def test_returns_false_if_password_was_not_changed(self): |
|
317 | 319 | self.session['rhodecode_user']['password'] = md5( |
|
318 | 320 | self.auth_user.password) |
|
319 | 321 | result = utils.password_changed(self.auth_user, self.session) |
|
320 | 322 | assert result is False |
|
321 | 323 | |
|
322 | 324 | def test_returns_true_if_password_was_changed(self): |
|
323 | 325 | result = utils.password_changed(self.auth_user, self.session) |
|
324 | 326 | assert result is True |
|
325 | 327 | |
|
326 | 328 | def test_returns_true_if_auth_user_password_is_empty(self): |
|
327 | 329 | self.auth_user.password = None |
|
328 | 330 | result = utils.password_changed(self.auth_user, self.session) |
|
329 | 331 | assert result is True |
|
330 | 332 | |
|
331 | 333 | def test_returns_true_if_session_password_is_empty(self): |
|
332 | 334 | self.session['rhodecode_user'].pop('password') |
|
333 | 335 | result = utils.password_changed(self.auth_user, self.session) |
|
334 | 336 | assert result is True |
|
335 | 337 | |
|
336 | 338 | |
|
337 | 339 | class TestReadOpensourceLicenses(object): |
|
338 | 340 | def test_success(self): |
|
339 | 341 | utils._license_cache = None |
|
340 | 342 | json_data = ''' |
|
341 | 343 | { |
|
342 | 344 | "python2.7-pytest-2.7.1": {"UNKNOWN": null}, |
|
343 | 345 | "python2.7-Markdown-2.6.2": { |
|
344 | 346 | "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause" |
|
345 | 347 | } |
|
346 | 348 | } |
|
347 | 349 | ''' |
|
348 | 350 | resource_string_patch = mock.patch.object( |
|
349 | 351 | utils.pkg_resources, 'resource_string', return_value=json_data) |
|
350 | 352 | with resource_string_patch: |
|
351 | 353 | result = utils.read_opensource_licenses() |
|
352 | 354 | assert result == json.loads(json_data) |
|
353 | 355 | |
|
354 | 356 | def test_caching(self): |
|
355 | 357 | utils._license_cache = { |
|
356 | 358 | "python2.7-pytest-2.7.1": { |
|
357 | 359 | "UNKNOWN": None |
|
358 | 360 | }, |
|
359 | 361 | "python2.7-Markdown-2.6.2": { |
|
360 | 362 | "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause" |
|
361 | 363 | } |
|
362 | 364 | } |
|
363 | 365 | resource_patch = mock.patch.object( |
|
364 | 366 | utils.pkg_resources, 'resource_string', side_effect=Exception) |
|
365 | 367 | json_patch = mock.patch.object( |
|
366 | 368 | utils.json, 'loads', side_effect=Exception) |
|
367 | 369 | |
|
368 | 370 | with resource_patch as resource_mock, json_patch as json_mock: |
|
369 | 371 | result = utils.read_opensource_licenses() |
|
370 | 372 | |
|
371 | 373 | assert resource_mock.call_count == 0 |
|
372 | 374 | assert json_mock.call_count == 0 |
|
373 | 375 | assert result == utils._license_cache |
|
374 | 376 | |
|
375 | 377 | def test_licenses_file_contains_no_unknown_licenses(self): |
|
376 | 378 | utils._license_cache = None |
|
377 | 379 | result = utils.read_opensource_licenses() |
|
378 | 380 | license_names = [] |
|
379 | 381 | for licenses in result.values(): |
|
380 | 382 | license_names.extend(licenses.keys()) |
|
381 | 383 | assert 'UNKNOWN' not in license_names |
|
382 | 384 | |
|
383 | 385 | |
|
384 | 386 | class TestMakeDbConfig(object): |
|
385 | 387 | def test_data_from_config_data_from_db_returned(self): |
|
386 | 388 | test_data = [ |
|
387 | 389 | ('section1', 'option1', 'value1'), |
|
388 | 390 | ('section2', 'option2', 'value2'), |
|
389 | 391 | ('section3', 'option3', 'value3'), |
|
390 | 392 | ] |
|
391 | 393 | with mock.patch.object(utils, 'config_data_from_db') as config_mock: |
|
392 | 394 | config_mock.return_value = test_data |
|
393 | 395 | kwargs = {'clear_session': False, 'repo': 'test_repo'} |
|
394 | 396 | result = utils.make_db_config(**kwargs) |
|
395 | 397 | config_mock.assert_called_once_with(**kwargs) |
|
396 | 398 | for section, option, expected_value in test_data: |
|
397 | 399 | value = result.get(section, option) |
|
398 | 400 | assert value == expected_value |
|
399 | 401 | |
|
400 | 402 | |
|
401 | 403 | class TestConfigDataFromDb(object): |
|
402 | 404 | def test_config_data_from_db_returns_active_settings(self): |
|
403 | 405 | test_data = [ |
|
404 | 406 | UiSetting('section1', 'option1', 'value1', True), |
|
405 | 407 | UiSetting('section2', 'option2', 'value2', True), |
|
406 | 408 | UiSetting('section3', 'option3', 'value3', False), |
|
407 | 409 | ] |
|
408 | 410 | repo_name = 'test_repo' |
|
409 | 411 | |
|
410 | 412 | model_patch = mock.patch.object(settings, 'VcsSettingsModel') |
|
411 | 413 | hooks_patch = mock.patch.object( |
|
412 | 414 | utils, 'get_enabled_hook_classes', |
|
413 | 415 | return_value=['pull', 'push', 'repo_size']) |
|
414 | 416 | with model_patch as model_mock, hooks_patch: |
|
415 | 417 | instance_mock = mock.Mock() |
|
416 | 418 | model_mock.return_value = instance_mock |
|
417 | 419 | instance_mock.get_ui_settings.return_value = test_data |
|
418 | 420 | result = utils.config_data_from_db( |
|
419 | 421 | clear_session=False, repo=repo_name) |
|
420 | 422 | |
|
421 | 423 | self._assert_repo_name_passed(model_mock, repo_name) |
|
422 | 424 | |
|
423 | 425 | expected_result = [ |
|
424 | 426 | ('section1', 'option1', 'value1'), |
|
425 | 427 | ('section2', 'option2', 'value2'), |
|
426 | 428 | ] |
|
427 | 429 | assert result == expected_result |
|
428 | 430 | |
|
429 | 431 | def _assert_repo_name_passed(self, model_mock, repo_name): |
|
430 | 432 | assert model_mock.call_count == 1 |
|
431 | 433 | call_args, call_kwargs = model_mock.call_args |
|
432 | 434 | assert call_kwargs['repo'] == repo_name |
|
433 | 435 | |
|
434 | 436 | |
|
435 | 437 | class TestIsDirWritable(object): |
|
436 | 438 | def test_returns_false_when_not_writable(self): |
|
437 | 439 | with mock.patch('__builtin__.open', side_effect=OSError): |
|
438 | 440 | assert not utils._is_dir_writable('/stub-path') |
|
439 | 441 | |
|
440 | 442 | def test_returns_true_when_writable(self, tmpdir): |
|
441 | 443 | assert utils._is_dir_writable(str(tmpdir)) |
|
442 | 444 | |
|
443 | 445 | def test_is_safe_against_race_conditions(self, tmpdir): |
|
444 | 446 | workers = multiprocessing.Pool() |
|
445 | 447 | directories = [str(tmpdir)] * 10 |
|
446 | 448 | workers.map(utils._is_dir_writable, directories) |
|
447 | 449 | |
|
448 | 450 | |
|
449 | 451 | class TestGetEnabledHooks(object): |
|
450 | 452 | def test_only_active_hooks_are_enabled(self): |
|
451 | 453 | ui_settings = [ |
|
452 | 454 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True), |
|
453 | 455 | UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True), |
|
454 | 456 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False) |
|
455 | 457 | ] |
|
456 | 458 | result = utils.get_enabled_hook_classes(ui_settings) |
|
457 | 459 | assert result == ['push', 'repo_size'] |
|
458 | 460 | |
|
459 | 461 | def test_all_hooks_are_enabled(self): |
|
460 | 462 | ui_settings = [ |
|
461 | 463 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True), |
|
462 | 464 | UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True), |
|
463 | 465 | UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True) |
|
464 | 466 | ] |
|
465 | 467 | result = utils.get_enabled_hook_classes(ui_settings) |
|
466 | 468 | assert result == ['push', 'repo_size', 'pull'] |
|
467 | 469 | |
|
468 | 470 | def test_no_enabled_hooks_when_no_hook_settings_are_found(self): |
|
469 | 471 | ui_settings = [] |
|
470 | 472 | result = utils.get_enabled_hook_classes(ui_settings) |
|
471 | 473 | assert result == [] |
@@ -1,147 +1,154 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Base for test suite for making push/pull operations. |
|
23 | 23 | |
|
24 | 24 | .. important:: |
|
25 | 25 | |
|
26 | 26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
27 | 27 | to redirect things to stderr instead of stdout. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | 30 | from os.path import join as jn |
|
31 | 31 | from subprocess32 import Popen, PIPE |
|
32 | 32 | import logging |
|
33 | 33 | import os |
|
34 | 34 | import tempfile |
|
35 | 35 | |
|
36 | 36 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
37 | 37 | |
|
38 | 38 | DEBUG = True |
|
39 | 39 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') |
|
40 | 40 | REPO_GROUP = 'a_repo_group' |
|
41 | 41 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
42 | 42 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
43 | 43 | |
|
44 | 44 | log = logging.getLogger(__name__) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | class Command(object): |
|
48 | 48 | |
|
49 | 49 | def __init__(self, cwd): |
|
50 | 50 | self.cwd = cwd |
|
51 | 51 | self.process = None |
|
52 | 52 | |
|
53 | 53 | def execute(self, cmd, *args): |
|
54 | 54 | """ |
|
55 | 55 | Runs command on the system with given ``args``. |
|
56 | 56 | """ |
|
57 | 57 | |
|
58 | 58 | command = cmd + ' ' + ' '.join(args) |
|
59 | 59 | if DEBUG: |
|
60 | 60 | log.debug('*** CMD %s ***' % (command,)) |
|
61 | 61 | |
|
62 | 62 | env = dict(os.environ) |
|
63 | 63 | # Delete coverage variables, as they make the test fail for Mercurial |
|
64 | 64 | for key in env.keys(): |
|
65 | 65 | if key.startswith('COV_CORE_'): |
|
66 | 66 | del env[key] |
|
67 | 67 | |
|
68 | 68 | self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, |
|
69 | 69 | cwd=self.cwd, env=env) |
|
70 | 70 | stdout, stderr = self.process.communicate() |
|
71 | 71 | if DEBUG: |
|
72 | 72 | log.debug('STDOUT:%s' % (stdout,)) |
|
73 | 73 | log.debug('STDERR:%s' % (stderr,)) |
|
74 | 74 | return stdout, stderr |
|
75 | 75 | |
|
76 | 76 | def assert_returncode_success(self): |
|
77 | 77 | assert self.process.returncode == 0 |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | def _add_files_and_push(vcs, dest, clone_url=None, **kwargs): |
|
81 | 81 | """ |
|
82 | 82 | Generate some files, add it to DEST repo and push back |
|
83 | 83 | vcs is git or hg and defines what VCS we want to make those files for |
|
84 | 84 | """ |
|
85 | 85 | # commit some stuff into this repo |
|
86 | 86 | cwd = path = jn(dest) |
|
87 | 87 | added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next()) |
|
88 | 88 | Command(cwd).execute('touch %s' % added_file) |
|
89 | 89 | Command(cwd).execute('%s add %s' % (vcs, added_file)) |
|
90 | 90 | author_str = 'Marcin KuΕΊminski <me@email.com>' |
|
91 | 91 | |
|
92 | 92 | git_ident = "git config user.name {} && git config user.email {}".format( |
|
93 | 93 | 'Marcin KuΕΊminski', 'me@email.com') |
|
94 | 94 | |
|
95 | 95 | for i in xrange(kwargs.get('files_no', 3)): |
|
96 | 96 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) |
|
97 | 97 | Command(cwd).execute(cmd) |
|
98 | 98 | if vcs == 'hg': |
|
99 | 99 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( |
|
100 | 100 | i, author_str, added_file |
|
101 | 101 | ) |
|
102 | 102 | elif vcs == 'git': |
|
103 | 103 | cmd = """%s && git commit -m 'commited new %s' %s""" % ( |
|
104 | 104 | git_ident, i, added_file) |
|
105 | 105 | Command(cwd).execute(cmd) |
|
106 | 106 | |
|
107 | 107 | # PUSH it back |
|
108 | 108 | stdout = stderr = None |
|
109 | 109 | if vcs == 'hg': |
|
110 | 110 | stdout, stderr = Command(cwd).execute( |
|
111 | 111 | 'hg push --verbose', clone_url) |
|
112 | 112 | elif vcs == 'git': |
|
113 | 113 | stdout, stderr = Command(cwd).execute( |
|
114 | 114 | """%s && git push --verbose %s master""" % ( |
|
115 | 115 | git_ident, clone_url)) |
|
116 | 116 | |
|
117 | 117 | return stdout, stderr |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | def _check_proper_git_push( |
|
121 | 121 | stdout, stderr, branch='master', should_set_default_branch=False): |
|
122 | 122 | # Note: Git is writing most information to stderr intentionally |
|
123 | 123 | assert 'fatal' not in stderr |
|
124 | 124 | assert 'rejected' not in stderr |
|
125 | 125 | assert 'Pushing to' in stderr |
|
126 | 126 | assert '%s -> %s' % (branch, branch) in stderr |
|
127 | 127 | |
|
128 | 128 | if should_set_default_branch: |
|
129 | 129 | assert "Setting default branch to %s" % branch in stderr |
|
130 | 130 | else: |
|
131 | 131 | assert "Setting default branch" not in stderr |
|
132 | 132 | |
|
133 | 133 | |
|
134 | def _check_proper_hg_push(stdout, stderr, branch='default'): | |
|
135 | assert 'pushing to' in stdout | |
|
136 | assert 'searching for changes' in stdout | |
|
137 | ||
|
138 | assert 'abort:' not in stderr | |
|
139 | ||
|
140 | ||
|
134 | 141 | def _check_proper_clone(stdout, stderr, vcs): |
|
135 | 142 | if vcs == 'hg': |
|
136 | 143 | assert 'requesting all changes' in stdout |
|
137 | 144 | assert 'adding changesets' in stdout |
|
138 | 145 | assert 'adding manifests' in stdout |
|
139 | 146 | assert 'adding file changes' in stdout |
|
140 | 147 | |
|
141 | 148 | assert stderr == '' |
|
142 | 149 | |
|
143 | 150 | if vcs == 'git': |
|
144 | 151 | assert '' == stdout |
|
145 | 152 | assert 'Cloning into' in stderr |
|
146 | 153 | assert 'abort:' not in stderr |
|
147 | 154 | assert 'fatal:' not in stderr |
@@ -1,267 +1,270 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | py.test config for test suite for making push/pull operations. |
|
23 | 23 | |
|
24 | 24 | .. important:: |
|
25 | 25 | |
|
26 | 26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
27 | 27 | to redirect things to stderr instead of stdout. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | 30 | import ConfigParser |
|
31 | 31 | import os |
|
32 | 32 | import subprocess32 |
|
33 | 33 | import tempfile |
|
34 | 34 | import textwrap |
|
35 | 35 | import pytest |
|
36 | 36 | |
|
37 | 37 | import rhodecode |
|
38 | 38 | from rhodecode.model.db import Repository |
|
39 | 39 | from rhodecode.model.meta import Session |
|
40 | 40 | from rhodecode.model.settings import SettingsModel |
|
41 | 41 | from rhodecode.tests import ( |
|
42 | 42 | GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,) |
|
43 | 43 | from rhodecode.tests.fixture import Fixture |
|
44 | 44 | from rhodecode.tests.utils import ( |
|
45 | 45 | set_anonymous_access, is_url_reachable, wait_for_url) |
|
46 | 46 | |
|
47 | 47 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') |
|
48 | 48 | REPO_GROUP = 'a_repo_group' |
|
49 | 49 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
50 | 50 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def assert_no_running_instance(url): |
|
54 | 54 | if is_url_reachable(url): |
|
55 | 55 | print("Hint: Usually this means another instance of Enterprise " |
|
56 | 56 | "is running in the background.") |
|
57 | 57 | pytest.fail( |
|
58 | 58 | "Port is not free at %s, cannot start web interface" % url) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | def get_host_url(pylons_config): |
|
62 | 62 | """Construct the host url using the port in the test configuration.""" |
|
63 | 63 | config = ConfigParser.ConfigParser() |
|
64 | 64 | config.read(pylons_config) |
|
65 | 65 | |
|
66 | 66 | return '127.0.0.1:%s' % config.get('server:main', 'port') |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | class RcWebServer(object): |
|
70 | 70 | """ |
|
71 | 71 | Represents a running RCE web server used as a test fixture. |
|
72 | 72 | """ |
|
73 | 73 | def __init__(self, pylons_config): |
|
74 | 74 | self.pylons_config = pylons_config |
|
75 | 75 | |
|
76 | 76 | def repo_clone_url(self, repo_name, **kwargs): |
|
77 | 77 | params = { |
|
78 | 78 | 'user': TEST_USER_ADMIN_LOGIN, |
|
79 | 79 | 'passwd': TEST_USER_ADMIN_PASS, |
|
80 | 80 | 'host': get_host_url(self.pylons_config), |
|
81 | 81 | 'cloned_repo': repo_name, |
|
82 | 82 | } |
|
83 | 83 | params.update(**kwargs) |
|
84 | 84 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params |
|
85 | 85 | return _url |
|
86 | 86 | |
|
87 | def host_url(self): | |
|
88 | return 'http://' + get_host_url(self.pylons_config) | |
|
89 | ||
|
87 | 90 | |
|
88 | 91 | @pytest.fixture(scope="module") |
|
89 | 92 | def rcextensions(request, pylonsapp, tmpdir_factory): |
|
90 | 93 | """ |
|
91 | 94 | Installs a testing rcextensions pack to ensure they work as expected. |
|
92 | 95 | """ |
|
93 | 96 | init_content = textwrap.dedent(""" |
|
94 | 97 | # Forward import the example rcextensions to make it |
|
95 | 98 | # active for our tests. |
|
96 | 99 | from rhodecode.tests.other.example_rcextensions import * |
|
97 | 100 | """) |
|
98 | 101 | |
|
99 | 102 | # Note: rcextensions are looked up based on the path of the ini file |
|
100 | 103 | root_path = tmpdir_factory.getbasetemp() |
|
101 | 104 | rcextensions_path = root_path.join('rcextensions') |
|
102 | 105 | init_path = rcextensions_path.join('__init__.py') |
|
103 | 106 | |
|
104 | 107 | if rcextensions_path.check(): |
|
105 | 108 | pytest.fail( |
|
106 | 109 | "Path for rcextensions already exists, please clean up before " |
|
107 | 110 | "test run this path: %s" % (rcextensions_path, )) |
|
108 | 111 | return |
|
109 | 112 | |
|
110 | 113 | request.addfinalizer(rcextensions_path.remove) |
|
111 | 114 | init_path.write_binary(init_content, ensure=True) |
|
112 | 115 | |
|
113 | 116 | |
|
114 | 117 | @pytest.fixture(scope="module") |
|
115 | 118 | def repos(request, pylonsapp): |
|
116 | 119 | """Create a copy of each test repo in a repo group.""" |
|
117 | 120 | fixture = Fixture() |
|
118 | 121 | repo_group = fixture.create_repo_group(REPO_GROUP) |
|
119 | 122 | repo_group_id = repo_group.group_id |
|
120 | 123 | fixture.create_fork(HG_REPO, HG_REPO, |
|
121 | 124 | repo_name_full=HG_REPO_WITH_GROUP, |
|
122 | 125 | repo_group=repo_group_id) |
|
123 | 126 | fixture.create_fork(GIT_REPO, GIT_REPO, |
|
124 | 127 | repo_name_full=GIT_REPO_WITH_GROUP, |
|
125 | 128 | repo_group=repo_group_id) |
|
126 | 129 | |
|
127 | 130 | @request.addfinalizer |
|
128 | 131 | def cleanup(): |
|
129 | 132 | fixture.destroy_repo(HG_REPO_WITH_GROUP) |
|
130 | 133 | fixture.destroy_repo(GIT_REPO_WITH_GROUP) |
|
131 | 134 | fixture.destroy_repo_group(repo_group_id) |
|
132 | 135 | |
|
133 | 136 | |
|
134 | 137 | @pytest.fixture(scope="module") |
|
135 | 138 | def rc_web_server_config(testini_factory): |
|
136 | 139 | """ |
|
137 | 140 | Configuration file used for the fixture `rc_web_server`. |
|
138 | 141 | """ |
|
139 | 142 | CUSTOM_PARAMS = [ |
|
140 | 143 | {'handler_console': {'level': 'DEBUG'}}, |
|
141 | 144 | ] |
|
142 | 145 | return testini_factory(CUSTOM_PARAMS) |
|
143 | 146 | |
|
144 | 147 | |
|
145 | 148 | @pytest.fixture(scope="module") |
|
146 | 149 | def rc_web_server( |
|
147 | 150 | request, pylonsapp, rc_web_server_config, repos, rcextensions): |
|
148 | 151 | """ |
|
149 | 152 | Run the web server as a subprocess. |
|
150 | 153 | |
|
151 | 154 | Since we have already a running vcsserver, this is not spawned again. |
|
152 | 155 | """ |
|
153 | 156 | env = os.environ.copy() |
|
154 | 157 | env['RC_NO_TMP_PATH'] = '1' |
|
155 | 158 | |
|
156 | 159 | rc_log = RC_LOG |
|
157 | 160 | server_out = open(rc_log, 'w') |
|
158 | 161 | |
|
159 | 162 | # TODO: Would be great to capture the output and err of the subprocess |
|
160 | 163 | # and make it available in a section of the py.test report in case of an |
|
161 | 164 | # error. |
|
162 | 165 | |
|
163 | 166 | host_url = 'http://' + get_host_url(rc_web_server_config) |
|
164 | 167 | assert_no_running_instance(host_url) |
|
165 | 168 | command = ['pserve', rc_web_server_config] |
|
166 | 169 | |
|
167 | 170 | print('Starting rcserver: {}'.format(host_url)) |
|
168 | 171 | print('Command: {}'.format(command)) |
|
169 | 172 | print('Logfile: {}'.format(rc_log)) |
|
170 | 173 | |
|
171 | 174 | proc = subprocess32.Popen( |
|
172 | 175 | command, bufsize=0, env=env, stdout=server_out, stderr=server_out) |
|
173 | 176 | |
|
174 | 177 | wait_for_url(host_url, timeout=30) |
|
175 | 178 | |
|
176 | 179 | @request.addfinalizer |
|
177 | 180 | def stop_web_server(): |
|
178 | 181 | # TODO: Find out how to integrate with the reporting of py.test to |
|
179 | 182 | # make this information available. |
|
180 | 183 | print("\nServer log file written to %s" % (rc_log, )) |
|
181 | 184 | proc.kill() |
|
182 | 185 | server_out.flush() |
|
183 | 186 | server_out.close() |
|
184 | 187 | |
|
185 | 188 | return RcWebServer(rc_web_server_config) |
|
186 | 189 | |
|
187 | 190 | |
|
188 | 191 | @pytest.fixture(scope='class', autouse=True) |
|
189 | 192 | def disable_anonymous_user_access(pylonsapp): |
|
190 | 193 | set_anonymous_access(False) |
|
191 | 194 | |
|
192 | 195 | |
|
193 | 196 | @pytest.fixture |
|
194 | 197 | def disable_locking(pylonsapp): |
|
195 | 198 | r = Repository.get_by_repo_name(GIT_REPO) |
|
196 | 199 | Repository.unlock(r) |
|
197 | 200 | r.enable_locking = False |
|
198 | 201 | Session().add(r) |
|
199 | 202 | Session().commit() |
|
200 | 203 | |
|
201 | 204 | r = Repository.get_by_repo_name(HG_REPO) |
|
202 | 205 | Repository.unlock(r) |
|
203 | 206 | r.enable_locking = False |
|
204 | 207 | Session().add(r) |
|
205 | 208 | Session().commit() |
|
206 | 209 | |
|
207 | 210 | |
|
208 | 211 | @pytest.fixture |
|
209 | 212 | def enable_auth_plugins(request, pylonsapp, csrf_token): |
|
210 | 213 | """ |
|
211 | 214 | Return a factory object that when called, allows to control which |
|
212 | 215 | authentication plugins are enabled. |
|
213 | 216 | """ |
|
214 | 217 | def _enable_plugins(plugins_list, override=None): |
|
215 | 218 | override = override or {} |
|
216 | 219 | params = { |
|
217 | 220 | 'auth_plugins': ','.join(plugins_list), |
|
218 | 221 | } |
|
219 | 222 | |
|
220 | 223 | # helper translate some names to others |
|
221 | 224 | name_map = { |
|
222 | 225 | 'token': 'authtoken' |
|
223 | 226 | } |
|
224 | 227 | |
|
225 | 228 | for module in plugins_list: |
|
226 | 229 | plugin_name = module.partition('#')[-1] |
|
227 | 230 | if plugin_name in name_map: |
|
228 | 231 | plugin_name = name_map[plugin_name] |
|
229 | 232 | enabled_plugin = 'auth_%s_enabled' % plugin_name |
|
230 | 233 | cache_ttl = 'auth_%s_cache_ttl' % plugin_name |
|
231 | 234 | |
|
232 | 235 | # default params that are needed for each plugin, |
|
233 | 236 | # `enabled` and `cache_ttl` |
|
234 | 237 | params.update({ |
|
235 | 238 | enabled_plugin: True, |
|
236 | 239 | cache_ttl: 0 |
|
237 | 240 | }) |
|
238 | 241 | if override.get: |
|
239 | 242 | params.update(override.get(module, {})) |
|
240 | 243 | |
|
241 | 244 | validated_params = params |
|
242 | 245 | for k, v in validated_params.items(): |
|
243 | 246 | setting = SettingsModel().create_or_update_setting(k, v) |
|
244 | 247 | Session().add(setting) |
|
245 | 248 | Session().commit() |
|
246 | 249 | |
|
247 | 250 | def cleanup(): |
|
248 | 251 | _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) |
|
249 | 252 | |
|
250 | 253 | request.addfinalizer(cleanup) |
|
251 | 254 | |
|
252 | 255 | return _enable_plugins |
|
253 | 256 | |
|
254 | 257 | |
|
255 | 258 | @pytest.fixture |
|
256 | 259 | def fs_repo_only(request, rhodecode_fixtures): |
|
257 | 260 | def fs_repo_fabric(repo_name, repo_type): |
|
258 | 261 | rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) |
|
259 | 262 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False) |
|
260 | 263 | |
|
261 | 264 | def cleanup(): |
|
262 | 265 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True) |
|
263 | 266 | rhodecode_fixtures.destroy_repo_on_filesystem(repo_name) |
|
264 | 267 | |
|
265 | 268 | request.addfinalizer(cleanup) |
|
266 | 269 | |
|
267 | 270 | return fs_repo_fabric |
@@ -1,481 +1,655 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Test suite for making push/pull operations, on specially modified INI files |
|
23 | 23 | |
|
24 | 24 | .. important:: |
|
25 | 25 | |
|
26 | 26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
27 | 27 | to redirect things to stderr instead of stdout. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | import os |
|
32 | 32 | import time |
|
33 | 33 | |
|
34 | 34 | import pytest |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib.vcs.backends.git.repository import GitRepository |
|
37 | from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository | |
|
37 | 38 | from rhodecode.lib.vcs.nodes import FileNode |
|
38 | 39 | from rhodecode.model.auth_token import AuthTokenModel |
|
39 | 40 | from rhodecode.model.db import Repository, UserIpMap, CacheKey |
|
40 | 41 | from rhodecode.model.meta import Session |
|
41 | 42 | from rhodecode.model.user import UserModel |
|
42 | 43 | from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN) |
|
43 | 44 | |
|
44 | 45 | from rhodecode.tests.other.vcs_operations import ( |
|
45 |
Command, _check_proper_clone, _check_proper_git_push, |
|
|
46 | Command, _check_proper_clone, _check_proper_git_push, | |
|
47 | _check_proper_hg_push, _add_files_and_push, | |
|
46 | 48 | HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP) |
|
47 | 49 | |
|
48 | 50 | |
|
@pytest.mark.usefixtures("disable_locking")
class TestVCSOperations(object):
    """Integration tests for clone/push over HTTP against the live test
    server started by the ``rc_web_server`` fixture.

    Output checks rely on the ``_check_proper_*`` helpers; git reports
    progress on stderr, mercurial on stdout.
    """

    # --- clone happy paths -------------------------------------------------

    def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        cmd = Command('/tmp')
        stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'git')
        cmd.assert_returncode_success()

    def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
        # Cloning with a trailing ".git" must resolve to the same repo.
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        cmd = Command('/tmp')
        stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'git')
        cmd.assert_returncode_success()

    def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
        # Repos are also addressable by "_<repo_id>".
        repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
        clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
        repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
        clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
        cmd = Command('/tmp')
        stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'git')
        cmd.assert_returncode_success()

    def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
        cmd = Command('/tmp')
        stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'git')
        cmd.assert_returncode_success()

    def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
        # Shallow clone output differs, so only the basic markers are checked.
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        cmd = Command('/tmp')
        stdout, stderr = cmd.execute(
            'git clone --depth=1', clone_url, tmpdir.strpath)

        assert '' == stdout
        assert 'Cloning into' in stderr
        cmd.assert_returncode_success()

    # --- clone failure cases -----------------------------------------------

    def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr

    def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)
        assert 'fatal: Authentication failed' in stderr

    def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
        # Accessing a repo with the wrong client must look like a 404.
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url('trololo')
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url('trololo')
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
        assert 'not found' in stderr

    def test_clone_existing_path_hg_not_in_database(
            self, rc_web_server, tmpdir, fs_repo_only):
        # Repo exists on disk but not in the DB -> must not be served.
        db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_existing_path_git_not_in_database(
            self, rc_web_server, tmpdir, fs_repo_only):
        db_name = fs_repo_only('not-in-db-git', repo_type='git')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_clone_existing_path_hg_not_in_database_different_scm(
            self, rc_web_server, tmpdir, fs_repo_only):
        db_name = fs_repo_only('not-in-db-git', repo_type='git')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_existing_path_git_not_in_database_different_scm(
            self, rc_web_server, tmpdir, fs_repo_only):
        db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    # --- push --------------------------------------------------------------

    def test_push_new_file_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url)

        # Checks hg progress markers directly rather than via
        # _check_proper_hg_push ('size summary' is hg-version specific).
        assert 'pushing to' in stdout
        assert 'size summary' in stdout

    def test_push_new_file_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        # commit some stuff into this repo
        stdout, stderr = _add_files_and_push(
            'git', tmpdir.strpath, clone_url=clone_url)

        _check_proper_git_push(stdout, stderr)

    def test_push_invalidates_cache_hg(self, rc_web_server, tmpdir):
        # Arrange an active cache key, push, then expect it invalidated.
        key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).scalar()
        if not key:
            key = CacheKey(HG_REPO, HG_REPO)

        key.cache_active = True
        Session().add(key)
        Session().commit()

        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)

        key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).one()
        assert key.cache_active is False

    def test_push_invalidates_cache_git(self, rc_web_server, tmpdir):
        key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).scalar()
        if not key:
            key = CacheKey(GIT_REPO, GIT_REPO)

        key.cache_active = True
        Session().add(key)
        Session().commit()

        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        # commit some stuff into this repo
        stdout, stderr = _add_files_and_push(
            'git', tmpdir.strpath, clone_url=clone_url, files_no=1)
        _check_proper_git_push(stdout, stderr)

        key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).one()

        assert key.cache_active is False

    def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(
            HG_REPO, user='bad', passwd='name')
        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=push_url)

        assert 'abort: authorization failed' in stderr

    def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(
            GIT_REPO, user='bad', passwd='name')
        stdout, stderr = _add_files_and_push(
            'git', tmpdir.strpath, clone_url=push_url)

        assert 'fatal: Authentication failed' in stderr

    def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath,
            clone_url=rc_web_server.repo_clone_url('not-existing'))

        assert 'HTTP Error 404: Not Found' in stderr

    def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'git', tmpdir.strpath,
            clone_url=rc_web_server.repo_clone_url('not-existing'))

        assert 'not found' in stderr

    # --- IP restrictions ---------------------------------------------------

    def test_ip_restriction_hg(self, rc_web_server, tmpdir):
        user_model = UserModel()
        try:
            # Restrict access to an IP the test client does not have.
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
            Session().commit()
            # sleeps give the auth caches time to pick up the change
            # -- NOTE(review): presumably tied to cache_ttl; confirm.
            time.sleep(2)
            clone_url = rc_web_server.repo_clone_url(HG_REPO)
            stdout, stderr = Command('/tmp').execute(
                'hg clone', clone_url, tmpdir.strpath)
            assert 'abort: HTTP Error 403: Forbidden' in stderr
        finally:
            # release IP restrictions
            for ip in UserIpMap.getAll():
                UserIpMap.delete(ip.ip_id)
            Session().commit()

            time.sleep(2)

            stdout, stderr = Command('/tmp').execute(
                'hg clone', clone_url, tmpdir.strpath)
            _check_proper_clone(stdout, stderr, 'hg')

    def test_ip_restriction_git(self, rc_web_server, tmpdir):
        user_model = UserModel()
        try:
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
            Session().commit()
            time.sleep(2)
            clone_url = rc_web_server.repo_clone_url(GIT_REPO)
            stdout, stderr = Command('/tmp').execute(
                'git clone', clone_url, tmpdir.strpath)
            msg = "The requested URL returned error: 403"
            assert msg in stderr
        finally:
            # release IP restrictions
            for ip in UserIpMap.getAll():
                UserIpMap.delete(ip.ip_id)
            Session().commit()

            time.sleep(2)

            cmd = Command('/tmp')
            stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
            cmd.assert_returncode_success()
            _check_proper_clone(stdout, stderr, 'git')

    # --- auth-token based access -------------------------------------------

    def test_clone_by_auth_token(
            self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
        enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
                             'egg:rhodecode-enterprise-ce#rhodecode'])

        user = user_util.create_user()
        token = user.auth_tokens[1]

        clone_url = rc_web_server.repo_clone_url(
            HG_REPO, user=user.username, passwd=token)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_by_auth_token_expired(
            self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
        enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
                             'egg:rhodecode-enterprise-ce#rhodecode'])

        user = user_util.create_user()
        # negative lifetime -> token already expired
        auth_token = AuthTokenModel().create(
            user.user_id, 'test-token', -10, AuthTokenModel.cls.ROLE_VCS)
        token = auth_token.api_key

        clone_url = rc_web_server.repo_clone_url(
            HG_REPO, user=user.username, passwd=token)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr

    def test_clone_by_auth_token_bad_role(
            self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
        enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
                             'egg:rhodecode-enterprise-ce#rhodecode'])

        user = user_util.create_user()
        # API role tokens must not grant vcs access
        auth_token = AuthTokenModel().create(
            user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_API)
        token = auth_token.api_key

        clone_url = rc_web_server.repo_clone_url(
            HG_REPO, user=user.username, passwd=token)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr

    def test_clone_by_auth_token_user_disabled(
            self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
        enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
                             'egg:rhodecode-enterprise-ce#rhodecode'])
        user = user_util.create_user()
        user.active = False
        Session().add(user)
        Session().commit()
        token = user.auth_tokens[1]

        clone_url = rc_web_server.repo_clone_url(
            HG_REPO, user=user.username, passwd=token)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr

    def test_clone_by_auth_token_with_scope(
            self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
        enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
                             'egg:rhodecode-enterprise-ce#rhodecode'])
        user = user_util.create_user()
        auth_token = AuthTokenModel().create(
            user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_VCS)
        token = auth_token.api_key

        # manually set scope
        auth_token.repo = Repository.get_by_repo_name(HG_REPO)
        Session().add(auth_token)
        Session().commit()

        clone_url = rc_web_server.repo_clone_url(
            HG_REPO, user=user.username, passwd=token)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_by_auth_token_with_wrong_scope(
            self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
        enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
                             'egg:rhodecode-enterprise-ce#rhodecode'])
        user = user_util.create_user()
        auth_token = AuthTokenModel().create(
            user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_VCS)
        token = auth_token.api_key

        # manually set scope
        auth_token.repo = Repository.get_by_repo_name(GIT_REPO)
        Session().add(auth_token)
        Session().commit()

        clone_url = rc_web_server.repo_clone_url(
            HG_REPO, user=user.username, passwd=token)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr
|
441 | 442 | |
|
442 | 443 | |
|
def test_git_sets_default_branch_if_not_master(
        backend_git, tmpdir, disable_locking, rc_web_server):
    """Pushing a non-master branch into an empty repo makes it the default."""
    empty_repo = backend_git.create_repo()
    clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)

    workdir_cmd = Command(tmpdir.strpath)
    workdir_cmd.execute('git clone', clone_url)

    checkout = GitRepository(
        os.path.join(tmpdir.strpath, empty_repo.repo_name))
    checkout.in_memory_commit.add(FileNode('file', content=''))
    checkout.in_memory_commit.commit(
        message='Commit on branch test',
        author='Automatic test',
        branch='test')

    stdout, stderr = Command(checkout.path).execute(
        'git push --verbose origin test')
    # first push of a branch into an empty repo should promote it to default
    _check_proper_git_push(
        stdout, stderr, branch='test', should_set_default_branch=True)

    # a fresh clone should now succeed against the new default branch
    stdout, stderr = workdir_cmd.execute(
        'git clone', clone_url, empty_repo.repo_name + '-clone')
    _check_proper_clone(stdout, stderr, 'git')

    # Doing an explicit commit in order to get latest user logs on MySQL
    Session().commit()
|
469 | 470 | |
|
470 | 471 | |
|
def test_git_fetches_from_remote_repository_with_annotated_tags(
        backend_git, disable_locking, rc_web_server):
    """Fetch over HTTP from a repo that contains annotated tags.

    Git-backend specific: Dulwich exhibits this behavior only when
    talking to a remote repository, so the fetch must go through the
    web server rather than the local filesystem.
    """
    tagged_source = backend_git['annotated-tag']
    fetch_target = backend_git.create_repo().scm_instance()
    fetch_target.fetch(
        rc_web_server.repo_clone_url(tagged_source.repo_name))
|
483 | ||
|
484 | ||
|
def test_git_push_shows_pull_request_refs(backend_git, rc_web_server, tmpdir):
    """
    test if remote info about refs is visible
    """
    empty_repo = backend_git.create_repo()

    clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
    Command(tmpdir.strpath).execute('git clone', clone_url)

    def _commit_and_push(node, message, author, branch):
        # Re-open the working copy, record a single commit on ``branch``
        # and push it, returning the captured (stdout, stderr).
        wc = GitRepository(
            os.path.join(tmpdir.strpath, empty_repo.repo_name))
        wc.in_memory_commit.add(node)
        wc.in_memory_commit.commit(
            message=message, author=author, branch=branch)
        return Command(wc.path).execute(
            'git push --verbose origin ' + branch)

    stdout, stderr = _commit_and_push(
        FileNode('readme.md', content='## Hello'),
        'Commit on branch Master', 'Automatic test', 'master')
    _check_proper_git_push(stdout, stderr, branch='master')

    ref = '{}/{}/pull-request/new?branch=master'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
    assert 'remote: RhodeCode: push completed' in stderr

    # push on the same branch — the same link must be advertised again
    stdout, stderr = _commit_and_push(
        FileNode('setup.py', content='print\n'),
        'Commit2 on branch Master', 'Automatic test2', 'master')
    _check_proper_git_push(stdout, stderr, branch='master')

    assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
    assert 'remote: RhodeCode: push completed' in stderr

    # new branch — a link pointing at that branch must be advertised
    stdout, stderr = _commit_and_push(
        FileNode('feature1.py', content='## Hello world'),
        'Commit on branch feature', 'Automatic test', 'feature')
    _check_proper_git_push(stdout, stderr, branch='feature')

    ref = '{}/{}/pull-request/new?branch=feature'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
    assert 'remote: RhodeCode: push completed' in stderr
|
543 | ||
|
544 | ||
|
def test_hg_push_shows_pull_request_refs(backend_hg, rc_web_server, tmpdir):
    """hg pushes should advertise pull-request links in the remote output."""
    empty_repo = backend_hg.create_repo()

    clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
    Command(tmpdir.strpath).execute('hg clone', clone_url)

    def _commit_and_push(node, message, author, branch,
                         push_cmd='hg push --verbose'):
        # Re-open the working copy, commit ``node`` on ``branch``, check the
        # branch out and push, returning the captured (stdout, stderr).
        wc = MercurialRepository(
            os.path.join(tmpdir.strpath, empty_repo.repo_name))
        wc.in_memory_commit.add(node)
        wc.in_memory_commit.commit(
            message=message, author=author, branch=branch)
        runner = Command(wc.path)
        runner.execute('hg checkout ' + branch)
        return runner.execute(push_cmd, clone_url)

    stdout, stderr = _commit_and_push(
        FileNode(u'readme.md', content=u'## Hello'),
        u'Commit on branch default', u'Automatic test', 'default')
    _check_proper_hg_push(stdout, stderr, branch='default')

    ref = '{}/{}/pull-request/new?branch=default'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
    assert 'remote: RhodeCode: push completed' in stdout

    # push on the same branch — the same link must show up again
    stdout, stderr = _commit_and_push(
        FileNode(u'setup.py', content=u'print\n'),
        u'Commit2 on branch default', u'Automatic test2', u'default')
    _check_proper_hg_push(stdout, stderr, branch='default')

    assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
    assert 'remote: RhodeCode: push completed' in stdout

    # new branch — hg requires --new-branch for the remote to accept it
    stdout, stderr = _commit_and_push(
        FileNode(u'feature1.py', content=u'## Hello world'),
        u'Commit on branch feature', u'Automatic test', u'feature',
        push_cmd='hg push --new-branch --verbose')
    _check_proper_hg_push(stdout, stderr, branch='feature')

    ref = '{}/{}/pull-request/new?branch=feature'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
    assert 'remote: RhodeCode: push completed' in stdout
|
606 | ||
|
607 | ||
|
def test_hg_push_shows_pull_request_refs_book(backend_hg, rc_web_server, tmpdir):
    """Pushing a bookmark should advertise a bookmark-scoped PR link too."""
    empty_repo = backend_hg.create_repo()

    clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
    Command(tmpdir.strpath).execute('hg clone', clone_url)

    working_copy = MercurialRepository(
        os.path.join(tmpdir.strpath, empty_repo.repo_name))
    working_copy.in_memory_commit.add(
        FileNode(u'readme.md', content=u'## Hello'))
    working_copy.in_memory_commit.commit(
        message=u'Commit on branch default',
        author=u'Automatic test',
        branch='default')

    shell = Command(working_copy.path)
    shell.execute('hg checkout default')

    out, err = shell.execute('hg push --verbose', clone_url)
    _check_proper_hg_push(out, err, branch='default')

    branch_link = '{}/{}/pull-request/new?branch=default'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(
        branch_link) in out
    assert 'remote: RhodeCode: push completed' in out

    # add bookmark
    working_copy = MercurialRepository(
        os.path.join(tmpdir.strpath, empty_repo.repo_name))
    working_copy.in_memory_commit.add(
        FileNode(u'setup.py', content=u'print\n'))
    working_copy.in_memory_commit.commit(
        message=u'Commit2 on branch default',
        author=u'Automatic test2',
        branch=u'default')

    shell = Command(working_copy.path)
    shell.execute('hg checkout default')
    shell.execute('hg bookmark feature2')
    out, err = shell.execute('hg push -B feature2 --verbose', clone_url)
    _check_proper_hg_push(out, err, branch='default')

    # both the branch link and the bookmark link must be advertised
    branch_link = '{}/{}/pull-request/new?branch=default'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(
        branch_link) in out
    bookmark_link = '{}/{}/pull-request/new?bookmark=feature2'.format(
        rc_web_server.host_url(), empty_repo.repo_name)
    assert 'remote: RhodeCode: open pull request link: {}'.format(
        bookmark_link) in out
    assert 'remote: RhodeCode: push completed' in out
    assert 'exporting bookmark feature2' in out
General Comments 0
You need to be logged in to leave comments.
Login now