simplified boolean expressions
marcink
r3888:7aa0ff5b beta
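The change throughout this commit is mechanical: comparisons against boolean literals (x == False, x == True, != []) are replaced with plain truthiness tests (not x, or just x). A minimal before/after sketch of the idea follows; the use_celery flag is purely illustrative and not taken from the diffs below, and the equivalence only holds when the tested value really is a bool.

# before: explicit comparison against a boolean literal
use_celery = False
if use_celery == False:
    print('celery is disabled')

# after: plain truthiness test, equivalent for genuine booleans
if not use_celery:
    print('celery is disabled')

# caveat: for non-boolean values the two forms can disagree,
# e.g. ('0' == True) is False while bool('0') is True, so the
# simplification assumes the flags involved are real booleans.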
@@ -1,98 +1,98 b''
1 1 import rhodecode
2 2 from rhodecode.lib.utils import BasePasterCommand, Command, load_rcextensions
3 3 from celery.app import app_or_default
4 4 from celery.bin import camqadm, celerybeat, celeryd, celeryev
5 5
6 6 from rhodecode.lib.utils2 import str2bool
7 7
8 8 __all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand',
9 9 'CAMQPAdminCommand', 'CeleryEventCommand']
10 10
11 11
12 12 class CeleryCommand(BasePasterCommand):
13 13 """Abstract class implements run methods needed for celery
14 14
15 15 Starts the celery worker that uses a paste.deploy configuration
16 16 file.
17 17 """
18 18
19 19 def update_parser(self):
20 20 """
21 21 Abstract method. Allows for the class's parser to be updated
22 22 before the superclass's `run` method is called. Necessary to
23 23 allow options/arguments to be passed through to the underlying
24 24 celery command.
25 25 """
26 26
27 27 cmd = self.celery_command(app_or_default())
28 28 for x in cmd.get_options():
29 29 self.parser.add_option(x)
30 30
31 31 def command(self):
32 32 from pylons import config
33 33 try:
34 34 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
35 35 except KeyError:
36 36 CELERY_ON = False
37 37
38 if CELERY_ON == False:
38 if not CELERY_ON:
39 39 raise Exception('Please enable celery_on in .ini config '
40 40 'file before running celeryd')
41 41 rhodecode.CELERY_ON = CELERY_ON
42 42 load_rcextensions(config['here'])
43 43 cmd = self.celery_command(app_or_default())
44 44 return cmd.run(**vars(self.options))
45 45
46 46
47 47 class CeleryDaemonCommand(CeleryCommand):
48 48 """Start the celery worker
49 49
50 50 Starts the celery worker that uses a paste.deploy configuration
51 51 file.
52 52 """
53 53 usage = 'CONFIG_FILE [celeryd options...]'
54 54 summary = __doc__.splitlines()[0]
55 55 description = "".join(__doc__.splitlines()[2:])
56 56
57 57 parser = Command.standard_parser(quiet=True)
58 58 celery_command = celeryd.WorkerCommand
59 59
60 60
61 61 class CeleryBeatCommand(CeleryCommand):
62 62 """Start the celery beat server
63 63
64 64 Starts the celery beat server using a paste.deploy configuration
65 65 file.
66 66 """
67 67 usage = 'CONFIG_FILE [celerybeat options...]'
68 68 summary = __doc__.splitlines()[0]
69 69 description = "".join(__doc__.splitlines()[2:])
70 70
71 71 parser = Command.standard_parser(quiet=True)
72 72 celery_command = celerybeat.BeatCommand
73 73
74 74
75 75 class CAMQPAdminCommand(CeleryCommand):
76 76 """CAMQP Admin
77 77
78 78 CAMQP celery admin tool.
79 79 """
80 80 usage = 'CONFIG_FILE [camqadm options...]'
81 81 summary = __doc__.splitlines()[0]
82 82 description = "".join(__doc__.splitlines()[2:])
83 83
84 84 parser = Command.standard_parser(quiet=True)
85 85 celery_command = camqadm.AMQPAdminCommand
86 86
87 87
88 88 class CeleryEventCommand(CeleryCommand):
89 89 """Celery event command.
90 90
91 91 Capture celery events.
92 92 """
93 93 usage = 'CONFIG_FILE [celeryev options...]'
94 94 summary = __doc__.splitlines()[0]
95 95 description = "".join(__doc__.splitlines()[2:])
96 96
97 97 parser = Command.standard_parser(quiet=True)
98 98 celery_command = celeryev.EvCommand
@@ -1,238 +1,238 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3
4 4 import os
5 5 import re
6 6 import shutil
7 7 import logging
8 8
9 9 from rhodecode.lib.dbmigrate.migrate import exceptions
10 10 from rhodecode.lib.dbmigrate.migrate.versioning import pathed, script
11 11 from datetime import datetime
12 12
13 13
14 14 log = logging.getLogger(__name__)
15 15
16 16 class VerNum(object):
17 17 """A version number that behaves like a string and int at the same time"""
18 18
19 19 _instances = dict()
20 20
21 21 def __new__(cls, value):
22 22 val = str(value)
23 23 if val not in cls._instances:
24 24 cls._instances[val] = super(VerNum, cls).__new__(cls)
25 25 ret = cls._instances[val]
26 26 return ret
27 27
28 28 def __init__(self,value):
29 29 self.value = str(int(value))
30 30 if self < 0:
31 31 raise ValueError("Version number cannot be negative")
32 32
33 33 def __add__(self, value):
34 34 ret = int(self) + int(value)
35 35 return VerNum(ret)
36 36
37 37 def __sub__(self, value):
38 38 return self + (int(value) * -1)
39 39
40 40 def __cmp__(self, value):
41 41 return int(self) - int(value)
42 42
43 43 def __repr__(self):
44 44 return "<VerNum(%s)>" % self.value
45 45
46 46 def __str__(self):
47 47 return str(self.value)
48 48
49 49 def __int__(self):
50 50 return int(self.value)
51 51
52 52
53 53 class Collection(pathed.Pathed):
54 54 """A collection of versioning scripts in a repository"""
55 55
56 56 FILENAME_WITH_VERSION = re.compile(r'^(\d{3,}).*')
57 57
58 58 def __init__(self, path):
59 59 """Collect current version scripts in repository
60 60 and store them in self.versions
61 61 """
62 62 super(Collection, self).__init__(path)
63 63
64 64 # Create temporary list of files, allowing skipped version numbers.
65 65 files = os.listdir(path)
66 66 if '1' in files:
67 67 # deprecation
68 68 raise Exception('It looks like you have a repository in the old '
69 69 'format (with directories for each version). '
70 70 'Please convert repository before proceeding.')
71 71
72 72 tempVersions = dict()
73 73 for filename in files:
74 74 match = self.FILENAME_WITH_VERSION.match(filename)
75 75 if match:
76 76 num = int(match.group(1))
77 77 tempVersions.setdefault(num, []).append(filename)
78 78 else:
79 79 pass # Must be a helper file or something, let's ignore it.
80 80
81 81 # Create the versions member where the keys
82 82 # are VerNum's and the values are Version's.
83 83 self.versions = dict()
84 84 for num, files in tempVersions.items():
85 85 self.versions[VerNum(num)] = Version(num, path, files)
86 86
87 87 @property
88 88 def latest(self):
89 89 """:returns: Latest version in Collection"""
90 90 return max([VerNum(0)] + self.versions.keys())
91 91
92 92 def _next_ver_num(self, use_timestamp_numbering):
93 if use_timestamp_numbering == True:
93 if use_timestamp_numbering:
94 94 return VerNum(int(datetime.utcnow().strftime('%Y%m%d%H%M%S')))
95 95 else:
96 96 return self.latest + 1
97 97
98 98 def create_new_python_version(self, description, **k):
99 99 """Create Python files for new version"""
100 100 ver = self._next_ver_num(k.pop('use_timestamp_numbering', False))
101 101 extra = str_to_filename(description)
102 102
103 103 if extra:
104 104 if extra == '_':
105 105 extra = ''
106 106 elif not extra.startswith('_'):
107 107 extra = '_%s' % extra
108 108
109 109 filename = '%03d%s.py' % (ver, extra)
110 110 filepath = self._version_path(filename)
111 111
112 112 script.PythonScript.create(filepath, **k)
113 113 self.versions[ver] = Version(ver, self.path, [filename])
114 114
115 115 def create_new_sql_version(self, database, description, **k):
116 116 """Create SQL files for new version"""
117 117 ver = self._next_ver_num(k.pop('use_timestamp_numbering', False))
118 118 self.versions[ver] = Version(ver, self.path, [])
119 119
120 120 extra = str_to_filename(description)
121 121
122 122 if extra:
123 123 if extra == '_':
124 124 extra = ''
125 125 elif not extra.startswith('_'):
126 126 extra = '_%s' % extra
127 127
128 128 # Create new files.
129 129 for op in ('upgrade', 'downgrade'):
130 130 filename = '%03d%s_%s_%s.sql' % (ver, extra, database, op)
131 131 filepath = self._version_path(filename)
132 132 script.SqlScript.create(filepath, **k)
133 133 self.versions[ver].add_script(filepath)
134 134
135 135 def version(self, vernum=None):
136 136 """Returns latest Version if vernum is not given.
137 137 Otherwise, returns wanted version"""
138 138 if vernum is None:
139 139 vernum = self.latest
140 140 return self.versions[VerNum(vernum)]
141 141
142 142 @classmethod
143 143 def clear(cls):
144 144 super(Collection, cls).clear()
145 145
146 146 def _version_path(self, ver):
147 147 """Returns path of file in versions repository"""
148 148 return os.path.join(self.path, str(ver))
149 149
150 150
151 151 class Version(object):
152 152 """A single version in a collection
153 153 :param vernum: Version Number
154 154 :param path: Path to script files
155 155 :param filelist: List of scripts
156 156 :type vernum: int, VerNum
157 157 :type path: string
158 158 :type filelist: list
159 159 """
160 160
161 161 def __init__(self, vernum, path, filelist):
162 162 self.version = VerNum(vernum)
163 163
164 164 # Collect scripts in this folder
165 165 self.sql = dict()
166 166 self.python = None
167 167
168 168 for script in filelist:
169 169 self.add_script(os.path.join(path, script))
170 170
171 171 def script(self, database=None, operation=None):
172 172 """Returns SQL or Python Script"""
173 173 for db in (database, 'default'):
174 174 # Try to return a .sql script first
175 175 try:
176 176 return self.sql[db][operation]
177 177 except KeyError:
178 178 continue # No .sql script exists
179 179
180 180 # TODO: maybe add force Python parameter?
181 181 ret = self.python
182 182
183 183 assert ret is not None, \
184 184 "There is no script for %d version" % self.version
185 185 return ret
186 186
187 187 def add_script(self, path):
188 188 """Add script to Collection/Version"""
189 189 if path.endswith(Extensions.py):
190 190 self._add_script_py(path)
191 191 elif path.endswith(Extensions.sql):
192 192 self._add_script_sql(path)
193 193
194 194 SQL_FILENAME = re.compile(r'^.*\.sql')
195 195
196 196 def _add_script_sql(self, path):
197 197 basename = os.path.basename(path)
198 198 match = self.SQL_FILENAME.match(basename)
199 199
200 200 if match:
201 201 basename = basename.replace('.sql', '')
202 202 parts = basename.split('_')
203 203 if len(parts) < 3:
204 204 raise exceptions.ScriptError(
205 205 "Invalid SQL script name %s " % basename + \
206 206 "(needs to be ###_description_database_operation.sql)")
207 207 version = parts[0]
208 208 op = parts[-1]
209 209 dbms = parts[-2]
210 210 else:
211 211 raise exceptions.ScriptError(
212 212 "Invalid SQL script name %s " % basename + \
213 213 "(needs to be ###_description_database_operation.sql)")
214 214
215 215 # File the script into a dictionary
216 216 self.sql.setdefault(dbms, {})[op] = script.SqlScript(path)
217 217
218 218 def _add_script_py(self, path):
219 219 if self.python is not None:
220 220 raise exceptions.ScriptError('You can only have one Python script '
221 221 'per version, but you have: %s and %s' % (self.python, path))
222 222 self.python = script.PythonScript(path)
223 223
224 224
225 225 class Extensions:
226 226 """A namespace for file extensions"""
227 227 py = 'py'
228 228 sql = 'sql'
229 229
230 230 def str_to_filename(s):
231 231 """Replaces spaces, (double and single) quotes
232 232 and double underscores to underscores
233 233 """
234 234
235 235 s = s.replace(' ', '_').replace('"', '_').replace("'", '_').replace(".", "_")
236 236 while '__' in s:
237 237 s = s.replace('__', '_')
238 238 return s
@@ -1,816 +1,816 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import datetime
30 30 import traceback
31 31 import paste
32 32 import beaker
33 33 import tarfile
34 34 import shutil
35 35 import decorator
36 36 import warnings
37 37 from os.path import abspath
38 38 from os.path import dirname as dn, join as jn
39 39
40 40 from paste.script.command import Command, BadCommand
41 41
42 42 from mercurial import ui, config
43 43
44 44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 45
46 46 from rhodecode.lib.vcs import get_backend
47 47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 50 from rhodecode.lib.vcs.exceptions import VCSError
51 51
52 52 from rhodecode.lib.caching_query import FromCache
53 53
54 54 from rhodecode.model import meta
55 55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation, UserGroup
57 57 from rhodecode.model.meta import Session
58 58 from rhodecode.model.repos_group import ReposGroupModel
59 59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 60 from rhodecode.lib.vcs.utils.fakemod import create_module
61 61 from rhodecode.model.users_group import UserGroupModel
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
66 66
67 67
68 68 def recursive_replace(str_, replace=' '):
69 69 """
70 70 Recursive replace of given sign to just one instance
71 71
72 72 :param str_: given string
73 73 :param replace: char to find and replace multiple instances
74 74
75 75 Examples::
76 76 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
77 77 'Mighty-Mighty-Bo-sstones'
78 78 """
79 79
80 80 if str_.find(replace * 2) == -1:
81 81 return str_
82 82 else:
83 83 str_ = str_.replace(replace * 2, replace)
84 84 return recursive_replace(str_, replace)
85 85
86 86
87 87 def repo_name_slug(value):
88 88 """
89 89 Return slug of name of repository
90 90 This function is called on each creation/modification
91 91 of repository to prevent bad names in repo
92 92 """
93 93
94 94 slug = remove_formatting(value)
95 95 slug = strip_tags(slug)
96 96
97 97 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
98 98 slug = slug.replace(c, '-')
99 99 slug = recursive_replace(slug, '-')
100 100 slug = collapse(slug, '-')
101 101 return slug
102 102
103 103
104 104 #==============================================================================
105 105 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
106 106 #==============================================================================
107 107 def get_repo_slug(request):
108 108 _repo = request.environ['pylons.routes_dict'].get('repo_name')
109 109 if _repo:
110 110 _repo = _repo.rstrip('/')
111 111 return _repo
112 112
113 113
114 114 def get_repos_group_slug(request):
115 115 _group = request.environ['pylons.routes_dict'].get('group_name')
116 116 if _group:
117 117 _group = _group.rstrip('/')
118 118 return _group
119 119
120 120
121 121 def get_user_group_slug(request):
122 122 _group = request.environ['pylons.routes_dict'].get('id')
123 123 try:
124 124 _group = UserGroup.get(_group)
125 125 if _group:
126 126 _group = _group.users_group_name
127 127 except Exception:
128 128 log.debug(traceback.format_exc())
129 129 #catch all failures here
130 130 pass
131 131
132 132 return _group
133 133
134 134
135 135 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
136 136 """
137 137 Action logger for various actions made by users
138 138
139 139 :param user: user that made this action, can be a unique username string or
140 140 object containing user_id attribute
141 141 :param action: action to log, should be one of predefined unique actions for
142 142 easy translations
143 143 :param repo: string name of repository or object containing repo_id,
144 144 that action was made on
145 145 :param ipaddr: optional ip address from which the action was made
146 146 :param sa: optional sqlalchemy session
147 147
148 148 """
149 149
150 150 if not sa:
151 151 sa = meta.Session()
152 152
153 153 try:
154 154 if hasattr(user, 'user_id'):
155 155 user_obj = User.get(user.user_id)
156 156 elif isinstance(user, basestring):
157 157 user_obj = User.get_by_username(user)
158 158 else:
159 159 raise Exception('You have to provide a user object or a username')
160 160
161 161 if hasattr(repo, 'repo_id'):
162 162 repo_obj = Repository.get(repo.repo_id)
163 163 repo_name = repo_obj.repo_name
164 164 elif isinstance(repo, basestring):
165 165 repo_name = repo.lstrip('/')
166 166 repo_obj = Repository.get_by_repo_name(repo_name)
167 167 else:
168 168 repo_obj = None
169 169 repo_name = ''
170 170
171 171 user_log = UserLog()
172 172 user_log.user_id = user_obj.user_id
173 173 user_log.username = user_obj.username
174 174 user_log.action = safe_unicode(action)
175 175
176 176 user_log.repository = repo_obj
177 177 user_log.repository_name = repo_name
178 178
179 179 user_log.action_date = datetime.datetime.now()
180 180 user_log.user_ip = ipaddr
181 181 sa.add(user_log)
182 182
183 183 log.info('Logging action:%s on %s by user:%s ip:%s' %
184 184 (action, safe_unicode(repo), user_obj, ipaddr))
185 185 if commit:
186 186 sa.commit()
187 187 except Exception:
188 188 log.error(traceback.format_exc())
189 189 raise
190 190
191 191
192 192 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
193 193 """
194 194 Scans given path for repos and return (name,(type,path)) tuple
195 195
196 196 :param path: path to scan for repositories
197 197 :param recursive: recursive search and return names with subdirs in front
198 198 """
199 199
200 200 # remove ending slash for better results
201 201 path = path.rstrip(os.sep)
202 202 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
203 203
204 204 def _get_repos(p):
205 205 if not os.access(p, os.W_OK):
206 206 log.warn('ignoring repo path without write access: %s', p)
207 207 return
208 208 for dirpath in os.listdir(p):
209 209 if os.path.isfile(os.path.join(p, dirpath)):
210 210 continue
211 211 cur_path = os.path.join(p, dirpath)
212 212
213 213 # skip removed repos
214 214 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
215 215 continue
216 216
217 217 #skip .<something> dirs
218 218 if dirpath.startswith('.'):
219 219 continue
220 220
221 221 try:
222 222 scm_info = get_scm(cur_path)
223 223 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
224 224 except VCSError:
225 225 if not recursive:
226 226 continue
227 227 #check if this dir contains other repos for recursive scan
228 228 rec_path = os.path.join(p, dirpath)
229 229 if os.path.isdir(rec_path):
230 230 for inner_scm in _get_repos(rec_path):
231 231 yield inner_scm
232 232
233 233 return _get_repos(path)
234 234
235 235
236 236 def is_valid_repo(repo_name, base_path, scm=None):
237 237 """
238 238 Returns True if given path is a valid repository False otherwise.
239 239 If scm param is given also compare if given scm is the same as expected
240 240 from scm parameter
241 241
242 242 :param repo_name:
243 243 :param base_path:
244 244 :param scm:
245 245
246 246 :return True: if given path is a valid repository
247 247 """
248 248 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
249 249
250 250 try:
251 251 scm_ = get_scm(full_path)
252 252 if scm:
253 253 return scm_[0] == scm
254 254 return True
255 255 except VCSError:
256 256 return False
257 257
258 258
259 259 def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
260 260 """
261 261 Returns True if given path is a repository group False otherwise
262 262
263 263 :param repo_name:
264 264 :param base_path:
265 265 """
266 266 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
267 267
268 268 # check if it's not a repo
269 269 if is_valid_repo(repos_group_name, base_path):
270 270 return False
271 271
272 272 try:
273 273 # we need to check bare git repos at higher level
274 274 # since we might match branches/hooks/info/objects or possible
275 275 # other things inside bare git repo
276 276 get_scm(os.path.dirname(full_path))
277 277 return False
278 278 except VCSError:
279 279 pass
280 280
281 281 # check if it's a valid path
282 282 if skip_path_check or os.path.isdir(full_path):
283 283 return True
284 284
285 285 return False
286 286
287 287
288 288 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
289 289 while True:
290 290 ok = raw_input(prompt)
291 291 if ok in ('y', 'ye', 'yes'):
292 292 return True
293 293 if ok in ('n', 'no', 'nop', 'nope'):
294 294 return False
295 295 retries = retries - 1
296 296 if retries < 0:
297 297 raise IOError
298 298 print complaint
299 299
300 300 #propagated from mercurial documentation
301 301 ui_sections = ['alias', 'auth',
302 302 'decode/encode', 'defaults',
303 303 'diff', 'email',
304 304 'extensions', 'format',
305 305 'merge-patterns', 'merge-tools',
306 306 'hooks', 'http_proxy',
307 307 'smtp', 'patch',
308 308 'paths', 'profiling',
309 309 'server', 'trusted',
310 310 'ui', 'web', ]
311 311
312 312
313 313 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
314 314 """
315 315 A function that will read python rc files or the database
316 316 and make a mercurial ui object from the options read
317 317
318 318 :param path: path to mercurial config file
319 319 :param checkpaths: check the path
320 320 :param read_from: read from 'file' or 'db'
321 321 """
322 322
323 323 baseui = ui.ui()
324 324
325 325 # clean the baseui object
326 326 baseui._ocfg = config.config()
327 327 baseui._ucfg = config.config()
328 328 baseui._tcfg = config.config()
329 329
330 330 if read_from == 'file':
331 331 if not os.path.isfile(path):
332 332 log.debug('hgrc file is not present at %s, skipping...' % path)
333 333 return False
334 334 log.debug('reading hgrc from %s' % path)
335 335 cfg = config.config()
336 336 cfg.read(path)
337 337 for section in ui_sections:
338 338 for k, v in cfg.items(section):
339 339 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
340 340 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
341 341
342 342 elif read_from == 'db':
343 343 sa = meta.Session()
344 344 ret = sa.query(RhodeCodeUi)\
345 345 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
346 346 .all()
347 347
348 348 hg_ui = ret
349 349 for ui_ in hg_ui:
350 350 if ui_.ui_active:
351 351 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
352 352 ui_.ui_key, ui_.ui_value)
353 353 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
354 354 safe_str(ui_.ui_value))
355 355 if ui_.ui_key == 'push_ssl':
356 356 # force set push_ssl requirement to False, rhodecode
357 357 # handles that
358 358 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
359 359 False)
360 360 if clear_session:
361 361 meta.Session.remove()
362 362 return baseui
363 363
364 364
365 365 def set_rhodecode_config(config):
366 366 """
367 367 Updates pylons config with new settings from database
368 368
369 369 :param config:
370 370 """
371 371 hgsettings = RhodeCodeSetting.get_app_settings()
372 372
373 373 for k, v in hgsettings.items():
374 374 config[k] = v
375 375
376 376
377 377 def set_vcs_config(config):
378 378 """
379 379 Patch VCS config with some RhodeCode specific stuff
380 380
381 381 :param config: rhodecode.CONFIG
382 382 """
383 383 import rhodecode
384 384 from rhodecode.lib.vcs import conf
385 385 from rhodecode.lib.utils2 import aslist
386 386 conf.settings.BACKENDS = {
387 387 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
388 388 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
389 389 }
390 390
391 391 conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
392 392 conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
393 393 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
394 394 'utf8'), sep=',')
395 395
396 396
397 397 def map_groups(path):
398 398 """
399 399 Given a full path to a repository, create all nested groups that this
400 400 repo is inside. This function creates parent-child relationships between
401 401 groups and creates default perms for all new groups.
402 402
403 403 :param paths: full path to repository
404 404 """
405 405 sa = meta.Session()
406 406 groups = path.split(Repository.url_sep())
407 407 parent = None
408 408 group = None
409 409
410 410 # last element is repo in nested groups structure
411 411 groups = groups[:-1]
412 412 rgm = ReposGroupModel(sa)
413 413 owner = User.get_first_admin()
414 414 for lvl, group_name in enumerate(groups):
415 415 group_name = '/'.join(groups[:lvl] + [group_name])
416 416 group = RepoGroup.get_by_group_name(group_name)
417 417 desc = '%s group' % group_name
418 418
419 419 # skip folders that are now removed repos
420 420 if REMOVED_REPO_PAT.match(group_name):
421 421 break
422 422
423 423 if group is None:
424 424 log.debug('creating group level: %s group_name: %s'
425 425 % (lvl, group_name))
426 426 group = RepoGroup(group_name, parent)
427 427 group.group_description = desc
428 428 group.user = owner
429 429 sa.add(group)
430 430 perm_obj = rgm._create_default_perms(group)
431 431 sa.add(perm_obj)
432 432 sa.flush()
433 433
434 434 parent = group
435 435 return group
436 436
437 437
438 438 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
439 439 install_git_hook=False):
440 440 """
441 441 maps all repos given in initial_repo_list, non-existing repositories
442 442 are created, if remove_obsolete is True it also checks for db entries
443 443 that are not in initial_repo_list and removes them.
444 444
445 445 :param initial_repo_list: list of repositories found by scanning methods
446 446 :param remove_obsolete: check for obsolete entries in database
447 447 :param install_git_hook: if this is True, also check and install githook
448 448 for a repo if missing
449 449 """
450 450 from rhodecode.model.repo import RepoModel
451 451 from rhodecode.model.scm import ScmModel
452 452 sa = meta.Session()
453 453 rm = RepoModel()
454 454 user = User.get_first_admin()
455 455 added = []
456 456
457 457 ##creation defaults
458 458 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
459 459 enable_statistics = defs.get('repo_enable_statistics')
460 460 enable_locking = defs.get('repo_enable_locking')
461 461 enable_downloads = defs.get('repo_enable_downloads')
462 462 private = defs.get('repo_private')
463 463
464 464 for name, repo in initial_repo_list.items():
465 465 group = map_groups(name)
466 466 db_repo = rm.get_by_repo_name(name)
467 467 # found repo that is on filesystem not in RhodeCode database
468 468 if not db_repo:
469 469 log.info('repository %s not found, creating now' % name)
470 470 added.append(name)
471 471 desc = (repo.description
472 472 if repo.description != 'unknown'
473 473 else '%s repository' % name)
474 474
475 475 new_repo = rm.create_repo(
476 476 repo_name=name,
477 477 repo_type=repo.alias,
478 478 description=desc,
479 479 repos_group=getattr(group, 'group_id', None),
480 480 owner=user,
481 481 just_db=True,
482 482 enable_locking=enable_locking,
483 483 enable_downloads=enable_downloads,
484 484 enable_statistics=enable_statistics,
485 485 private=private
486 486 )
487 487 # we added that repo just now, and make sure it has githook
488 488 # installed
489 489 if new_repo.repo_type == 'git':
490 490 ScmModel().install_git_hook(new_repo.scm_instance)
491 491 new_repo.update_changeset_cache()
492 492 elif install_git_hook:
493 493 if db_repo.repo_type == 'git':
494 494 ScmModel().install_git_hook(db_repo.scm_instance)
495 495
496 496 sa.commit()
497 497 removed = []
498 498 if remove_obsolete:
499 499 # remove from database those repositories that are not in the filesystem
500 500 for repo in sa.query(Repository).all():
501 501 if repo.repo_name not in initial_repo_list.keys():
502 502 log.debug("Removing non-existing repository found in db `%s`" %
503 503 repo.repo_name)
504 504 try:
505 505 removed.append(repo.repo_name)
506 506 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
507 507 sa.commit()
508 508 except Exception:
509 509 #don't hold further removals on error
510 510 log.error(traceback.format_exc())
511 511 sa.rollback()
512 512 return added, removed
513 513
514 514
515 515 # set cache regions for beaker so celery can utilise it
516 516 def add_cache(settings):
517 517 cache_settings = {'regions': None}
518 518 for key in settings.keys():
519 519 for prefix in ['beaker.cache.', 'cache.']:
520 520 if key.startswith(prefix):
521 521 name = key.split(prefix)[1].strip()
522 522 cache_settings[name] = settings[key].strip()
523 523 if cache_settings['regions']:
524 524 for region in cache_settings['regions'].split(','):
525 525 region = region.strip()
526 526 region_settings = {}
527 527 for key, value in cache_settings.items():
528 528 if key.startswith(region):
529 529 region_settings[key.split('.')[1]] = value
530 530 region_settings['expire'] = int(region_settings.get('expire',
531 531 60))
532 532 region_settings.setdefault('lock_dir',
533 533 cache_settings.get('lock_dir'))
534 534 region_settings.setdefault('data_dir',
535 535 cache_settings.get('data_dir'))
536 536
537 537 if 'type' not in region_settings:
538 538 region_settings['type'] = cache_settings.get('type',
539 539 'memory')
540 540 beaker.cache.cache_regions[region] = region_settings
541 541
542 542
543 543 def load_rcextensions(root_path):
544 544 import rhodecode
545 545 from rhodecode.config import conf
546 546
547 547 path = os.path.join(root_path, 'rcextensions', '__init__.py')
548 548 if os.path.isfile(path):
549 549 rcext = create_module('rc', path)
550 550 EXT = rhodecode.EXTENSIONS = rcext
551 551 log.debug('Found rcextensions now loading %s...' % rcext)
552 552
553 553 # Additional mappings that are not present in the pygments lexers
554 554 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
555 555
556 556 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
557 557
558 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
558 if getattr(EXT, 'INDEX_EXTENSIONS', []):
559 559 log.debug('settings custom INDEX_EXTENSIONS')
560 560 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
561 561
562 562 #ADDITIONAL MAPPINGS
563 563 log.debug('adding extra into INDEX_EXTENSIONS')
564 564 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
565 565
566 566 # auto check if the module is not missing any data, set to default if is
567 567 # this will help autoupdate new feature of rcext module
568 568 from rhodecode.config import rcextensions
569 569 for k in dir(rcextensions):
570 570 if not k.startswith('_') and not hasattr(EXT, k):
571 571 setattr(EXT, k, getattr(rcextensions, k))
572 572
573 573
574 574 def get_custom_lexer(extension):
575 575 """
576 576 returns a custom lexer if it's defined in rcextensions module, or None
577 577 if there's no custom lexer defined
578 578 """
579 579 import rhodecode
580 580 from pygments import lexers
581 581 #check if we didn't define this extension as other lexer
582 582 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
583 583 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
584 584 return lexers.get_lexer_by_name(_lexer_name)
585 585
586 586
587 587 #==============================================================================
588 588 # TEST FUNCTIONS AND CREATORS
589 589 #==============================================================================
590 590 def create_test_index(repo_location, config, full_index):
591 591 """
592 592 Makes default test index
593 593
594 594 :param config: test config
595 595 :param full_index:
596 596 """
597 597
598 598 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
599 599 from rhodecode.lib.pidlock import DaemonLock, LockHeld
600 600
601 601 repo_location = repo_location
602 602
603 603 index_location = os.path.join(config['app_conf']['index_dir'])
604 604 if not os.path.exists(index_location):
605 605 os.makedirs(index_location)
606 606
607 607 try:
608 608 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
609 609 WhooshIndexingDaemon(index_location=index_location,
610 610 repo_location=repo_location)\
611 611 .run(full_index=full_index)
612 612 l.release()
613 613 except LockHeld:
614 614 pass
615 615
616 616
617 617 def create_test_env(repos_test_path, config):
618 618 """
619 619 Makes a fresh database and
620 620 install test repository into tmp dir
621 621 """
622 622 from rhodecode.lib.db_manage import DbManage
623 623 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
624 624
625 625 # PART ONE create db
626 626 dbconf = config['sqlalchemy.db1.url']
627 627 log.debug('making test db %s' % dbconf)
628 628
629 629 # create test dir if it doesn't exist
630 630 if not os.path.isdir(repos_test_path):
631 631 log.debug('Creating testdir %s' % repos_test_path)
632 632 os.makedirs(repos_test_path)
633 633
634 634 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
635 635 tests=True)
636 636 dbmanage.create_tables(override=True)
637 637 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
638 638 dbmanage.create_default_user()
639 639 dbmanage.admin_prompt()
640 640 dbmanage.create_permissions()
641 641 dbmanage.populate_default_permissions()
642 642 Session().commit()
643 643 # PART TWO make test repo
644 644 log.debug('making test vcs repositories')
645 645
646 646 idx_path = config['app_conf']['index_dir']
647 647 data_path = config['app_conf']['cache_dir']
648 648
649 649 #clean index and data
650 650 if idx_path and os.path.exists(idx_path):
651 651 log.debug('remove %s' % idx_path)
652 652 shutil.rmtree(idx_path)
653 653
654 654 if data_path and os.path.exists(data_path):
655 655 log.debug('remove %s' % data_path)
656 656 shutil.rmtree(data_path)
657 657
658 658 #CREATE DEFAULT TEST REPOS
659 659 cur_dir = dn(dn(abspath(__file__)))
660 660 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
661 661 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
662 662 tar.close()
663 663
664 664 cur_dir = dn(dn(abspath(__file__)))
665 665 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
666 666 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
667 667 tar.close()
668 668
669 669 #LOAD VCS test stuff
670 670 from rhodecode.tests.vcs import setup_package
671 671 setup_package()
672 672
673 673
674 674 #==============================================================================
675 675 # PASTER COMMANDS
676 676 #==============================================================================
677 677 class BasePasterCommand(Command):
678 678 """
679 679 Abstract Base Class for paster commands.
680 680
681 681 The celery commands are somewhat aggressive about loading
682 682 celery.conf, and since our module sets the `CELERY_LOADER`
683 683 environment variable to our loader, we have to bootstrap a bit and
684 684 make sure we've had a chance to load the pylons config off of the
685 685 command line, otherwise everything fails.
686 686 """
687 687 min_args = 1
688 688 min_args_error = "Please provide a paster config file as an argument."
689 689 takes_config_file = 1
690 690 requires_config_file = True
691 691
692 692 def notify_msg(self, msg, log=False):
693 693 """Make a notification to user, additionally if logger is passed
694 694 it logs this action using given logger
695 695
696 696 :param msg: message that will be printed to user
697 697 :param log: logging instance, to use to additionally log this message
698 698
699 699 """
700 700 if log and isinstance(log, logging):
701 701 log(msg)
702 702
703 703 def run(self, args):
704 704 """
705 705 Overrides Command.run
706 706
707 707 Checks for a config file argument and loads it.
708 708 """
709 709 if len(args) < self.min_args:
710 710 raise BadCommand(
711 711 self.min_args_error % {'min_args': self.min_args,
712 712 'actual_args': len(args)})
713 713
714 714 # Decrement because we're going to lob off the first argument.
715 715 # @@ This is hacky
716 716 self.min_args -= 1
717 717 self.bootstrap_config(args[0])
718 718 self.update_parser()
719 719 return super(BasePasterCommand, self).run(args[1:])
720 720
721 721 def update_parser(self):
722 722 """
723 723 Abstract method. Allows for the class's parser to be updated
724 724 before the superclass's `run` method is called. Necessary to
725 725 allow options/arguments to be passed through to the underlying
726 726 celery command.
727 727 """
728 728 raise NotImplementedError("Abstract Method.")
729 729
730 730 def bootstrap_config(self, conf):
731 731 """
732 732 Loads the pylons configuration.
733 733 """
734 734 from pylons import config as pylonsconfig
735 735
736 736 self.path_to_ini_file = os.path.realpath(conf)
737 737 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
738 738 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
739 739
740 740 def _init_session(self):
741 741 """
742 742 Inits SqlAlchemy Session
743 743 """
744 744 logging.config.fileConfig(self.path_to_ini_file)
745 745 from pylons import config
746 746 from rhodecode.model import init_model
747 747 from rhodecode.lib.utils2 import engine_from_config
748 748
749 749 #get to remove repos !!
750 750 add_cache(config)
751 751 engine = engine_from_config(config, 'sqlalchemy.db1.')
752 752 init_model(engine)
753 753
754 754
755 755 def check_git_version():
756 756 """
757 757 Checks what version of git is installed in system, and issues a warning
758 758 if it's too old for RhodeCode to properly work.
759 759 """
760 760 from rhodecode import BACKENDS
761 761 from rhodecode.lib.vcs.backends.git.repository import GitRepository
762 762 from rhodecode.lib.vcs.conf import settings
763 763 from distutils.version import StrictVersion
764 764
765 765 stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
766 766 _safe=True)
767 767
768 768 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
769 769 if len(ver.split('.')) > 3:
770 770 #StrictVersion needs to be only 3 element type
771 771 ver = '.'.join(ver.split('.')[:3])
772 772 try:
773 773 _ver = StrictVersion(ver)
774 774 except Exception:
775 775 _ver = StrictVersion('0.0.0')
776 776 stderr = traceback.format_exc()
777 777
778 778 req_ver = '1.7.4'
779 779 to_old_git = False
780 780 if _ver < StrictVersion(req_ver):
781 781 to_old_git = True
782 782
783 783 if 'git' in BACKENDS:
784 784 log.debug('GIT executable: "%s" version detected: %s'
785 785 % (settings.GIT_EXECUTABLE_PATH, stdout))
786 786 if stderr:
787 787 log.warning('Unable to detect git version, org error was: %r' % stderr)
788 788 elif to_old_git:
789 789 log.warning('RhodeCode detected git version %s, which is too old '
790 790 'for the system to function properly. Make sure '
791 791 'its version is at least %s' % (ver, req_ver))
792 792 return _ver
793 793
794 794
795 795 @decorator.decorator
796 796 def jsonify(func, *args, **kwargs):
797 797 """Action decorator that formats output for JSON
798 798
799 799 Given a function that will return content, this decorator will turn
800 800 the result into JSON, with a content-type of 'application/json' and
801 801 output it.
802 802
803 803 """
804 804 from pylons.decorators.util import get_pylons
805 805 from rhodecode.lib.compat import json
806 806 pylons = get_pylons(args)
807 807 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
808 808 data = func(*args, **kwargs)
809 809 if isinstance(data, (list, tuple)):
810 810 msg = "JSON responses with Array envelopes are susceptible to " \
811 811 "cross-site data leak attacks, see " \
812 812 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
813 813 warnings.warn(msg, Warning, 2)
814 814 log.warning(msg)
815 815 log.debug("Returning JSON wrapped action output")
816 816 return json.dumps(data, encoding='utf-8')
@@ -1,163 +1,163 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.permission
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 permissions model for RhodeCode
7 7
8 8 :created_on: Aug 20, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import logging
27 27 import traceback
28 28
29 29 from sqlalchemy.exc import DatabaseError
30 30
31 31 from rhodecode.model import BaseModel
32 32 from rhodecode.model.db import User, Permission, UserToPerm, UserRepoToPerm,\
33 33 UserRepoGroupToPerm, UserUserGroupToPerm
34 34 from rhodecode.lib.utils2 import str2bool
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
39 39 class PermissionModel(BaseModel):
40 40 """
41 41 Permissions model for RhodeCode
42 42 """
43 43
44 44 cls = Permission
45 45
46 46 def create_permissions(self):
47 47 """
48 48 Create permissions for whole system
49 49 """
50 50 for p in Permission.PERMS:
51 51 if not Permission.get_by_key(p[0]):
52 52 new_perm = Permission()
53 53 new_perm.permission_name = p[0]
54 54 new_perm.permission_longname = p[0] #translation err with p[1]
55 55 self.sa.add(new_perm)
56 56
57 57 def create_default_permissions(self, user):
58 58 """
59 59 Creates only missing default permissions for user
60 60
61 61 :param user:
62 62 """
63 63 user = self._get_user(user)
64 64
65 65 def _make_perm(perm):
66 66 new_perm = UserToPerm()
67 67 new_perm.user = user
68 68 new_perm.permission = Permission.get_by_key(perm)
69 69 return new_perm
70 70
71 71 def _get_group(perm_name):
72 72 return '.'.join(perm_name.split('.')[:1])
73 73
74 74 perms = UserToPerm.query().filter(UserToPerm.user == user).all()
75 75 defined_perms_groups = map(_get_group,
76 76 (x.permission.permission_name for x in perms))
77 77 log.debug('GOT ALREADY DEFINED:%s' % perms)
78 78 DEFAULT_PERMS = Permission.DEFAULT_USER_PERMISSIONS
79 79
80 80 # for every default permission that needs to be created, we check if
81 81 # its group is already defined, if it's not we create default perm
82 82 for perm_name in DEFAULT_PERMS:
83 83 gr = _get_group(perm_name)
84 84 if gr not in defined_perms_groups:
85 85 log.debug('GR:%s not found, creating permission %s'
86 86 % (gr, perm_name))
87 87 new_perm = _make_perm(perm_name)
88 88 self.sa.add(new_perm)
89 89
90 90 def update(self, form_result):
91 91 perm_user = User.get_by_username(username=form_result['perm_user_name'])
92 92
93 93 try:
94 94 # stage 1 set anonymous access
95 95 if perm_user.username == 'default':
96 96 perm_user.active = str2bool(form_result['anonymous'])
97 97 self.sa.add(perm_user)
98 98
99 99 # stage 2 reset defaults and set them from form data
100 100 def _make_new(usr, perm_name):
101 101 log.debug('Creating new permission:%s' % (perm_name))
102 102 new = UserToPerm()
103 103 new.user = usr
104 104 new.permission = Permission.get_by_key(perm_name)
105 105 return new
106 106 # clear current entries, to make this function idempotent
107 107 # it will fix even if we define more permissions or permissions
108 108 # are somehow missing
109 109 u2p = self.sa.query(UserToPerm)\
110 110 .filter(UserToPerm.user == perm_user)\
111 111 .all()
112 112 for p in u2p:
113 113 self.sa.delete(p)
114 114 #create fresh set of permissions
115 115 for def_perm_key in ['default_repo_perm', 'default_group_perm',
116 116 'default_user_group_perm',
117 117 'default_repo_create',
118 118 #'default_repo_group_create', #not implemented yet
119 119 'default_user_group_create',
120 120 'default_fork', 'default_register',
121 121 'default_extern_activate']:
122 122 p = _make_new(perm_user, form_result[def_perm_key])
123 123 self.sa.add(p)
124 124
125 125 #stage 3 update all default permissions for repos if checked
126 if form_result['overwrite_default_repo'] == True:
126 if form_result['overwrite_default_repo']:
127 127 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
128 128 _def = Permission.get_by_key('repository.' + _def_name)
129 129 # repos
130 130 for r2p in self.sa.query(UserRepoToPerm)\
131 131 .filter(UserRepoToPerm.user == perm_user)\
132 132 .all():
133 133
134 134 #don't reset PRIVATE repositories
135 135 if not r2p.repository.private:
136 136 r2p.permission = _def
137 137 self.sa.add(r2p)
138 138
139 if form_result['overwrite_default_group'] == True:
139 if form_result['overwrite_default_group']:
140 140 _def_name = form_result['default_group_perm'].split('group.')[-1]
141 141 # groups
142 142 _def = Permission.get_by_key('group.' + _def_name)
143 143 for g2p in self.sa.query(UserRepoGroupToPerm)\
144 144 .filter(UserRepoGroupToPerm.user == perm_user)\
145 145 .all():
146 146 g2p.permission = _def
147 147 self.sa.add(g2p)
148 148
149 if form_result['overwrite_default_user_group'] == True:
149 if form_result['overwrite_default_user_group']:
150 150 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
151 151 # groups
152 152 _def = Permission.get_by_key('usergroup.' + _def_name)
153 153 for g2p in self.sa.query(UserUserGroupToPerm)\
154 154 .filter(UserUserGroupToPerm.user == perm_user)\
155 155 .all():
156 156 g2p.permission = _def
157 157 self.sa.add(g2p)
158 158
159 159 self.sa.commit()
160 160 except (DatabaseError,):
161 161 log.error(traceback.format_exc())
162 162 self.sa.rollback()
163 163 raise
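The truthiness tests introduced in this commit are only safe because the values involved are genuine booleans: CELERY_ON is derived via str2bool() from the use_celery config option, and the overwrite_default_* fields presumably arrive from form validation already converted to bool. As a rough illustration of what a str2bool-style helper does (a sketch only, not RhodeCode's actual rhodecode.lib.utils2.str2bool implementation):

def str2bool(value):
    # sketch: treat common "truthy" strings as True, everything else as False
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1')

# config values arrive as strings, so the conversion matters:
CELERY_ON = str2bool('false')   # -> False
if not CELERY_ON:               # plain truthiness test is now reliable
    print('celery disabled in config')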