fixes #59, notifications for user registrations + some changes to mailer
marcink
r689:ecc566f8 beta
@@ -1,175 +1,176 @@
1 1 ################################################################################
2 2 ################################################################################
3 3 # rhodecode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 ################################################################################
11 11 ## Uncomment and replace with the address which should receive ##
12 12 ## any error reports after application crash ##
13 13 ## Additionally those settings will be used by rhodecode mailing system ##
14 14 ################################################################################
15 15 #email_to = admin@localhost
16 16 #error_email_from = paste_error@localhost
17 17 #app_email_from = rhodecode-noreply@localhost
18 18 #error_message =
19 19
20 20 #smtp_server = mail.server.com
21 21 #smtp_username =
22 22 #smtp_password =
23 23 #smtp_port =
24 24 #smtp_use_tls = false
25 #smtp_use_ssl = true
25 26
26 27 [server:main]
27 28 ##nr of threads to spawn
28 29 threadpool_workers = 5
29 30
30 31 ##max requests before thread respawn
31 32 threadpool_max_requests = 2
32 33
33 34 ##option to use threads instead of processes
34 35 use_threadpool = true
35 36
36 37 use = egg:Paste#http
37 38 host = 127.0.0.1
38 39 port = 8001
39 40
40 41 [app:main]
41 42 use = egg:rhodecode
42 43 full_stack = true
43 44 static_files = false
44 45 lang=en
45 46 cache_dir = %(here)s/data
46 47 index_dir = %(here)s/data/index
47 48
48 49 ####################################
49 50 ### BEAKER CACHE ####
50 51 ####################################
51 52 beaker.cache.data_dir=/%(here)s/data/cache/data
52 53 beaker.cache.lock_dir=/%(here)s/data/cache/lock
53 54 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
54 55
55 56 beaker.cache.super_short_term.type=memory
56 57 beaker.cache.super_short_term.expire=10
57 58
58 59 beaker.cache.short_term.type=memory
59 60 beaker.cache.short_term.expire=60
60 61
61 62 beaker.cache.long_term.type=memory
62 63 beaker.cache.long_term.expire=36000
63 64
64 65
65 66 beaker.cache.sql_cache_short.type=memory
66 67 beaker.cache.sql_cache_short.expire=5
67 68
68 69 beaker.cache.sql_cache_med.type=memory
69 70 beaker.cache.sql_cache_med.expire=360
70 71
71 72 beaker.cache.sql_cache_long.type=file
72 73 beaker.cache.sql_cache_long.expire=3600
73 74
74 75 ####################################
75 76 ### BEAKER SESSION ####
76 77 ####################################
77 78 ## Type of storage used for the session, current types are
78 79 ## dbm, file, memcached, database, and memory.
79 80 ## The storage uses the Container API
80 81 ##that is also used by the cache system.
81 82 beaker.session.type = file
82 83
83 84 beaker.session.key = rhodecode
84 85 beaker.session.secret = g654dcno0-9873jhgfreyu
85 86 beaker.session.timeout = 36000
86 87
87 88 ##auto save the session so you don't have to call .save()
88 89 beaker.session.auto = False
89 90
90 91 ##set to true to expire the cookie at browser close
91 92 #beaker.session.cookie_expires = 3600
92 93
93 94
94 95 ################################################################################
95 96 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
96 97 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
97 98 ## execute malicious code after an exception is raised. ##
98 99 ################################################################################
99 100 set debug = false
100 101
101 102 ##################################
102 103 ### LOGVIEW CONFIG ###
103 104 ##################################
104 105 logview.sqlalchemy = #faa
105 106 logview.pylons.templating = #bfb
106 107 logview.pylons.util = #eee
107 108
108 109 #########################################################
109 110 ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG  ###
110 111 #########################################################
111 112 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
112 113 #sqlalchemy.db1.echo = False
113 114 #sqlalchemy.db1.pool_recycle = 3600
114 115 sqlalchemy.convert_unicode = true
115 116
116 117 ################################
117 118 ### LOGGING CONFIGURATION ####
118 119 ################################
119 120 [loggers]
120 121 keys = root, routes, rhodecode, sqlalchemy
121 122
122 123 [handlers]
123 124 keys = console
124 125
125 126 [formatters]
126 127 keys = generic,color_formatter
127 128
128 129 #############
129 130 ## LOGGERS ##
130 131 #############
131 132 [logger_root]
132 133 level = INFO
133 134 handlers = console
134 135
135 136 [logger_routes]
136 137 level = INFO
137 138 handlers = console
138 139 qualname = routes.middleware
139 140 # "level = DEBUG" logs the route matched and routing variables.
140 141 propagate = 0
141 142
142 143 [logger_rhodecode]
143 144 level = DEBUG
144 145 handlers = console
145 146 qualname = rhodecode
146 147 propagate = 0
147 148
148 149 [logger_sqlalchemy]
149 150 level = ERROR
150 151 handlers = console
151 152 qualname = sqlalchemy.engine
152 153 propagate = 0
153 154
154 155 ##############
155 156 ## HANDLERS ##
156 157 ##############
157 158
158 159 [handler_console]
159 160 class = StreamHandler
160 161 args = (sys.stderr,)
161 162 level = NOTSET
162 163 formatter = color_formatter
163 164
164 165 ################
165 166 ## FORMATTERS ##
166 167 ################
167 168
168 169 [formatter_generic]
169 170 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
170 171 datefmt = %Y-%m-%d %H:%M:%S
171 172
172 173 [formatter_color_formatter]
173 174 class=rhodecode.lib.colored_formatter.ColorFormatter
174 175 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
175 176 datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file
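
The change in this first config file is the new smtp_use_ssl option next to the existing smtp_use_tls one. INI values are read as strings, so the mailer code has to coerce them to booleans before handing them to SmtpMailer; that is what the str2bool helper added to the tasks module below does. A minimal standalone sketch of the same coercion, assuming the smtp_* lines have been uncommented; the filename is illustrative:

from ConfigParser import ConfigParser   # stdlib on Python 2.x

def str2bool(v):
    # same rule as the helper added to the tasks module: anything else is False
    return str(v).lower() in ["yes", "true", "t", "1"]

parser = ConfigParser()
parser.read('development.ini')           # illustrative path
email_config = parser.defaults()         # the [DEFAULT] block as a plain dict

use_tls = str2bool(email_config.get('smtp_use_tls', 'false'))
use_ssl = str2bool(email_config.get('smtp_use_ssl', 'false'))
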
@@ -1,175 +1,176 @@
1 1 ################################################################################
2 2 ################################################################################
3 3 # rhodecode - Pylons environment configuration #
4 4 # #
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10 ################################################################################
11 11 ## Uncomment and replace with the address which should receive ##
12 12 ## any error reports after application crash ##
13 13 ## Additionally those settings will be used by rhodecode mailing system ##
14 14 ################################################################################
15 15 #email_to = admin@localhost
16 16 #error_email_from = paste_error@localhost
17 17 #app_email_from = rhodecode-noreply@localhost
18 18 #error_message =
19 19
20 20 #smtp_server = mail.server.com
21 21 #smtp_username =
22 22 #smtp_password =
23 23 #smtp_port =
24 24 #smtp_use_tls = false
25 #smtp_use_ssl = true
25 26
26 27 [server:main]
27 28 ##nr of threads to spawn
28 29 threadpool_workers = 5
29 30
30 31 ##max requests before thread respawn
31 32 threadpool_max_requests = 10
32 33
33 34 ##option to use threads instead of processes
34 35 use_threadpool = true
35 36
36 37 use = egg:Paste#http
37 38 host = 127.0.0.1
38 39 port = 5000
39 40
40 41 [app:main]
41 42 use = egg:rhodecode
42 43 full_stack = true
43 44 static_files = true
44 45 lang=en
45 46 cache_dir = %(here)s/data
46 47 index_dir = %(here)s/data/index
47 48 app_instance_uuid = ${app_instance_uuid}
48 49
49 50 ####################################
50 51 ### BEAKER CACHE ####
51 52 ####################################
52 53 beaker.cache.data_dir=/%(here)s/data/cache/data
53 54 beaker.cache.lock_dir=/%(here)s/data/cache/lock
54 55 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
55 56
56 57 beaker.cache.super_short_term.type=memory
57 58 beaker.cache.super_short_term.expire=10
58 59
59 60 beaker.cache.short_term.type=memory
60 61 beaker.cache.short_term.expire=60
61 62
62 63 beaker.cache.long_term.type=memory
63 64 beaker.cache.long_term.expire=36000
64 65
65 66 beaker.cache.sql_cache_short.type=memory
66 67 beaker.cache.sql_cache_short.expire=5
67 68
68 69 beaker.cache.sql_cache_med.type=memory
69 70 beaker.cache.sql_cache_med.expire=360
70 71
71 72 beaker.cache.sql_cache_long.type=file
72 73 beaker.cache.sql_cache_long.expire=3600
73 74
74 75 ####################################
75 76 ### BEAKER SESSION ####
76 77 ####################################
77 78 ## Type of storage used for the session, current types are
78 79 ## dbm, file, memcached, database, and memory.
79 80 ## The storage uses the Container API
80 81 ##that is also used by the cache system.
81 82 beaker.session.type = file
82 83
83 84 beaker.session.key = rhodecode
84 85 beaker.session.secret = ${app_instance_secret}
85 86 beaker.session.timeout = 36000
86 87
87 88 ##auto save the session so you don't have to call .save()
88 89 beaker.session.auto = False
89 90
90 91 ##set to true to expire the cookie at browser close
91 92 #beaker.session.cookie_expires = 3600
92 93
93 94
94 95 ################################################################################
95 96 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
96 97 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
97 98 ## execute malicious code after an exception is raised. ##
98 99 ################################################################################
99 100 set debug = false
100 101
101 102 ##################################
102 103 ### LOGVIEW CONFIG ###
103 104 ##################################
104 105 logview.sqlalchemy = #faa
105 106 logview.pylons.templating = #bfb
106 107 logview.pylons.util = #eee
107 108
108 109 #########################################################
109 110 ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG  ###
110 111 #########################################################
111 112 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
112 113 #sqlalchemy.db1.echo = False
113 114 #sqlalchemy.db1.pool_recycle = 3600
114 115 sqlalchemy.convert_unicode = true
115 116
116 117 ################################
117 118 ### LOGGING CONFIGURATION ####
118 119 ################################
119 120 [loggers]
120 121 keys = root, routes, rhodecode, sqlalchemy
121 122
122 123 [handlers]
123 124 keys = console
124 125
125 126 [formatters]
126 127 keys = generic,color_formatter
127 128
128 129 #############
129 130 ## LOGGERS ##
130 131 #############
131 132 [logger_root]
132 133 level = INFO
133 134 handlers = console
134 135
135 136 [logger_routes]
136 137 level = INFO
137 138 handlers = console
138 139 qualname = routes.middleware
139 140 # "level = DEBUG" logs the route matched and routing variables.
140 141 propagate = 0
141 142
142 143 [logger_rhodecode]
143 144 level = DEBUG
144 145 handlers = console
145 146 qualname = rhodecode
146 147 propagate = 0
147 148
148 149 [logger_sqlalchemy]
149 150 level = ERROR
150 151 handlers = console
151 152 qualname = sqlalchemy.engine
152 153 propagate = 0
153 154
154 155 ##############
155 156 ## HANDLERS ##
156 157 ##############
157 158
158 159 [handler_console]
159 160 class = StreamHandler
160 161 args = (sys.stderr,)
161 162 level = NOTSET
162 163 formatter = color_formatter
163 164
164 165 ################
165 166 ## FORMATTERS ##
166 167 ################
167 168
168 169 [formatter_generic]
169 170 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
170 171 datefmt = %Y-%m-%d %H:%M:%S
171 172
172 173 [formatter_color_formatter]
173 174 class=rhodecode.lib.colored_formatter.ColorFormatter
174 175 format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
175 176 datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file
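
This second copy differs only in a few values (port, static_files, threadpool_max_requests) and in the ${app_instance_uuid} / ${app_instance_secret} placeholders, which look like template variables substituted when a concrete .ini is generated. The %(here)s variable, by contrast, is resolved when the file is read, through ordinary ConfigParser interpolation; the UpgradeDb paster command further down in utils.py does exactly that with ConfigParser({'here': root}). A small sketch of that resolution, with an illustrative path:

import os
from ConfigParser import ConfigParser   # stdlib on Python 2.x

ini_path = '/srv/rhodecode/production.ini'             # illustrative
parser = ConfigParser({'here': os.path.dirname(ini_path)})
parser.read(ini_path)

# %(here)s in the value interpolates against the 'here' default supplied above
print parser.get('app:main', 'index_dir')              # -> /srv/rhodecode/data/index
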
rhodecode/lib/celerylib/tasks.py
@@ -1,299 +1,315 @@
1 1 from celery.decorators import task
2 2
3 3 from operator import itemgetter
4 4 from pylons.i18n.translation import _
5 5 from rhodecode.lib.celerylib import run_task, locked_task
6 6 from rhodecode.lib.helpers import person
7 7 from rhodecode.lib.smtp_mailer import SmtpMailer
8 8 from rhodecode.lib.utils import OrderedDict
9 9 from time import mktime
10 10 import os
11 11 import traceback
12 12 from vcs.backends import get_repo
13 13 from rhodecode.model.hg import HgModel
14 14 try:
15 15 import json
16 16 except ImportError:
17 17 #python 2.5 compatibility
18 18 import simplejson as json
19 19
20 20 try:
21 21 from celeryconfig import PYLONS_CONFIG as config
22 22 celery_on = True
23 23 except ImportError:
24 24 #if celeryconfig is not present let's just load our pylons
25 25 #config instead
26 26 from pylons import config
27 27 celery_on = False
28 28
29 29
30 30 __all__ = ['whoosh_index', 'get_commits_stats',
31 31 'reset_user_password', 'send_email']
32 32
33 33 def get_session():
34 34 if celery_on:
35 35 from sqlalchemy import engine_from_config
36 36 from sqlalchemy.orm import sessionmaker, scoped_session
37 37 engine = engine_from_config(dict(config.items('app:main')),
38 38 'sqlalchemy.db1.')
39 39 sa = scoped_session(sessionmaker(bind=engine))
40 40 else:
41 41 #If we don't use celery reuse our current application Session
42 42 from rhodecode.model.meta import Session
43 43 sa = Session()
44 44
45 45 return sa
46 46
47 47 @task
48 48 @locked_task
49 49 def whoosh_index(repo_location, full_index):
50 50 log = whoosh_index.get_logger()
51 51 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
52 52 index_location = dict(config.items('app:main'))['index_dir']
53 53 WhooshIndexingDaemon(index_location=index_location,
54 54 repo_location=repo_location).run(full_index=full_index)
55 55
56 56 @task
57 57 @locked_task
58 58 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
59 59 from rhodecode.model.db import Statistics, Repository
60 60 log = get_commits_stats.get_logger()
61 61 author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility
62 62
63 63 commits_by_day_author_aggregate = {}
64 64 commits_by_day_aggregate = {}
65 65 repos_path = HgModel().repos_path
66 66 p = os.path.join(repos_path, repo_name)
67 67 repo = get_repo(p)
68 68
69 69 skip_date_limit = True
70 70 parse_limit = 250 #limit for single task changeset parsing optimal for
71 71 last_rev = 0
72 72 last_cs = None
73 73 timegetter = itemgetter('time')
74 74
75 75 sa = get_session()
76 76
77 77 dbrepo = sa.query(Repository)\
78 78 .filter(Repository.repo_name == repo_name).scalar()
79 79 cur_stats = sa.query(Statistics)\
80 80 .filter(Statistics.repository == dbrepo).scalar()
81 81 if cur_stats:
82 82 last_rev = cur_stats.stat_on_revision
83 83 if not repo.revisions:
84 84 return True
85 85
86 86 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
87 87 #pass silently without any work if we're not on the first revision, or the current
88 88 #parsing state (the db marker) is already at the last revision
89 89 return True
90 90
91 91 if cur_stats:
92 92 commits_by_day_aggregate = OrderedDict(
93 93 json.loads(
94 94 cur_stats.commit_activity_combined))
95 95 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
96 96
97 97 log.debug('starting parsing %s', parse_limit)
98 98 lmktime = mktime
99 99
100 100 for cnt, rev in enumerate(repo.revisions[last_rev:]):
101 101 last_cs = cs = repo.get_changeset(rev)
102 102 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
103 103 cs.date.timetuple()[2])
104 104 timetupple = [int(x) for x in k.split('-')]
105 105 timetupple.extend([0 for _ in xrange(6)])
106 106 k = lmktime(timetupple)
107 107 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
108 108 try:
109 109 l = [timegetter(x) for x in commits_by_day_author_aggregate\
110 110 [author_key_cleaner(cs.author)]['data']]
111 111 time_pos = l.index(k)
112 112 except ValueError:
113 113 time_pos = False
114 114
115 115 if time_pos >= 0 and time_pos is not False:
116 116
117 117 datadict = commits_by_day_author_aggregate\
118 118 [author_key_cleaner(cs.author)]['data'][time_pos]
119 119
120 120 datadict["commits"] += 1
121 121 datadict["added"] += len(cs.added)
122 122 datadict["changed"] += len(cs.changed)
123 123 datadict["removed"] += len(cs.removed)
124 124
125 125 else:
126 126 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
127 127
128 128 datadict = {"time":k,
129 129 "commits":1,
130 130 "added":len(cs.added),
131 131 "changed":len(cs.changed),
132 132 "removed":len(cs.removed),
133 133 }
134 134 commits_by_day_author_aggregate\
135 135 [author_key_cleaner(cs.author)]['data'].append(datadict)
136 136
137 137 else:
138 138 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
139 139 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
140 140 "label":author_key_cleaner(cs.author),
141 141 "data":[{"time":k,
142 142 "commits":1,
143 143 "added":len(cs.added),
144 144 "changed":len(cs.changed),
145 145 "removed":len(cs.removed),
146 146 }],
147 147 "schema":["commits"],
148 148 }
149 149
150 150 #gather all data by day
151 151 if commits_by_day_aggregate.has_key(k):
152 152 commits_by_day_aggregate[k] += 1
153 153 else:
154 154 commits_by_day_aggregate[k] = 1
155 155
156 156 if cnt >= parse_limit:
157 157 #don't fetch too much data since we could freeze the application
158 158 break
159 159 overview_data = []
160 160 for k, v in commits_by_day_aggregate.items():
161 161 overview_data.append([k, v])
162 162 overview_data = sorted(overview_data, key=itemgetter(0))
163 163 if not commits_by_day_author_aggregate:
164 164 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
165 165 "label":author_key_cleaner(repo.contact),
166 166 "data":[0, 1],
167 167 "schema":["commits"],
168 168 }
169 169
170 170 stats = cur_stats if cur_stats else Statistics()
171 171 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
172 172 stats.commit_activity_combined = json.dumps(overview_data)
173 173
174 174 log.debug('last revision %s', last_rev)
175 175 leftovers = len(repo.revisions[last_rev:])
176 176 log.debug('revisions to parse %s', leftovers)
177 177
178 178 if last_rev == 0 or leftovers < parse_limit:
179 179 stats.languages = json.dumps(__get_codes_stats(repo_name))
180 180
181 181 stats.repository = dbrepo
182 182 stats.stat_on_revision = last_cs.revision
183 183
184 184 try:
185 185 sa.add(stats)
186 186 sa.commit()
187 187 except:
188 188 log.error(traceback.format_exc())
189 189 sa.rollback()
190 190 return False
191 191 if len(repo.revisions) > 1:
192 192 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
193 193
194 194 return True
195 195
196 196 @task
197 197 def reset_user_password(user_email):
198 198 log = reset_user_password.get_logger()
199 199 from rhodecode.lib import auth
200 200 from rhodecode.model.db import User
201 201
202 202 try:
203 203 try:
204 204 sa = get_session()
205 205 user = sa.query(User).filter(User.email == user_email).scalar()
206 206 new_passwd = auth.PasswordGenerator().gen_password(8,
207 207 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
208 208 if user:
209 209 user.password = auth.get_crypt_password(new_passwd)
210 210 sa.add(user)
211 211 sa.commit()
212 212 log.info('change password for %s', user_email)
213 213 if new_passwd is None:
214 214 raise Exception('unable to generate new password')
215 215
216 216 except:
217 217 log.error(traceback.format_exc())
218 218 sa.rollback()
219 219
220 220 run_task(send_email, user_email,
221 221 "Your new rhodecode password",
222 222 'Your new rhodecode password:%s' % (new_passwd))
223 223 log.info('send new password mail to %s', user_email)
224 224
225 225
226 226 except:
227 227 log.error('Failed to update user password')
228 228 log.error(traceback.format_exc())
229 229 return True
230 230
231 231 @task
232 232 def send_email(recipients, subject, body):
233 """
234 Sends an email with defined parameters from the .ini files.
235
236
237 :param recipients: list of recipients, if this is empty the defined email
238 address from field 'email_to' is used instead
239 :param subject: subject of the mail
240 :param body: body of the mail
241 """
233 242 log = send_email.get_logger()
234 243 email_config = dict(config.items('DEFAULT'))
244
245 if not recipients:
246 recipients = [email_config.get('email_to')]
247
248 def str2bool(v):
249 return v.lower() in ["yes", "true", "t", "1"]
250
235 251 mail_from = email_config.get('app_email_from')
236 252 user = email_config.get('smtp_username')
237 253 passwd = email_config.get('smtp_password')
238 254 mail_server = email_config.get('smtp_server')
239 255 mail_port = email_config.get('smtp_port')
240 tls = email_config.get('smtp_use_tls')
241 ssl = False
256 tls = str2bool(email_config.get('smtp_use_tls'))
257 ssl = str2bool(email_config.get('smtp_use_ssl'))
242 258
243 259 try:
244 260 m = SmtpMailer(mail_from, user, passwd, mail_server,
245 261 mail_port, ssl, tls)
246 262 m.send(recipients, subject, body)
247 263 except:
248 264 log.error('Mail sending failed')
249 265 log.error(traceback.format_exc())
250 266 return False
251 267 return True
252 268
253 269 @task
254 270 def create_repo_fork(form_data, cur_user):
255 271 from rhodecode.model.repo import RepoModel
256 272 from vcs import get_backend
257 273 log = create_repo_fork.get_logger()
258 274 repo_model = RepoModel(get_session())
259 275 repo_model.create(form_data, cur_user, just_db=True, fork=True)
260 276 repo_name = form_data['repo_name']
261 277 repos_path = HgModel().repos_path
262 278 repo_path = os.path.join(repos_path, repo_name)
263 279 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
264 280 alias = form_data['repo_type']
265 281
266 282 log.info('creating repo fork %s as %s', repo_name, repo_path)
267 283 backend = get_backend(alias)
268 284 backend(str(repo_fork_path), create=True, src_url=str(repo_path))
269 285
270 286 def __get_codes_stats(repo_name):
271 287 LANGUAGES_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx',
272 288 'aspx', 'asx', 'axd', 'c', 'cfg', 'cfm', 'cpp', 'cs', 'diff', 'do', 'el',
273 289 'erl', 'h', 'java', 'js', 'jsp', 'jspx', 'lisp', 'lua', 'm', 'mako', 'ml',
274 290 'pas', 'patch', 'php', 'php3', 'php4', 'phtml', 'pm', 'py', 'rb', 'rst',
275 291 's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
276 292
277 293
278 294 repos_path = HgModel().repos_path
279 295 p = os.path.join(repos_path, repo_name)
280 296 repo = get_repo(p)
281 297 tip = repo.get_changeset()
282 298 code_stats = {}
283 299
284 300 def aggregate(cs):
285 301 for f in cs[2]:
286 302 k = f.mimetype
287 303 if f.extension in LANGUAGES_EXTENSIONS:
288 304 if code_stats.has_key(k):
289 305 code_stats[k] += 1
290 306 else:
291 307 code_stats[k] = 1
292 308
293 309 map(aggregate, tip.walk('/'))
294 310
295 311 return code_stats or {}
296 312
297 313
298 314
299 315
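
The notification mail for issue #59 goes through the reworked send_email task above: when recipients is empty it now falls back to the email_to address from the [DEFAULT] block, and the new str2bool calls make smtp_use_tls/smtp_use_ssl behave as real booleans. A hedged usage sketch, mirroring how UserModel.create_registration dispatches it; the subject and body values below are illustrative:

from rhodecode.lib.celerylib import run_task, tasks

run_task(tasks.send_email,
         None,                                    # empty -> falls back to email_to
         '[RhodeCode] New User registration',
         'New user registration\nusername: demo\nemail: demo@example.com')
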
rhodecode/lib/smtp_mailer.py
@@ -1,118 +1,118 @@
1 1 import logging
2 2 import smtplib
3 3 import mimetypes
4 4 from email.mime.multipart import MIMEMultipart
5 5 from email.mime.image import MIMEImage
6 6 from email.mime.audio import MIMEAudio
7 7 from email.mime.base import MIMEBase
8 8 from email.mime.text import MIMEText
9 9 from email.utils import formatdate
10 10 from email import encoders
11 11
12 12 class SmtpMailer(object):
13 13 """simple smtp mailer class
14 14
15 15 mailer = SmtpMailer(mail_from, user, passwd, mail_server, mail_port, ssl, tls)
16 16 mailer.send(recipients, subject, body, attachment_files)
17 17
18 18 :param recipients: might be a list of strings or a single string
19 19 :param attachment_files: a dict of {filename:location};
20 20 it tries to guess the mimetype and attach the file
21 21 """
22 22
23 23 def __init__(self, mail_from, user, passwd, mail_server,
24 24 mail_port=None, ssl=False, tls=False):
25 25
26 26 self.mail_from = mail_from
27 27 self.mail_server = mail_server
28 28 self.mail_port = mail_port
29 29 self.user = user
30 30 self.passwd = passwd
31 31 self.ssl = ssl
32 32 self.tls = tls
33 33 self.debug = False
34 34
35 35 def send(self, recipients=[], subject='', body='', attachment_files={}):
36 36
37 37 if isinstance(recipients, basestring):
38 38 recipients = [recipients]
39 39 if self.ssl:
40 40 smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port)
41 41 else:
42 42 smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port)
43 43
44 44 if self.tls:
45 45 smtp_serv.starttls()
46 46
47 47 if self.debug:
48 48 smtp_serv.set_debuglevel(1)
49 49
50 smtp_serv.ehlo("mailer")
50 smtp_serv.ehlo("rhodecode mailer")
51 51
52 52 #if server requires authorization you must provide login and password
53 53 smtp_serv.login(self.user, self.passwd)
54 54
55 55 date_ = formatdate(localtime=True)
56 56 msg = MIMEMultipart()
57 57 msg['From'] = self.mail_from
58 58 msg['To'] = ','.join(recipients)
59 59 msg['Date'] = date_
60 60 msg['Subject'] = subject
61 61 msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
62 62
63 63 msg.attach(MIMEText(body))
64 64
65 65 if attachment_files:
66 66 self.__atach_files(msg, attachment_files)
67 67
68 68 smtp_serv.sendmail(self.mail_from, recipients, msg.as_string())
69 69 logging.info('MAIL SENT TO: %s' % recipients)
70 70 smtp_serv.quit()
71 71
72 72
73 73 def __atach_files(self, msg, attachment_files):
74 74 if isinstance(attachment_files, dict):
75 75 for f_name, msg_file in attachment_files.items():
76 76 ctype, encoding = mimetypes.guess_type(f_name)
77 77 logging.info("guessing file %s type based on %s", ctype, f_name)
78 78 if ctype is None or encoding is not None:
79 79 # No guess could be made, or the file is encoded (compressed), so
80 80 # use a generic bag-of-bits type.
81 81 ctype = 'application/octet-stream'
82 82 maintype, subtype = ctype.split('/', 1)
83 83 if maintype == 'text':
84 84 # Note: we should handle calculating the charset
85 85 file_part = MIMEText(self.get_content(msg_file),
86 86 _subtype=subtype)
87 87 elif maintype == 'image':
88 88 file_part = MIMEImage(self.get_content(msg_file),
89 89 _subtype=subtype)
90 90 elif maintype == 'audio':
91 91 file_part = MIMEAudio(self.get_content(msg_file),
92 92 _subtype=subtype)
93 93 else:
94 94 file_part = MIMEBase(maintype, subtype)
95 95 file_part.set_payload(self.get_content(msg_file))
96 96 # Encode the payload using Base64
97 97 encoders.encode_base64(file_part)
98 98 # Set the filename parameter
99 99 file_part.add_header('Content-Disposition', 'attachment',
100 100 filename=f_name)
101 101 file_part.add_header('Content-Type', ctype, name=f_name)
102 102 msg.attach(file_part)
103 103 else:
104 104 raise Exception('Attachment files should be '
105 105 'a dict in format {"filename":"filepath"}')
106 106
107 107 def get_content(self, msg_file):
108 108 '''
109 109 Get content based on type, if content is a string then open the file first,
110 110 otherwise just read it because it's probably an already-open file object
111 111 :param msg_file:
112 112 '''
113 113 if isinstance(msg_file, str):
114 114 return open(msg_file, "rb").read()
115 115 else:
116 116 #just to be safe, seek to 0
117 117 msg_file.seek(0)
118 118 return msg_file.read()
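
Apart from the EHLO string change, SmtpMailer is used exactly as its class docstring describes. A usage sketch under the settings from the sample config; the server name, port, credentials and paths are illustrative:

from rhodecode.lib.smtp_mailer import SmtpMailer

mailer = SmtpMailer(mail_from='rhodecode-noreply@localhost',
                    user='smtp_user', passwd='smtp_secret',
                    mail_server='mail.server.com', mail_port=587,
                    ssl=False, tls=True)

# recipients may be a single string or a list of strings;
# attachment_files maps a filename to a path on disk
mailer.send(['admin@localhost'], 'test subject', 'test body',
            attachment_files={'error.log': '/tmp/error.log'})
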
rhodecode/lib/utils.py
@@ -1,553 +1,572 @@
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 # Utilities for RhodeCode
4 4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 5 # This program is free software; you can redistribute it and/or
6 6 # modify it under the terms of the GNU General Public License
7 7 # as published by the Free Software Foundation; version 2
8 8 # of the License or (at your option) any later version of the license.
9 9 #
10 10 # This program is distributed in the hope that it will be useful,
11 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 13 # GNU General Public License for more details.
14 14 #
15 15 # You should have received a copy of the GNU General Public License
16 16 # along with this program; if not, write to the Free Software
17 17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 18 # MA 02110-1301, USA.
19 19 """
20 20 Created on April 18, 2010
21 21 Utilities for RhodeCode
22 22 @author: marcink
23 23 """
24 24
25 25 from UserDict import DictMixin
26 26 from mercurial import ui, config, hg
27 27 from mercurial.error import RepoError
28 28 from rhodecode.model import meta
29 29 from rhodecode.model.caching_query import FromCache
30 30 from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
31 31 UserLog
32 32 from rhodecode.model.repo import RepoModel
33 33 from rhodecode.model.user import UserModel
34 34 from vcs.backends.base import BaseChangeset
35 35 from paste.script import command
36 36 import ConfigParser
37 37 from vcs.utils.lazy import LazyProperty
38 38 import traceback
39 39 import datetime
40 40 import logging
41 41 import os
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 def get_repo_slug(request):
47 47 return request.environ['pylons.routes_dict'].get('repo_name')
48 48
49 49 def is_mercurial(environ):
50 50 """
51 51 Returns True if request's target is mercurial server - header
52 52 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
53 53 """
54 54 http_accept = environ.get('HTTP_ACCEPT')
55 55 if http_accept and http_accept.startswith('application/mercurial'):
56 56 return True
57 57 return False
58 58
59 59 def is_git(environ):
60 60 """
61 61 Returns True if request's target is git server. ``HTTP_USER_AGENT`` would
62 62 then have git client version given.
63 63
64 64 :param environ:
65 65 """
66 66 http_user_agent = environ.get('HTTP_USER_AGENT')
67 67 if http_user_agent and http_user_agent.startswith('git'):
68 68 return True
69 69 return False
70 70
71 def action_logger(user, action, repo, ipaddr, sa=None):
71 def action_logger(user, action, repo, ipaddr='', sa=None):
72 72 """
73 73 Action logger for various actions made by users
74
75 :param user: user that made this action, can be a unique username string or an
76 object containing a user_id attribute
77 :param action: action to log, should be one of the predefined unique actions for
78 easy translations
79 :param repo: repository that the action was made on
80 :param ipaddr: optional ip address from which the action was made
81 :param sa: optional sqlalchemy session
82
74 83 """
75 84
76 85 if not sa:
77 86 sa = meta.Session()
78 87
79 88 try:
80 89 if hasattr(user, 'user_id'):
81 90 user_obj = user
82 91 elif isinstance(user, basestring):
83 92 user_obj = UserModel(sa).get_by_username(user, cache=False)
84 93 else:
85 94 raise Exception('You have to provide user object or username')
86 95
96
97 if repo:
87 98 repo_name = repo.lstrip('/')
99
100 repository = RepoModel(sa).get(repo_name, cache=False)
101 if not repository:
102 raise Exception('You have to provide valid repository')
103 else:
104 raise Exception('You have to provide repository to action logger')
105
106
88 107 user_log = UserLog()
89 108 user_log.user_id = user_obj.user_id
90 109 user_log.action = action
91 110 user_log.repository_name = repo_name
92 user_log.repository = RepoModel(sa).get(repo_name, cache=False)
111 user_log.repository = repository
93 112 user_log.action_date = datetime.datetime.now()
94 113 user_log.user_ip = ipaddr
95 114 sa.add(user_log)
96 115 sa.commit()
97 116
98 117 log.info('Adding user %s, action %s on %s',
99 118 user_obj.username, action, repo)
100 119 except:
101 120 log.error(traceback.format_exc())
102 121 sa.rollback()
103 122
104 123 def get_repos(path, recursive=False, initial=False):
105 124 """
106 125 Scans the given path for repos and returns (name, (type, path)) tuples
107 126 :param prefix:
108 127 :param path:
109 128 :param recursive:
110 129 :param initial:
111 130 """
112 131 from vcs.utils.helpers import get_scm
113 132 from vcs.exceptions import VCSError
114 133
115 134 try:
116 135 scm = get_scm(path)
117 136 except:
118 137 pass
119 138 else:
120 139 raise Exception('The given path %s should not be a repository, got %s'
121 140 % (path, scm))
122 141
123 142 for dirpath in os.listdir(path):
124 143 try:
125 144 yield dirpath, get_scm(os.path.join(path, dirpath))
126 145 except VCSError:
127 146 pass
128 147
129 148 if __name__ == '__main__':
130 149 get_repos('', '/home/marcink/workspace-python')
131 150
132 151
133 152 def check_repo_fast(repo_name, base_path):
134 153 if os.path.isdir(os.path.join(base_path, repo_name)):return False
135 154 return True
136 155
137 156 def check_repo(repo_name, base_path, verify=True):
138 157
139 158 repo_path = os.path.join(base_path, repo_name)
140 159
141 160 try:
142 161 if not check_repo_fast(repo_name, base_path):
143 162 return False
144 163 r = hg.repository(ui.ui(), repo_path)
145 164 if verify:
146 165 hg.verify(r)
147 166 #here we know that the repo exists and was verified
148 167 log.info('%s repo is already created', repo_name)
149 168 return False
150 169 except RepoError:
151 170 #it means that there is no valid repo there...
152 171 log.info('%s repo is free for creation', repo_name)
153 172 return True
154 173
155 174 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
156 175 while True:
157 176 ok = raw_input(prompt)
158 177 if ok in ('y', 'ye', 'yes'): return True
159 178 if ok in ('n', 'no', 'nop', 'nope'): return False
160 179 retries = retries - 1
161 180 if retries < 0: raise IOError
162 181 print complaint
163 182
164 183 def get_hg_ui_cached():
165 184 try:
166 185 sa = meta.Session
167 186 ret = sa.query(RhodeCodeUi)\
168 187 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
169 188 .all()
170 189 except:
171 190 pass
172 191 finally:
173 192 meta.Session.remove()
174 193 return ret
175 194
176 195
177 196 def get_hg_settings():
178 197 try:
179 198 sa = meta.Session()
180 199 ret = sa.query(RhodeCodeSettings)\
181 200 .options(FromCache("sql_cache_short", "get_hg_settings"))\
182 201 .all()
183 202 except:
184 203 pass
185 204 finally:
186 205 meta.Session.remove()
187 206
188 207 if not ret:
189 208 raise Exception('Could not get application settings !')
190 209 settings = {}
191 210 for each in ret:
192 211 settings['rhodecode_' + each.app_settings_name] = each.app_settings_value
193 212
194 213 return settings
195 214
196 215 def get_hg_ui_settings():
197 216 try:
198 217 sa = meta.Session()
199 218 ret = sa.query(RhodeCodeUi).all()
200 219 except:
201 220 pass
202 221 finally:
203 222 meta.Session.remove()
204 223
205 224 if not ret:
206 225 raise Exception('Could not get application ui settings !')
207 226 settings = {}
208 227 for each in ret:
209 228 k = each.ui_key
210 229 v = each.ui_value
211 230 if k == '/':
212 231 k = 'root_path'
213 232
214 233 if k.find('.') != -1:
215 234 k = k.replace('.', '_')
216 235
217 236 if each.ui_section == 'hooks':
218 237 v = each.ui_active
219 238
220 239 settings[each.ui_section + '_' + k] = v
221 240
222 241 return settings
223 242
224 243 #propagated from mercurial documentation
225 244 ui_sections = ['alias', 'auth',
226 245 'decode/encode', 'defaults',
227 246 'diff', 'email',
228 247 'extensions', 'format',
229 248 'merge-patterns', 'merge-tools',
230 249 'hooks', 'http_proxy',
231 250 'smtp', 'patch',
232 251 'paths', 'profiling',
233 252 'server', 'trusted',
234 253 'ui', 'web', ]
235 254
236 255 def make_ui(read_from='file', path=None, checkpaths=True):
237 256 """
238 257 A function that will read python rc files or database
239 258 and make a mercurial ui object from the read options
240 259
241 260 :param path: path to mercurial config file
242 261 :param checkpaths: check the path
243 262 :param read_from: read from 'file' or 'db'
244 263 """
245 264
246 265 baseui = ui.ui()
247 266
248 267 if read_from == 'file':
249 268 if not os.path.isfile(path):
250 269 log.warning('Unable to read config file %s' % path)
251 270 return False
252 271 log.debug('reading hgrc from %s', path)
253 272 cfg = config.config()
254 273 cfg.read(path)
255 274 for section in ui_sections:
256 275 for k, v in cfg.items(section):
257 276 baseui.setconfig(section, k, v)
258 277 log.debug('settings ui from file[%s]%s:%s', section, k, v)
259 278
260 279 elif read_from == 'db':
261 280 hg_ui = get_hg_ui_cached()
262 281 for ui_ in hg_ui:
263 282 if ui_.ui_active:
264 283 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
265 284 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
266 285
267 286
268 287 return baseui
269 288
270 289
271 290 def set_rhodecode_config(config):
272 291 hgsettings = get_hg_settings()
273 292
274 293 for k, v in hgsettings.items():
275 294 config[k] = v
276 295
277 296 def invalidate_cache(name, *args):
278 297 """
279 298 Puts cache invalidation task into db for
280 299 further global cache invalidation
281 300 """
282 301 pass
283 302
284 303 class EmptyChangeset(BaseChangeset):
285 304 """
286 305 A dummy empty changeset. It's possible to pass a hash when creating
287 306 an EmptyChangeset
288 307 """
289 308
290 309 def __init__(self, cs='0' * 40):
291 310 self._empty_cs = cs
292 311 self.revision = -1
293 312 self.message = ''
294 313 self.author = ''
295 314 self.date = ''
296 315
297 316 @LazyProperty
298 317 def raw_id(self):
299 318 """
300 319 Returns raw string identifying this changeset, useful for web
301 320 representation.
302 321 """
303 322 return self._empty_cs
304 323
305 324 @LazyProperty
306 325 def short_id(self):
307 326 return self.raw_id[:12]
308 327
309 328 def get_file_changeset(self, path):
310 329 return self
311 330
312 331 def get_file_content(self, path):
313 332 return u''
314 333
315 334 def get_file_size(self, path):
316 335 return 0
317 336
318 337 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
319 338 """
320 339 maps all found repositories into db
321 340 """
322 341
323 342 sa = meta.Session()
324 343 rm = RepoModel(sa)
325 344 user = sa.query(User).filter(User.admin == True).first()
326 345
327 346 for name, repo in initial_repo_list.items():
328 347 if not rm.get(name, cache=False):
329 348 log.info('repository %s not found creating default', name)
330 349
331 350 form_data = {
332 351 'repo_name':name,
333 352 'repo_type':repo.alias,
334 353 'description':repo.description \
335 354 if repo.description != 'unknown' else \
336 355 '%s repository' % name,
337 356 'private':False
338 357 }
339 358 rm.create(form_data, user, just_db=True)
340 359
341 360 if remove_obsolete:
342 361 #remove from database those repositories that are not in the filesystem
343 362 for repo in sa.query(Repository).all():
344 363 if repo.repo_name not in initial_repo_list.keys():
345 364 sa.delete(repo)
346 365 sa.commit()
347 366
348 367 class OrderedDict(dict, DictMixin):
349 368
350 369 def __init__(self, *args, **kwds):
351 370 if len(args) > 1:
352 371 raise TypeError('expected at most 1 arguments, got %d' % len(args))
353 372 try:
354 373 self.__end
355 374 except AttributeError:
356 375 self.clear()
357 376 self.update(*args, **kwds)
358 377
359 378 def clear(self):
360 379 self.__end = end = []
361 380 end += [None, end, end] # sentinel node for doubly linked list
362 381 self.__map = {} # key --> [key, prev, next]
363 382 dict.clear(self)
364 383
365 384 def __setitem__(self, key, value):
366 385 if key not in self:
367 386 end = self.__end
368 387 curr = end[1]
369 388 curr[2] = end[1] = self.__map[key] = [key, curr, end]
370 389 dict.__setitem__(self, key, value)
371 390
372 391 def __delitem__(self, key):
373 392 dict.__delitem__(self, key)
374 393 key, prev, next = self.__map.pop(key)
375 394 prev[2] = next
376 395 next[1] = prev
377 396
378 397 def __iter__(self):
379 398 end = self.__end
380 399 curr = end[2]
381 400 while curr is not end:
382 401 yield curr[0]
383 402 curr = curr[2]
384 403
385 404 def __reversed__(self):
386 405 end = self.__end
387 406 curr = end[1]
388 407 while curr is not end:
389 408 yield curr[0]
390 409 curr = curr[1]
391 410
392 411 def popitem(self, last=True):
393 412 if not self:
394 413 raise KeyError('dictionary is empty')
395 414 if last:
396 415 key = reversed(self).next()
397 416 else:
398 417 key = iter(self).next()
399 418 value = self.pop(key)
400 419 return key, value
401 420
402 421 def __reduce__(self):
403 422 items = [[k, self[k]] for k in self]
404 423 tmp = self.__map, self.__end
405 424 del self.__map, self.__end
406 425 inst_dict = vars(self).copy()
407 426 self.__map, self.__end = tmp
408 427 if inst_dict:
409 428 return (self.__class__, (items,), inst_dict)
410 429 return self.__class__, (items,)
411 430
412 431 def keys(self):
413 432 return list(self)
414 433
415 434 setdefault = DictMixin.setdefault
416 435 update = DictMixin.update
417 436 pop = DictMixin.pop
418 437 values = DictMixin.values
419 438 items = DictMixin.items
420 439 iterkeys = DictMixin.iterkeys
421 440 itervalues = DictMixin.itervalues
422 441 iteritems = DictMixin.iteritems
423 442
424 443 def __repr__(self):
425 444 if not self:
426 445 return '%s()' % (self.__class__.__name__,)
427 446 return '%s(%r)' % (self.__class__.__name__, self.items())
428 447
429 448 def copy(self):
430 449 return self.__class__(self)
431 450
432 451 @classmethod
433 452 def fromkeys(cls, iterable, value=None):
434 453 d = cls()
435 454 for key in iterable:
436 455 d[key] = value
437 456 return d
438 457
439 458 def __eq__(self, other):
440 459 if isinstance(other, OrderedDict):
441 460 return len(self) == len(other) and self.items() == other.items()
442 461 return dict.__eq__(self, other)
443 462
444 463 def __ne__(self, other):
445 464 return not self == other
446 465
447 466
448 467 #===============================================================================
449 468 # TEST FUNCTIONS AND CREATORS
450 469 #===============================================================================
451 470 def create_test_index(repo_location, full_index):
452 471 """Makes default test index
453 472 :param repo_location:
454 473 :param full_index:
455 474 """
456 475 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
457 476 from rhodecode.lib.pidlock import DaemonLock, LockHeld
458 477 import shutil
459 478
460 479 index_location = os.path.join(repo_location, 'index')
461 480 if os.path.exists(index_location):
462 481 shutil.rmtree(index_location)
463 482
464 483 try:
465 484 l = DaemonLock()
466 485 WhooshIndexingDaemon(index_location=index_location,
467 486 repo_location=repo_location)\
468 487 .run(full_index=full_index)
469 488 l.release()
470 489 except LockHeld:
471 490 pass
472 491
473 492 def create_test_env(repos_test_path, config):
474 493 """Makes a fresh database and
475 494 installs a test repository into a tmp dir
476 495 """
477 496 from rhodecode.lib.db_manage import DbManage
478 497 from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
479 498 HG_FORK, GIT_FORK, TESTS_TMP_PATH
480 499 import tarfile
481 500 import shutil
482 501 from os.path import dirname as dn, join as jn, abspath
483 502
484 503 log = logging.getLogger('TestEnvCreator')
485 504 # create logger
486 505 log.setLevel(logging.DEBUG)
487 506 log.propagate = True
488 507 # create console handler and set level to debug
489 508 ch = logging.StreamHandler()
490 509 ch.setLevel(logging.DEBUG)
491 510
492 511 # create formatter
493 512 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
494 513
495 514 # add formatter to ch
496 515 ch.setFormatter(formatter)
497 516
498 517 # add ch to logger
499 518 log.addHandler(ch)
500 519
501 520 #PART ONE create db
502 521 dbname = config['sqlalchemy.db1.url'].split('/')[-1]
503 522 log.debug('making test db %s', dbname)
504 523
505 524 dbmanage = DbManage(log_sql=True, dbname=dbname, root=config['here'],
506 525 tests=True)
507 526 dbmanage.create_tables(override=True)
508 527 dbmanage.config_prompt(repos_test_path)
509 528 dbmanage.create_default_user()
510 529 dbmanage.admin_prompt()
511 530 dbmanage.create_permissions()
512 531 dbmanage.populate_default_permissions()
513 532
514 533 #PART TWO make test repo
515 534 log.debug('making test vcs repositories')
516 535
517 536 #remove old ones from previous tests
518 537 for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]:
519 538
520 539 if os.path.isdir(jn(TESTS_TMP_PATH, r)):
521 540 log.debug('removing %s', r)
522 541 shutil.rmtree(jn(TESTS_TMP_PATH, r))
523 542
524 543 #CREATE DEFAULT HG REPOSITORY
525 544 cur_dir = dn(dn(abspath(__file__)))
526 545 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
527 546 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
528 547 tar.close()
529 548
530 549 class UpgradeDb(command.Command):
531 550 """Command used for paster to upgrade our database to newer version
532 551 """
533 552
534 553 max_args = 1
535 554 min_args = 1
536 555
537 556 usage = "CONFIG_FILE"
538 557 summary = "Upgrades current db to newer version given configuration file"
539 558 group_name = "RhodeCode"
540 559
541 560 parser = command.Command.standard_parser(verbose=True)
542 561
543 562 parser.add_option('--sql',
544 563 action='store_true',
545 564 dest='just_sql',
546 565 help="Prints upgrade sql for further investigation",
547 566 default=False)
548 567 def command(self):
549 568 config_name = self.args[0]
550 569 p = config_name.split('/')
551 570 root = '.' if len(p) == 1 else '/'.join(p[:-1])
552 571 config = ConfigParser.ConfigParser({'here':root})
553 572 config.read(config_name)
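
The main change in utils.py is the reworked action_logger: repo is now mandatory and validated against the database, while ipaddr became optional. A hedged call sketch; the action string, repository name and address below are illustrative (the docstring says the action should be one of the predefined ones):

from rhodecode.lib.utils import action_logger

# `user` may be a username string or any object with a user_id attribute
action_logger(user='marcink',
              action='user_created_repo',   # illustrative action name
              repo='/vcs_test_hg',          # leading slash is stripped
              ipaddr='127.0.0.1')
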
rhodecode/model/user.py
@@ -1,171 +1,180 @@
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 # Model for users
4 4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 5 #
6 6 # This program is free software; you can redistribute it and/or
7 7 # modify it under the terms of the GNU General Public License
8 8 # as published by the Free Software Foundation; version 2
9 9 # of the License or (at your option) any later version of the license.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software
18 18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19 19 # MA 02110-1301, USA.
20 20 """
21 21 Created on April 9, 2010
22 22 Model for users
23 23 :author: marcink
24 24 """
25 25
26 26 from pylons.i18n.translation import _
27 27 from rhodecode.model.caching_query import FromCache
28 28 from rhodecode.model.db import User
29 29 from rhodecode.model.meta import Session
30 30 import logging
31 31 import traceback
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35 class DefaultUserException(Exception):pass
36 36
37 37 class UserModel(object):
38 38
39 39 def __init__(self, sa=None):
40 40 if not sa:
41 41 self.sa = Session()
42 42 else:
43 43 self.sa = sa
44 44
45 45 def get(self, user_id, cache=False):
46 46 user = self.sa.query(User)
47 47 if cache:
48 48 user = user.options(FromCache("sql_cache_short",
49 49 "get_user_%s" % user_id))
50 50 return user.get(user_id)
51 51
52 52
53 53 def get_by_username(self, username, cache=False):
54 54 user = self.sa.query(User)\
55 55 .filter(User.username == username)
56 56 if cache:
57 57 user = user.options(FromCache("sql_cache_short",
58 58 "get_user_%s" % username))
59 59 return user.scalar()
60 60
61 61 def create(self, form_data):
62 62 try:
63 63 new_user = User()
64 64 for k, v in form_data.items():
65 65 setattr(new_user, k, v)
66 66
67 67 self.sa.add(new_user)
68 68 self.sa.commit()
69 69 except:
70 70 log.error(traceback.format_exc())
71 71 self.sa.rollback()
72 72 raise
73 73
74 74 def create_registration(self, form_data):
75 from rhodecode.lib.celerylib import tasks, run_task
75 76 try:
76 77 new_user = User()
77 78 for k, v in form_data.items():
78 79 if k != 'admin':
79 80 setattr(new_user, k, v)
80 81
81 82 self.sa.add(new_user)
82 83 self.sa.commit()
84 body = ('New user registration\n'
85 'username: %s\n'
86 'email: %s\n')
87 body = body % (form_data['username'], form_data['email'])
88
89 run_task(tasks.send_email, None,
90 _('[RhodeCode] New User registration'),
91 body)
83 92 except:
84 93 log.error(traceback.format_exc())
85 94 self.sa.rollback()
86 95 raise
87 96
88 97 def update(self, user_id, form_data):
89 98 try:
90 99 new_user = self.get(user_id, cache=False)
91 100 if new_user.username == 'default':
92 101 raise DefaultUserException(
93 102 _("You can't edit this user since it's"
94 103 " crucial for the entire application"))
95 104 for k, v in form_data.items():
96 105 if k == 'new_password' and v != '':
97 106 new_user.password = v
98 107 else:
99 108 setattr(new_user, k, v)
100 109
101 110 self.sa.add(new_user)
102 111 self.sa.commit()
103 112 except:
104 113 log.error(traceback.format_exc())
105 114 self.sa.rollback()
106 115 raise
107 116
108 117 def update_my_account(self, user_id, form_data):
109 118 try:
110 119 new_user = self.get(user_id, cache=False)
111 120 if new_user.username == 'default':
112 121 raise DefaultUserException(
113 122 _("You can't edit this user since it's"
114 123 " crucial for the entire application"))
115 124 for k, v in form_data.items():
116 125 if k == 'new_password' and v != '':
117 126 new_user.password = v
118 127 else:
119 128 if k not in ['admin', 'active']:
120 129 setattr(new_user, k, v)
121 130
122 131 self.sa.add(new_user)
123 132 self.sa.commit()
124 133 except:
125 134 log.error(traceback.format_exc())
126 135 self.sa.rollback()
127 136 raise
128 137
129 138 def delete(self, user_id):
130 139 try:
131 140 user = self.get(user_id, cache=False)
132 141 if user.username == 'default':
133 142 raise DefaultUserException(
134 143 _("You can't remove this user since it's"
135 144 " crucial for the entire application"))
136 145 self.sa.delete(user)
137 146 self.sa.commit()
138 147 except:
139 148 log.error(traceback.format_exc())
140 149 self.sa.rollback()
141 150 raise
142 151
143 152 def reset_password(self, data):
144 153 from rhodecode.lib.celerylib import tasks, run_task
145 154 run_task(tasks.reset_user_password, data['email'])
146 155
147 156
148 157 def fill_data(self, user):
149 158 """
150 159 Fills user data with that from the database and logs the user out if not
151 160 present in the database
152 161 :param user:
153 162 """
154 163
155 164 if not hasattr(user, 'user_id') or user.user_id is None:
156 165 raise Exception('passed in user has to have the user_id attribute')
157 166
158 167
159 168 log.debug('filling auth user data')
160 169 try:
161 170 dbuser = self.get(user.user_id)
162 171 user.username = dbuser.username
163 172 user.is_admin = dbuser.admin
164 173 user.name = dbuser.name
165 174 user.lastname = dbuser.lastname
166 175 user.email = dbuser.email
167 176 except:
168 177 log.error(traceback.format_exc())
169 178 user.is_authenticated = False
170 179
171 180 return user
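
create_registration is where the fix for #59 lands: after committing the new user it queues a plain-text notification mail through the celery layer. A sketch of the resulting flow; the field values below are illustrative:

from rhodecode.model.user import UserModel

form_data = {'username': 'demo',
             'password': 'secret',
             'email': 'demo@example.com',
             'name': 'Demo',
             'lastname': 'User',
             'active': True,
             'admin': False}              # 'admin' is skipped on purpose

UserModel().create_registration(form_data)
# internally ends with:
# run_task(tasks.send_email, None,
#          _('[RhodeCode] New User registration'), body)
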