--- a/kallithea/lib/db_manage.py
+++ b/kallithea/lib/db_manage.py
@@ -1,572 +1,572 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 kallithea.lib.db_manage
 ~~~~~~~~~~~~~~~~~~~~~~~

 Database creation, and setup module for Kallithea. Used for creation
 of database as well as for migration operations

 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
 :created_on: Apr 10, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
 """

 import os
 import sys
 import time
 import uuid
 import logging
-from os.path import dirname as dn
+from os.path import dirname

 from kallithea import __dbversion__, __py_version__, EXTERN_TYPE_INTERNAL, DB_MIGRATIONS
 from kallithea.model.user import UserModel
 from kallithea.lib.utils import ask_ok
 from kallithea.model import init_model
 from kallithea.model.db import User, Permission, Ui, \
     Setting, UserToPerm, DbMigrateVersion, RepoGroup, \
     UserRepoGroupToPerm, CacheInvalidation, Repository

 from sqlalchemy.engine import create_engine
 from kallithea.model.repo_group import RepoGroupModel
 #from kallithea.model import meta
 from kallithea.model.meta import Session, Base
 from kallithea.model.repo import RepoModel
 from kallithea.model.permission import PermissionModel


 log = logging.getLogger(__name__)


 def notify(msg):
     """
     Notification for migrations messages
     """
     ml = len(msg) + (4 * 2)
     print('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper()


 class DbManage(object):
     def __init__(self, log_sql, dbconf, root, tests=False, SESSION=None, cli_args=None):
         self.dbname = dbconf.split('/')[-1]
         self.tests = tests
         self.root = root
         self.dburi = dbconf
         self.log_sql = log_sql
         self.db_exists = False
         self.cli_args = cli_args or {}
         self.init_db(SESSION=SESSION)

         force_ask = self.cli_args.get('force_ask')
         if force_ask is not None:
             global ask_ok
             ask_ok = lambda *args, **kwargs: force_ask

     def init_db(self, SESSION=None):
         if SESSION:
             self.sa = SESSION
         else:
             #init new sessions
             engine = create_engine(self.dburi, echo=self.log_sql)
             init_model(engine)
             self.sa = Session()

     def create_tables(self, override=False):
         """
         Create a auth database
         """

         log.info("Any existing database is going to be destroyed")
         if self.tests:
             destroy = True
         else:
             destroy = ask_ok('Are you sure to destroy old database ? [y/n]')
         if not destroy:
             print 'Nothing done.'
             sys.exit(0)
         if destroy:
             Base.metadata.drop_all()

         checkfirst = not override
         Base.metadata.create_all(checkfirst=checkfirst)
         log.info('Created tables for %s', self.dbname)

     def set_db_version(self):
         ver = DbMigrateVersion()
         ver.version = __dbversion__
         ver.repository_id = DB_MIGRATIONS
         ver.repository_path = 'versions'
         self.sa.add(ver)
         log.info('db version set to: %s', __dbversion__)

     def upgrade(self):
         """
         Upgrades given database schema to given revision following
         all needed steps, to perform the upgrade

         """

         from kallithea.lib.dbmigrate.migrate.versioning import api
         from kallithea.lib.dbmigrate.migrate.exceptions import \
             DatabaseNotControlledError

         if 'sqlite' in self.dburi:
             print (
                 '********************** WARNING **********************\n'
                 'Make sure your version of sqlite is at least 3.7.X. \n'
                 'Earlier versions are known to fail on some migrations\n'
                 '*****************************************************\n')

         upgrade = ask_ok('You are about to perform database upgrade, make '
                          'sure You backed up your database before. '
                          'Continue ? [y/n]')
         if not upgrade:
             print 'No upgrade performed'
             sys.exit(0)

-        repository_path = os.path.join(dn(dn(dn(os.path.realpath(__file__)))),
+        repository_path = os.path.join(dirname(dirname(dirname(os.path.realpath(__file__)))),
                                        'kallithea', 'lib', 'dbmigrate')
         db_uri = self.dburi

         try:
             curr_version = api.db_version(db_uri, repository_path)
             msg = ('Found current database under version '
                    'control with version %s' % curr_version)

         except (RuntimeError, DatabaseNotControlledError):
             curr_version = 1
             msg = ('Current database is not under version control. Setting '
                    'as version %s' % curr_version)
             api.version_control(db_uri, repository_path, curr_version)

         notify(msg)
         if curr_version == __dbversion__:
             print 'This database is already at the newest version'
             sys.exit(0)

         # clear cache keys
         log.info("Clearing cache keys now...")
         CacheInvalidation.clear_cache()

         upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
         notify('attempting to do database upgrade from '
                'version %s to version %s' % (curr_version, __dbversion__))

         # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
         _step = None
         for step in upgrade_steps:
             notify('performing upgrade step %s' % step)
             time.sleep(0.5)

             api.upgrade(db_uri, repository_path, step)
             notify('schema upgrade for step %s completed' % (step,))

             _step = step

         notify('upgrade to version %s successful' % _step)

     def fix_repo_paths(self):
         """
         Fixes a old kallithea version path into new one without a '*'
         """

         paths = self.sa.query(Ui) \
             .filter(Ui.ui_key == '/') \
             .scalar()

         paths.ui_value = paths.ui_value.replace('*', '')

         self.sa.add(paths)
         self.sa.commit()

     def fix_default_user(self):
         """
         Fixes a old default user with some 'nicer' default values,
         used mostly for anonymous access
         """
         def_user = self.sa.query(User) \
             .filter(User.username == User.DEFAULT_USER) \
             .one()

         def_user.name = 'Anonymous'
         def_user.lastname = 'User'
         def_user.email = 'anonymous@kallithea-scm.org'

         self.sa.add(def_user)
         self.sa.commit()

     def fix_settings(self):
         """
         Fixes kallithea settings adds ga_code key for google analytics
         """

         hgsettings3 = Setting('ga_code', '')

         self.sa.add(hgsettings3)
         self.sa.commit()

     def admin_prompt(self, second=False):
         if not self.tests:
             import getpass

             # defaults
             defaults = self.cli_args
             username = defaults.get('username')
             password = defaults.get('password')
             email = defaults.get('email')

             def get_password():
                 password = getpass.getpass('Specify admin password '
                                            '(min 6 chars):')
                 confirm = getpass.getpass('Confirm password:')

                 if password != confirm:
                     log.error('passwords mismatch')
                     return False
                 if len(password) < 6:
                     log.error('password is to short use at least 6 characters')
                     return False

                 return password
             if username is None:
                 username = raw_input('Specify admin username:')
             if password is None:
                 password = get_password()
                 if not password:
                     #second try
                     password = get_password()
                     if not password:
                         sys.exit()
             if email is None:
                 email = raw_input('Specify admin email:')
             self.create_user(username, password, email, True)
         else:
             log.info('creating admin and regular test users')
             from kallithea.tests import TEST_USER_ADMIN_LOGIN, \
                 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
                 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
                 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
                 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL

             self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
                              TEST_USER_ADMIN_EMAIL, True)

             self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
                              TEST_USER_REGULAR_EMAIL, False)

             self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
                              TEST_USER_REGULAR2_EMAIL, False)

     def create_ui_settings(self, repo_store_path):
         """
         Creates ui settings, fills out hooks
         """

         #HOOKS
         hooks1_key = Ui.HOOK_UPDATE
         hooks1_ = self.sa.query(Ui) \
             .filter(Ui.ui_key == hooks1_key).scalar()

         hooks1 = Ui() if hooks1_ is None else hooks1_
         hooks1.ui_section = 'hooks'
         hooks1.ui_key = hooks1_key
         hooks1.ui_value = 'hg update >&2'
         hooks1.ui_active = False
         self.sa.add(hooks1)

         hooks2_key = Ui.HOOK_REPO_SIZE
         hooks2_ = self.sa.query(Ui) \
             .filter(Ui.ui_key == hooks2_key).scalar()
         hooks2 = Ui() if hooks2_ is None else hooks2_
         hooks2.ui_section = 'hooks'
         hooks2.ui_key = hooks2_key
         hooks2.ui_value = 'python:kallithea.lib.hooks.repo_size'
         self.sa.add(hooks2)

         hooks3 = Ui()
         hooks3.ui_section = 'hooks'
         hooks3.ui_key = Ui.HOOK_PUSH
         hooks3.ui_value = 'python:kallithea.lib.hooks.log_push_action'
         self.sa.add(hooks3)

         hooks4 = Ui()
         hooks4.ui_section = 'hooks'
         hooks4.ui_key = Ui.HOOK_PRE_PUSH
         hooks4.ui_value = 'python:kallithea.lib.hooks.pre_push'
         self.sa.add(hooks4)

         hooks5 = Ui()
         hooks5.ui_section = 'hooks'
         hooks5.ui_key = Ui.HOOK_PULL
         hooks5.ui_value = 'python:kallithea.lib.hooks.log_pull_action'
         self.sa.add(hooks5)

         hooks6 = Ui()
         hooks6.ui_section = 'hooks'
         hooks6.ui_key = Ui.HOOK_PRE_PULL
         hooks6.ui_value = 'python:kallithea.lib.hooks.pre_pull'
         self.sa.add(hooks6)

         # enable largefiles
         largefiles = Ui()
         largefiles.ui_section = 'extensions'
         largefiles.ui_key = 'largefiles'
         largefiles.ui_value = ''
         self.sa.add(largefiles)

         # set default largefiles cache dir, defaults to
         # /repo location/.cache/largefiles
         largefiles = Ui()
         largefiles.ui_section = 'largefiles'
         largefiles.ui_key = 'usercache'
         largefiles.ui_value = os.path.join(repo_store_path, '.cache',
                                            'largefiles')
         self.sa.add(largefiles)

         # enable hgsubversion disabled by default
         hgsubversion = Ui()
         hgsubversion.ui_section = 'extensions'
         hgsubversion.ui_key = 'hgsubversion'
         hgsubversion.ui_value = ''
         hgsubversion.ui_active = False
         self.sa.add(hgsubversion)

         # enable hggit disabled by default
         hggit = Ui()
         hggit.ui_section = 'extensions'
         hggit.ui_key = 'hggit'
         hggit.ui_value = ''
         hggit.ui_active = False
         self.sa.add(hggit)

     def create_auth_plugin_options(self, skip_existing=False):
         """
         Create default auth plugin settings, and make it active

         :param skip_existing:
         """

         for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'),
                         ('auth_internal_enabled', 'True', 'bool')]:
             if skip_existing and Setting.get_by_name(k) != None:
                 log.debug('Skipping option %s', k)
                 continue
             setting = Setting(k, v, t)
             self.sa.add(setting)

     def create_default_options(self, skip_existing=False):
         """Creates default settings"""

         for k, v, t in [
             ('default_repo_enable_locking', False, 'bool'),
             ('default_repo_enable_downloads', False, 'bool'),
             ('default_repo_enable_statistics', False, 'bool'),
             ('default_repo_private', False, 'bool'),
             ('default_repo_type', 'hg', 'unicode')]:

             if skip_existing and Setting.get_by_name(k) is not None:
                 log.debug('Skipping option %s', k)
                 continue
             setting = Setting(k, v, t)
             self.sa.add(setting)

     def fixup_groups(self):
         def_usr = User.get_default_user()
         for g in RepoGroup.query().all():
             g.group_name = g.get_new_name(g.name)
             self.sa.add(g)
             # get default perm
             default = UserRepoGroupToPerm.query() \
                 .filter(UserRepoGroupToPerm.group == g) \
                 .filter(UserRepoGroupToPerm.user == def_usr) \
                 .scalar()

             if default is None:
                 log.debug('missing default permission for group %s adding', g)
                 perm_obj = RepoGroupModel()._create_default_perms(g)
                 self.sa.add(perm_obj)

     def reset_permissions(self, username):
         """
         Resets permissions to default state, useful when old systems had
         bad permissions, we must clean them up

         :param username:
         """
         default_user = User.get_by_username(username)
         if not default_user:
             return

         u2p = UserToPerm.query() \
             .filter(UserToPerm.user == default_user).all()
         fixed = False
         if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
             for p in u2p:
                 Session().delete(p)
             fixed = True
             self.populate_default_permissions()
         return fixed

     def update_repo_info(self):
         RepoModel.update_repoinfo()

     def config_prompt(self, test_repo_path='', retries=3):
         defaults = self.cli_args
         _path = defaults.get('repos_location')
         if retries == 3:
             log.info('Setting up repositories config')

         if _path is not None:
             path = _path
         elif not self.tests and not test_repo_path:
             path = raw_input(
                 'Enter a valid absolute path to store repositories. '
                 'All repositories in that path will be added automatically:'
             )
         else:
             path = test_repo_path
         path_ok = True

         # check proper dir
         if not os.path.isdir(path):
             path_ok = False
             log.error('Given path %s is not a valid directory', path)

         elif not os.path.isabs(path):
             path_ok = False
             log.error('Given path %s is not an absolute path', path)

         # check if path is at least readable.
         if not os.access(path, os.R_OK):
             path_ok = False
             log.error('Given path %s is not readable', path)

         # check write access, warn user about non writeable paths
         elif not os.access(path, os.W_OK) and path_ok:
             log.warning('No write permission to given path %s', path)
             if not ask_ok('Given path %s is not writeable, do you want to '
                           'continue with read only mode ? [y/n]' % (path,)):
                 log.error('Canceled by user')
                 sys.exit(-1)

         if retries == 0:
             sys.exit('max retries reached')
         if not path_ok:
             if _path is not None:
                 sys.exit('Invalid repo path: %s' % _path)
             retries -= 1
             return self.config_prompt(test_repo_path, retries) # recursing!!!

         real_path = os.path.normpath(os.path.realpath(path))

         if real_path != os.path.normpath(path):
             log.warning('Using normalized path %s instead of %s', real_path, path)

         return real_path

     def create_settings(self, path):

         self.create_ui_settings(path)

         ui_config = [
             ('web', 'push_ssl', 'false'),
             ('web', 'allow_archive', 'gz zip bz2'),
             ('web', 'allow_push', '*'),
             ('web', 'baseurl', '/'),
             ('paths', '/', path),
             #('phases', 'publish', 'false')
         ]
         for section, key, value in ui_config:
             ui_conf = Ui()
             setattr(ui_conf, 'ui_section', section)
             setattr(ui_conf, 'ui_key', key)
             setattr(ui_conf, 'ui_value', value)
             self.sa.add(ui_conf)

         settings = [
             ('realm', 'Kallithea', 'unicode'),
             ('title', '', 'unicode'),
             ('ga_code', '', 'unicode'),
             ('show_public_icon', True, 'bool'),
             ('show_private_icon', True, 'bool'),
             ('stylify_metatags', False, 'bool'),
             ('dashboard_items', 100, 'int'),
             ('admin_grid_items', 25, 'int'),
             ('show_version', True, 'bool'),
             ('use_gravatar', True, 'bool'),
             ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
             ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
             ('update_url', Setting.DEFAULT_UPDATE_URL, 'unicode'),
         ]
         for key, val, type_ in settings:
             sett = Setting(key, val, type_)
             self.sa.add(sett)

         self.create_auth_plugin_options()
         self.create_default_options()

         log.info('created ui config')

     def create_user(self, username, password, email='', admin=False):
         log.info('creating user %s', username)
         UserModel().create_or_update(username, password, email,
                                      firstname=u'Kallithea', lastname=u'Admin',
                                      active=True, admin=admin,
                                      extern_type=EXTERN_TYPE_INTERNAL)

     def create_default_user(self):
         log.info('creating default user')
         # create default user for handling default permissions.
         user = UserModel().create_or_update(username=User.DEFAULT_USER,
                                             password=str(uuid.uuid1())[:20],
                                             email='anonymous@kallithea-scm.org',
                                             firstname=u'Anonymous',
                                             lastname=u'User')
         # based on configuration options activate/deactivate this user which
         # controls anonymous access
         if self.cli_args.get('public_access') is False:
             log.info('Public access disabled')
             user.active = False
             Session().add(user)
             Session().commit()

     def create_permissions(self):
         """
         Creates all permissions defined in the system
         """
         # module.(access|create|change|delete)_[name]
         # module.(none|read|write|admin)
         log.info('creating permissions')
         PermissionModel(self.sa).create_permissions()

     def populate_default_permissions(self):
         """
         Populate default permissions. It will create only the default
         permissions that are missing, and not alter already defined ones
         """
         log.info('creating default user permissions')
         PermissionModel(self.sa).create_default_permissions(user=User.DEFAULT_USER)

     @staticmethod
     def check_waitress():
         """
         Function executed at the end of setup
         """
         if not __py_version__ >= (2, 6):
             notify('Python2.5 detected, please switch '
                    'egg:waitress#main -> egg:Paste#http '
                    'in your .ini file')
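Note on the change above: the commit replaces the `dn` alias with a plain `dirname` import and expands the nested calls accordingly. For illustration only (the paths below are assumed, not taken from the changeset), this is how the triple `dirname` in `upgrade()` resolves the dbmigrate repository location relative to the installed module:

    import os
    from os.path import dirname

    # Hypothetical install location of db_manage.py, for illustration only.
    module_file = '/srv/kallithea/kallithea/lib/db_manage.py'

    # Each dirname() strips one path component from the resolved path.
    lib_dir = dirname(os.path.realpath(module_file))  # /srv/kallithea/kallithea/lib
    pkg_dir = dirname(lib_dir)                        # /srv/kallithea/kallithea
    top_dir = dirname(pkg_dir)                        # /srv/kallithea
    repository_path = os.path.join(top_dir, 'kallithea', 'lib', 'dbmigrate')
    print(repository_path)  # /srv/kallithea/kallithea/lib/dbmigrate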
--- a/kallithea/lib/indexers/__init__.py
+++ b/kallithea/lib/indexers/__init__.py
@@ -1,197 +1,197 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 kallithea.lib.indexers
 ~~~~~~~~~~~~~~~~~~~~~~

 Whoosh indexing module for Kallithea

 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
 :created_on: Aug 17, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
 """

 import os
 import sys
 import logging
-from os.path import dirname as dn
+from os.path import dirname

 # Add location of top level folder to sys.path
-sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
+sys.path.append(dirname(dirname(dirname(os.path.realpath(__file__)))))

 from whoosh.analysis import RegexTokenizer, LowercaseFilter
 from whoosh.fields import TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME
 from whoosh.formats import Characters
 from whoosh.highlight import highlight as whoosh_highlight, HtmlFormatter, ContextFragmenter
 from kallithea.lib.utils2 import LazyProperty

 log = logging.getLogger(__name__)

 # CUSTOM ANALYZER wordsplit + lowercase filter
 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()

 #INDEX SCHEMA DEFINITION
 SCHEMA = Schema(
     fileid=ID(unique=True),
     owner=TEXT(),
     repository=TEXT(stored=True),
     path=TEXT(stored=True),
     content=FieldType(format=Characters(), analyzer=ANALYZER,
                       scorable=True, stored=True),
     modtime=STORED(),
     extension=TEXT(stored=True)
 )

 IDX_NAME = 'HG_INDEX'
 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 FRAGMENTER = ContextFragmenter(200)

 CHGSETS_SCHEMA = Schema(
     raw_id=ID(unique=True, stored=True),
     date=NUMERIC(stored=True),
     last=BOOLEAN(),
     owner=TEXT(),
     repository=ID(unique=True, stored=True),
     author=TEXT(stored=True),
     message=FieldType(format=Characters(), analyzer=ANALYZER,
                       scorable=True, stored=True),
     parents=TEXT(),
     added=TEXT(),
     removed=TEXT(),
     changed=TEXT(),
 )

 CHGSET_IDX_NAME = 'CHGSET_INDEX'

 # used only to generate queries in journal
 JOURNAL_SCHEMA = Schema(
     username=TEXT(),
     date=DATETIME(),
     action=TEXT(),
     repository=TEXT(),
     ip=TEXT(),
 )


 class WhooshResultWrapper(object):
     def __init__(self, search_type, searcher, matcher, highlight_items,
                  repo_location):
         self.search_type = search_type
         self.searcher = searcher
         self.matcher = matcher
         self.highlight_items = highlight_items
         self.fragment_size = 200
         self.repo_location = repo_location

     @LazyProperty
     def doc_ids(self):
         docs_id = []
         while self.matcher.is_active():
             docnum = self.matcher.id()
             chunks = [offsets for offsets in self.get_chunks()]
             docs_id.append([docnum, chunks])
             self.matcher.next()
         return docs_id

     def __str__(self):
         return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))

     def __repr__(self):
         return self.__str__()

     def __len__(self):
         return len(self.doc_ids)

     def __iter__(self):
         """
         Allows Iteration over results,and lazy generate content

         *Requires* implementation of ``__getitem__`` method.
         """
         for docid in self.doc_ids:
             yield self.get_full_content(docid)

     def __getitem__(self, key):
         """
         Slicing of resultWrapper
         """
         i, j = key.start, key.stop

         slices = []
         for docid in self.doc_ids[i:j]:
             slices.append(self.get_full_content(docid))
         return slices

     def get_full_content(self, docid):
         res = self.searcher.stored_fields(docid[0])
         log.debug('result: %s', res)
         if self.search_type == 'content':
             full_repo_path = os.path.join(self.repo_location, res['repository'])
             f_path = res['path'].split(full_repo_path)[-1]
             f_path = f_path.lstrip(os.sep)
             content_short = self.get_short_content(res, docid[1])
             res.update({'content_short': content_short,
                         'content_short_hl': self.highlight(content_short),
                         'f_path': f_path
                         })
         elif self.search_type == 'path':
             full_repo_path = os.path.join(self.repo_location, res['repository'])
             f_path = res['path'].split(full_repo_path)[-1]
             f_path = f_path.lstrip(os.sep)
             res.update({'f_path': f_path})
         elif self.search_type == 'message':
             res.update({'message_hl': self.highlight(res['message'])})

         log.debug('result: %s', res)

         return res

     def get_short_content(self, res, chunks):

         return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])

     def get_chunks(self):
         """
         Smart function that implements chunking the content
         but not overlap chunks so it doesn't highlight the same
         close occurrences twice.
         """
         memory = [(0, 0)]
         if self.matcher.supports('positions'):
             for span in self.matcher.spans():
                 start = span.startchar or 0
                 end = span.endchar or 0
                 start_offseted = max(0, start - self.fragment_size)
                 end_offseted = end + self.fragment_size

                 if start_offseted < memory[-1][1]:
                     start_offseted = memory[-1][1]
                 memory.append((start_offseted, end_offseted,))
                 yield (start_offseted, end_offseted,)

     def highlight(self, content, top=5):
         if self.search_type not in ['content', 'message']:
             return ''
         hl = whoosh_highlight(
             text=content,
             terms=self.highlight_items,
             analyzer=ANALYZER,
             fragmenter=FRAGMENTER,
             formatter=FORMATTER,
             top=top
         )
         return hl
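For context on `ANALYZER` in the module above: piping Whoosh's `RegexTokenizer` into a `LowercaseFilter` splits text on `\w+` runs and lowercases each token before it is indexed or matched. A minimal standalone sketch (not part of the changeset) of what the pipeline yields:

    from whoosh.analysis import RegexTokenizer, LowercaseFilter

    # Same pipeline as ANALYZER above: tokens are maximal \w+ runs, lowercased.
    analyzer = RegexTokenizer(expression=r"\w+") | LowercaseFilter()

    # Whoosh analyzers are callable and yield Token objects; token.text is
    # the term that actually lands in the index.
    tokens = [token.text for token in analyzer(u"DbManage.create_tables(override=False)")]
    print(tokens)  # ['dbmanage', 'create_tables', 'override', 'false']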
@@ -1,459 +1,459 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.indexers.daemon |
|
15 | kallithea.lib.indexers.daemon | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | A daemon that builds and incrementally updates the Whoosh full text search indexes |

18 | A daemon that builds and incrementally updates the Whoosh full text search indexes | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jan 26, 2010 |
|
22 | :created_on: Jan 26, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 | import logging |
|
31 | import logging | |
32 | import traceback |
|
32 | import traceback | |
33 |
|
33 | |||
34 | from shutil import rmtree |
|
34 | from shutil import rmtree | |
35 | from time import mktime |
|
35 | from time import mktime | |
36 |
|
36 | |||
37 | from os.path import dirname as dn |

37 | from os.path import dirname | |
38 |
|
38 | |||
39 | # Add location of top level folder to sys.path |
|
39 | # Add location of top level folder to sys.path | |
40 | project_path = dn(dn(dn(dn(os.path.realpath(__file__))))) |
|
40 | project_path = dirname(dirname(dirname(dirname(os.path.realpath(__file__))))) | |
41 | sys.path.append(project_path) |
|
41 | sys.path.append(project_path) | |
42 |
|
42 | |||
43 | from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES |
|
43 | from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES | |
44 | from kallithea.model.scm import ScmModel |
|
44 | from kallithea.model.scm import ScmModel | |
45 | from kallithea.model.db import Repository |
|
45 | from kallithea.model.db import Repository | |
46 | from kallithea.lib.utils2 import safe_unicode, safe_str |
|
46 | from kallithea.lib.utils2 import safe_unicode, safe_str | |
47 | from kallithea.lib.indexers import SCHEMA, IDX_NAME, CHGSETS_SCHEMA, \ |
|
47 | from kallithea.lib.indexers import SCHEMA, IDX_NAME, CHGSETS_SCHEMA, \ | |
48 | CHGSET_IDX_NAME |
|
48 | CHGSET_IDX_NAME | |
49 |
|
49 | |||
50 | from kallithea.lib.vcs.exceptions import ChangesetError, RepositoryError, \ |
|
50 | from kallithea.lib.vcs.exceptions import ChangesetError, RepositoryError, \ | |
51 | NodeDoesNotExistError |
|
51 | NodeDoesNotExistError | |
52 |
|
52 | |||
53 | from whoosh.index import create_in, open_dir, exists_in |
|
53 | from whoosh.index import create_in, open_dir, exists_in | |
54 | from whoosh.query import * |
|
54 | from whoosh.query import * | |
55 | from whoosh.qparser import QueryParser |
|
55 | from whoosh.qparser import QueryParser | |
56 |
|
56 | |||
57 | log = logging.getLogger('whoosh_indexer') |
|
57 | log = logging.getLogger('whoosh_indexer') | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | class WhooshIndexingDaemon(object): |
|
60 | class WhooshIndexingDaemon(object): | |
61 | """ |
|
61 | """ | |
62 | Daemon for atomic indexing jobs |
|
62 | Daemon for atomic indexing jobs | |
63 | """ |
|
63 | """ | |
64 |
|
64 | |||
65 | def __init__(self, indexname=IDX_NAME, index_location=None, |
|
65 | def __init__(self, indexname=IDX_NAME, index_location=None, | |
66 | repo_location=None, sa=None, repo_list=None, |
|
66 | repo_location=None, sa=None, repo_list=None, | |
67 | repo_update_list=None): |
|
67 | repo_update_list=None): | |
68 | self.indexname = indexname |
|
68 | self.indexname = indexname | |
69 |
|
69 | |||
70 | self.index_location = index_location |
|
70 | self.index_location = index_location | |
71 | if not index_location: |
|
71 | if not index_location: | |
72 | raise Exception('You have to provide index location') |
|
72 | raise Exception('You have to provide index location') | |
73 |
|
73 | |||
74 | self.repo_location = repo_location |
|
74 | self.repo_location = repo_location | |
75 | if not repo_location: |
|
75 | if not repo_location: | |
76 | raise Exception('You have to provide repositories location') |
|
76 | raise Exception('You have to provide repositories location') | |
77 |
|
77 | |||
78 | self.repo_paths = ScmModel(sa).repo_scan(self.repo_location) |
|
78 | self.repo_paths = ScmModel(sa).repo_scan(self.repo_location) | |
79 |
|
79 | |||
80 | #filter repo list |
|
80 | #filter repo list | |
81 | if repo_list: |
|
81 | if repo_list: | |
82 | #Fix non-ascii repo names to unicode |
|
82 | #Fix non-ascii repo names to unicode | |
83 | repo_list = map(safe_unicode, repo_list) |
|
83 | repo_list = map(safe_unicode, repo_list) | |
84 | self.filtered_repo_paths = {} |
|
84 | self.filtered_repo_paths = {} | |
85 | for repo_name, repo in self.repo_paths.items(): |
|
85 | for repo_name, repo in self.repo_paths.items(): | |
86 | if repo_name in repo_list: |
|
86 | if repo_name in repo_list: | |
87 | self.filtered_repo_paths[repo_name] = repo |
|
87 | self.filtered_repo_paths[repo_name] = repo | |
88 |
|
88 | |||
89 | self.repo_paths = self.filtered_repo_paths |
|
89 | self.repo_paths = self.filtered_repo_paths | |
90 |
|
90 | |||
91 | #filter update repo list |
|
91 | #filter update repo list | |
92 | self.filtered_repo_update_paths = {} |
|
92 | self.filtered_repo_update_paths = {} | |
93 | if repo_update_list: |
|
93 | if repo_update_list: | |
94 | self.filtered_repo_update_paths = {} |
|
94 | self.filtered_repo_update_paths = {} | |
95 | for repo_name, repo in self.repo_paths.items(): |
|
95 | for repo_name, repo in self.repo_paths.items(): | |
96 | if repo_name in repo_update_list: |
|
96 | if repo_name in repo_update_list: | |
97 | self.filtered_repo_update_paths[repo_name] = repo |
|
97 | self.filtered_repo_update_paths[repo_name] = repo | |
98 | self.repo_paths = self.filtered_repo_update_paths |
|
98 | self.repo_paths = self.filtered_repo_update_paths | |
99 |
|
99 | |||
100 | self.initial = True |
|
100 | self.initial = True | |
101 | if not os.path.isdir(self.index_location): |
|
101 | if not os.path.isdir(self.index_location): | |
102 | os.makedirs(self.index_location) |
|
102 | os.makedirs(self.index_location) | |
103 | log.info('Cannot run incremental index since it does not ' |
|
103 | log.info('Cannot run incremental index since it does not ' | |
104 | 'yet exist - running full build') |

104 | 'yet exist - running full build') | |
105 | elif not exists_in(self.index_location, IDX_NAME): |
|
105 | elif not exists_in(self.index_location, IDX_NAME): | |
106 | log.info('Running full index build as the file content ' |
|
106 | log.info('Running full index build as the file content ' | |
107 | 'index does not exist') |
|
107 | 'index does not exist') | |
108 | elif not exists_in(self.index_location, CHGSET_IDX_NAME): |
|
108 | elif not exists_in(self.index_location, CHGSET_IDX_NAME): | |
109 | log.info('Running full index build as the changeset ' |
|
109 | log.info('Running full index build as the changeset ' | |
110 | 'index does not exist') |
|
110 | 'index does not exist') | |
111 | else: |
|
111 | else: | |
112 | self.initial = False |
|
112 | self.initial = False | |
113 |
|
113 | |||
114 | def _get_index_revision(self, repo): |
|
114 | def _get_index_revision(self, repo): | |
115 | db_repo = Repository.get_by_repo_name(repo.name_unicode) |
|
115 | db_repo = Repository.get_by_repo_name(repo.name_unicode) | |
116 | landing_rev = 'tip' |
|
116 | landing_rev = 'tip' | |
117 | if db_repo: |
|
117 | if db_repo: | |
118 | _rev_type, _rev = db_repo.landing_rev |
|
118 | _rev_type, _rev = db_repo.landing_rev | |
119 | landing_rev = _rev |
|
119 | landing_rev = _rev | |
120 | return landing_rev |
|
120 | return landing_rev | |
121 |
|
121 | |||
122 | def _get_index_changeset(self, repo, index_rev=None): |
|
122 | def _get_index_changeset(self, repo, index_rev=None): | |
123 | if not index_rev: |
|
123 | if not index_rev: | |
124 | index_rev = self._get_index_revision(repo) |
|
124 | index_rev = self._get_index_revision(repo) | |
125 | cs = repo.get_changeset(index_rev) |
|
125 | cs = repo.get_changeset(index_rev) | |
126 | return cs |
|
126 | return cs | |
127 |
|
127 | |||
128 | def get_paths(self, repo): |
|
128 | def get_paths(self, repo): | |
129 | """ |
|
129 | """ | |
130 | recursively walk the root dir and return a set of all paths in that dir, |

130 | recursively walk the root dir and return a set of all paths in that dir, | |
131 | based on the repository walk function |

131 | based on the repository walk function | |
132 | """ |
|
132 | """ | |
133 | index_paths_ = set() |
|
133 | index_paths_ = set() | |
134 | try: |
|
134 | try: | |
135 | cs = self._get_index_changeset(repo) |
|
135 | cs = self._get_index_changeset(repo) | |
136 | for _topnode, _dirs, files in cs.walk('/'): |
|
136 | for _topnode, _dirs, files in cs.walk('/'): | |
137 | for f in files: |
|
137 | for f in files: | |
138 | index_paths_.add(os.path.join(safe_str(repo.path), safe_str(f.path))) |
|
138 | index_paths_.add(os.path.join(safe_str(repo.path), safe_str(f.path))) | |
139 |
|
139 | |||
140 | except RepositoryError: |
|
140 | except RepositoryError: | |
141 | log.debug(traceback.format_exc()) |
|
141 | log.debug(traceback.format_exc()) | |
142 | pass |
|
142 | pass | |
143 | return index_paths_ |
|
143 | return index_paths_ | |
144 |
|
144 | |||
145 | def get_node(self, repo, path, index_rev=None): |
|
145 | def get_node(self, repo, path, index_rev=None): | |
146 | """ |
|
146 | """ | |
147 | gets a filenode based on the given full path. It operates on strings for |

147 | gets a filenode based on the given full path. It operates on strings for | |
148 | hg/git compatibility. |

148 | hg/git compatibility. | |
149 |
|
149 | |||
150 | :param repo: scm repo instance |
|
150 | :param repo: scm repo instance | |
151 | :param path: full path including root location |
|
151 | :param path: full path including root location | |
152 | :return: FileNode |
|
152 | :return: FileNode | |
153 | """ |
|
153 | """ | |
154 | # FIXME: paths should be normalized ... or even better: don't include repo.path |
|
154 | # FIXME: paths should be normalized ... or even better: don't include repo.path | |
155 | path = safe_str(path) |
|
155 | path = safe_str(path) | |
156 | repo_path = safe_str(repo.path) |
|
156 | repo_path = safe_str(repo.path) | |
157 | assert path.startswith(repo_path) |
|
157 | assert path.startswith(repo_path) | |
158 | assert path[len(repo_path)] in (os.path.sep, os.path.altsep) |
|
158 | assert path[len(repo_path)] in (os.path.sep, os.path.altsep) | |
159 | node_path = path[len(repo_path) + 1:] |
|
159 | node_path = path[len(repo_path) + 1:] | |
160 | cs = self._get_index_changeset(repo, index_rev=index_rev) |
|
160 | cs = self._get_index_changeset(repo, index_rev=index_rev) | |
161 | node = cs.get_node(node_path) |
|
161 | node = cs.get_node(node_path) | |
162 | return node |
|
162 | return node | |
163 |
|
163 | |||
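
The two asserts guarantee that path is exactly repo.path plus one separator plus the node path, so the fixed slice below them cannot mangle the file name. A worked example with hypothetical paths:

    import os

    repo_path = '/srv/repos/myrepo'
    path = '/srv/repos/myrepo/docs/readme.rst'
    assert path.startswith(repo_path)
    assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
    node_path = path[len(repo_path) + 1:]   # -> 'docs/readme.rst'
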
164 | def is_indexable_node(self, node): |
|
164 | def is_indexable_node(self, node): | |
165 | """ |
|
165 | """ | |
166 | Just index the content of chosen files, skipping binary files |
|
166 | Just index the content of chosen files, skipping binary files | |
167 | """ |
|
167 | """ | |
168 | return (node.extension in INDEX_EXTENSIONS or node.name in INDEX_FILENAMES) and \ |
|
168 | return (node.extension in INDEX_EXTENSIONS or node.name in INDEX_FILENAMES) and \ | |
169 | not node.is_binary |
|
169 | not node.is_binary | |
170 |
|
170 | |||
171 | def get_node_mtime(self, node): |
|
171 | def get_node_mtime(self, node): | |
172 | return mktime(node.last_changeset.date.timetuple()) |
|
172 | return mktime(node.last_changeset.date.timetuple()) | |
173 |
|
173 | |||
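
get_node_mtime flattens the date of the changeset that last touched the node into epoch seconds; this float is stored in the modtime field and later compared in update_file_index to decide whether a file is stale. A quick illustration:

    from time import mktime
    from datetime import datetime

    # mktime interprets the timetuple in local time, so the exact float is
    # timezone-dependent; only the ordering matters for the staleness check
    older = mktime(datetime(2014, 7, 1).timetuple())
    newer = mktime(datetime(2014, 7, 2).timetuple())
    assert newer > older
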
174 | def add_doc(self, writer, path, repo, repo_name, index_rev=None): |
|
174 | def add_doc(self, writer, path, repo, repo_name, index_rev=None): | |
175 | """ |
|
175 | """ | |
176 | Add a doc to the writer; this function itself fetches the data from |

176 | Add a doc to the writer; this function itself fetches the data from | |
177 | the vcs backend instance |

177 | the vcs backend instance | |
178 | """ |
|
178 | """ | |
179 | try: |
|
179 | try: | |
180 | node = self.get_node(repo, path, index_rev) |
|
180 | node = self.get_node(repo, path, index_rev) | |
181 | except (ChangesetError, NodeDoesNotExistError): |
|
181 | except (ChangesetError, NodeDoesNotExistError): | |
182 | log.debug("couldn't add doc - %s did not have %r at %s", repo, path, index_rev) |
|
182 | log.debug("couldn't add doc - %s did not have %r at %s", repo, path, index_rev) | |
183 | return 0, 0 |
|
183 | return 0, 0 | |
184 |
|
184 | |||
185 | indexed = indexed_w_content = 0 |
|
185 | indexed = indexed_w_content = 0 | |
186 | if self.is_indexable_node(node): |
|
186 | if self.is_indexable_node(node): | |
187 | u_content = node.content |
|
187 | u_content = node.content | |
188 | if not isinstance(u_content, unicode): |
|
188 | if not isinstance(u_content, unicode): | |
189 | log.warning(' >> %s Could not get this content as unicode, ' |

189 | log.warning(' >> %s Could not get this content as unicode, ' | |
190 | 'replacing with empty content' % path) |
|
190 | 'replacing with empty content' % path) | |
191 | u_content = u'' |
|
191 | u_content = u'' | |
192 | else: |
|
192 | else: | |
193 | log.debug(' >> %s [WITH CONTENT]', path) |
|
193 | log.debug(' >> %s [WITH CONTENT]', path) | |
194 | indexed_w_content += 1 |
|
194 | indexed_w_content += 1 | |
195 |
|
195 | |||
196 | else: |
|
196 | else: | |
197 | log.debug(' >> %s', path) |
|
197 | log.debug(' >> %s', path) | |
198 | # just index the file name without its content |

198 | # just index the file name without its content | |
199 | u_content = u'' |
|
199 | u_content = u'' | |
200 | indexed += 1 |
|
200 | indexed += 1 | |
201 |
|
201 | |||
202 | p = safe_unicode(path) |
|
202 | p = safe_unicode(path) | |
203 | writer.add_document( |
|
203 | writer.add_document( | |
204 | fileid=p, |
|
204 | fileid=p, | |
205 | owner=unicode(repo.contact), |
|
205 | owner=unicode(repo.contact), | |
206 | repository=safe_unicode(repo_name), |
|
206 | repository=safe_unicode(repo_name), | |
207 | path=p, |
|
207 | path=p, | |
208 | content=u_content, |
|
208 | content=u_content, | |
209 | modtime=self.get_node_mtime(node), |
|
209 | modtime=self.get_node_mtime(node), | |
210 | extension=node.extension |
|
210 | extension=node.extension | |
211 | ) |
|
211 | ) | |
212 | return indexed, indexed_w_content |
|
212 | return indexed, indexed_w_content | |
213 |
|
213 | |||
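
add_doc assumes the file index SCHEMA (imported from kallithea.lib.indexers) has exactly the fields passed to add_document above. A hedged sketch of what such a Whoosh schema could look like - the field types here are an illustration, not the module's actual definition:

    from whoosh.analysis import SimpleAnalyzer
    from whoosh.fields import Schema, ID, TEXT, NUMERIC

    FILE_SCHEMA_SKETCH = Schema(
        fileid=ID(unique=True, stored=True),   # used as the delete_by_term key
        owner=TEXT(),
        repository=TEXT(stored=True),
        path=TEXT(stored=True),
        content=TEXT(analyzer=SimpleAnalyzer(), stored=True),
        modtime=NUMERIC(stored=True),          # epoch float from get_node_mtime
        extension=TEXT(stored=True),
    )
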
214 | def index_changesets(self, writer, repo_name, repo, start_rev=None): |
|
214 | def index_changesets(self, writer, repo_name, repo, start_rev=None): | |
215 | """ |
|
215 | """ | |
216 | Add all changesets in the vcs repo starting at start_rev |

216 | Add all changesets in the vcs repo starting at start_rev | |
217 | to the index writer |
|
217 | to the index writer | |
218 |
|
218 | |||
219 | :param writer: the whoosh index writer to add to |
|
219 | :param writer: the whoosh index writer to add to | |
220 | :param repo_name: name of the repository from which the |

220 | :param repo_name: name of the repository from which the | |
221 | changeset originates, including the repository group |

221 | changeset originates, including the repository group | |
222 | :param repo: the vcs repository instance to index changesets for, |
|
222 | :param repo: the vcs repository instance to index changesets for, | |
223 | the presumption is the repo has changesets to index |
|
223 | the presumption is the repo has changesets to index | |
224 | :param start_rev=None: the full sha id to start indexing from |
|
224 | :param start_rev=None: the full sha id to start indexing from | |
225 | if start_rev is None then index from the first changeset in |
|
225 | if start_rev is None then index from the first changeset in | |
226 | the repo |
|
226 | the repo | |
227 | """ |
|
227 | """ | |
228 |
|
228 | |||
229 | if start_rev is None: |
|
229 | if start_rev is None: | |
230 | start_rev = repo[0].raw_id |
|
230 | start_rev = repo[0].raw_id | |
231 |
|
231 | |||
232 | log.debug('indexing changesets in %s starting at rev: %s', |
|
232 | log.debug('indexing changesets in %s starting at rev: %s', | |
233 | repo_name, start_rev) |
|
233 | repo_name, start_rev) | |
234 |
|
234 | |||
235 | indexed = 0 |
|
235 | indexed = 0 | |
236 | cs_iter = repo.get_changesets(start=start_rev) |
|
236 | cs_iter = repo.get_changesets(start=start_rev) | |
237 | total = len(cs_iter) |
|
237 | total = len(cs_iter) | |
238 | for cs in cs_iter: |
|
238 | for cs in cs_iter: | |
239 | log.debug(' >> %s/%s', cs, total) |
|
239 | log.debug(' >> %s/%s', cs, total) | |
240 | writer.add_document( |
|
240 | writer.add_document( | |
241 | raw_id=unicode(cs.raw_id), |
|
241 | raw_id=unicode(cs.raw_id), | |
242 | owner=unicode(repo.contact), |
|
242 | owner=unicode(repo.contact), | |
243 | date=cs._timestamp, |
|
243 | date=cs._timestamp, | |
244 | repository=safe_unicode(repo_name), |
|
244 | repository=safe_unicode(repo_name), | |
245 | author=cs.author, |
|
245 | author=cs.author, | |
246 | message=cs.message, |
|
246 | message=cs.message, | |
247 | last=cs.last, |
|
247 | last=cs.last, | |
248 | added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(), |
|
248 | added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(), | |
249 | removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(), |
|
249 | removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(), | |
250 | changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(), |
|
250 | changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(), | |
251 | parents=u' '.join([cs.raw_id for cs in cs.parents]), |
|
251 | parents=u' '.join([cs.raw_id for cs in cs.parents]), | |
252 | ) |
|
252 | ) | |
253 | indexed += 1 |
|
253 | indexed += 1 | |
254 |
|
254 | |||
255 | log.debug('indexed %d changesets for repo %s', indexed, repo_name) |
|
255 | log.debug('indexed %d changesets for repo %s', indexed, repo_name) | |
256 | return indexed |
|
256 | return indexed | |
257 |
|
257 | |||
258 | def index_files(self, file_idx_writer, repo_name, repo): |
|
258 | def index_files(self, file_idx_writer, repo_name, repo): | |
259 | """ |
|
259 | """ | |
260 | Index files for given repo_name |
|
260 | Index files for given repo_name | |
261 |
|
261 | |||
262 | :param file_idx_writer: the whoosh index writer to add to |
|
262 | :param file_idx_writer: the whoosh index writer to add to | |
263 | :param repo_name: name of the repository we're indexing |
|
263 | :param repo_name: name of the repository we're indexing | |
264 | :param repo: instance of vcs repo |
|
264 | :param repo: instance of vcs repo | |
265 | """ |
|
265 | """ | |
266 | i_cnt = iwc_cnt = 0 |
|
266 | i_cnt = iwc_cnt = 0 | |
267 | log.debug('building index for %s @revision:%s', repo.path, |
|
267 | log.debug('building index for %s @revision:%s', repo.path, | |
268 | self._get_index_revision(repo)) |
|
268 | self._get_index_revision(repo)) | |
269 | index_rev = self._get_index_revision(repo) |
|
269 | index_rev = self._get_index_revision(repo) | |
270 | for idx_path in self.get_paths(repo): |
|
270 | for idx_path in self.get_paths(repo): | |
271 | i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev) |
|
271 | i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev) | |
272 | i_cnt += i |
|
272 | i_cnt += i | |
273 | iwc_cnt += iwc |
|
273 | iwc_cnt += iwc | |
274 |
|
274 | |||
275 | log.debug('added %s files %s with content for repo %s', |
|
275 | log.debug('added %s files %s with content for repo %s', | |
276 | i_cnt + iwc_cnt, iwc_cnt, repo.path) |
|
276 | i_cnt + iwc_cnt, iwc_cnt, repo.path) | |
277 | return i_cnt, iwc_cnt |
|
277 | return i_cnt, iwc_cnt | |
278 |
|
278 | |||
279 | def update_changeset_index(self): |
|
279 | def update_changeset_index(self): | |
280 | idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME) |
|
280 | idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME) | |
281 |
|
281 | |||
282 | with idx.searcher() as searcher: |
|
282 | with idx.searcher() as searcher: | |
283 | writer = idx.writer() |
|
283 | writer = idx.writer() | |
284 | writer_is_dirty = False |
|
284 | writer_is_dirty = False | |
285 | try: |
|
285 | try: | |
286 | indexed_total = 0 |
|
286 | indexed_total = 0 | |
287 | repo_name = None |
|
287 | repo_name = None | |
288 | for repo_name, repo in self.repo_paths.items(): |
|
288 | for repo_name, repo in self.repo_paths.items(): | |
289 | # skip indexing if there aren't any revs in the repo |
|
289 | # skip indexing if there aren't any revs in the repo | |
290 | num_of_revs = len(repo) |
|
290 | num_of_revs = len(repo) | |
291 | if num_of_revs < 1: |
|
291 | if num_of_revs < 1: | |
292 | continue |
|
292 | continue | |
293 |
|
293 | |||
294 | qp = QueryParser('repository', schema=CHGSETS_SCHEMA) |
|
294 | qp = QueryParser('repository', schema=CHGSETS_SCHEMA) | |
295 | q = qp.parse(u"last:t AND %s" % repo_name) |
|
295 | q = qp.parse(u"last:t AND %s" % repo_name) | |
296 |
|
296 | |||
297 | results = searcher.search(q) |
|
297 | results = searcher.search(q) | |
298 |
|
298 | |||
299 | # default to scanning the entire repo |
|
299 | # default to scanning the entire repo | |
300 | last_rev = 0 |
|
300 | last_rev = 0 | |
301 | start_id = None |
|
301 | start_id = None | |
302 |
|
302 | |||
303 | if len(results) > 0: |
|
303 | if len(results) > 0: | |
304 | # assuming that there is only one result, if not this |
|
304 | # assuming that there is only one result, if not this | |
305 | # may require a full re-index. |
|
305 | # may require a full re-index. | |
306 | start_id = results[0]['raw_id'] |
|
306 | start_id = results[0]['raw_id'] | |
307 | last_rev = repo.get_changeset(revision=start_id).revision |
|
307 | last_rev = repo.get_changeset(revision=start_id).revision | |
308 |
|
308 | |||
309 | # there are new changesets to index or a new repo to index |
|
309 | # there are new changesets to index or a new repo to index | |
310 | if last_rev == 0 or num_of_revs > last_rev + 1: |
|
310 | if last_rev == 0 or num_of_revs > last_rev + 1: | |
311 | # delete the docs in the index for the previous |
|
311 | # delete the docs in the index for the previous | |
312 | # last changeset(s) |
|
312 | # last changeset(s) | |
313 | for hit in results: |
|
313 | for hit in results: | |
314 | q = qp.parse(u"last:t AND %s AND raw_id:%s" % |
|
314 | q = qp.parse(u"last:t AND %s AND raw_id:%s" % | |
315 | (repo_name, hit['raw_id'])) |
|
315 | (repo_name, hit['raw_id'])) | |
316 | writer.delete_by_query(q) |
|
316 | writer.delete_by_query(q) | |
317 |
|
317 | |||
318 | # index from the previous last changeset + all new ones |
|
318 | # index from the previous last changeset + all new ones | |
319 | indexed_total += self.index_changesets(writer, |
|
319 | indexed_total += self.index_changesets(writer, | |
320 | repo_name, repo, start_id) |
|
320 | repo_name, repo, start_id) | |
321 | writer_is_dirty = True |
|
321 | writer_is_dirty = True | |
322 | log.debug('indexed %s changesets for repo %s', |
|
322 | log.debug('indexed %s changesets for repo %s', | |
323 | indexed_total, repo_name |
|
323 | indexed_total, repo_name | |
324 | ) |
|
324 | ) | |
325 | finally: |
|
325 | finally: | |
326 | if writer_is_dirty: |
|
326 | if writer_is_dirty: | |
327 | log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<') |

327 | log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<') | |
328 | writer.commit(merge=True) |
|
328 | writer.commit(merge=True) | |
329 | log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<') |
|
329 | log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<') | |
330 | else: |
|
330 | else: | |
331 | log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<') |

331 | log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<') | |
332 |
|
332 | |||
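
The incremental update hinges on the last field: only the most recently indexed changeset of each repository carries last:t, so one query both finds the resume point and identifies the stale marker documents to delete before reindexing. A small sketch of how the parser expands that query for a hypothetical repository name:

    # 'myrepo' is a placeholder; CHGSETS_SCHEMA as imported above
    qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
    print qp.parse(u"last:t AND myrepo")
    # roughly: And([Term('last', u't'), Term('repository', u'myrepo')])
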
333 | def update_file_index(self): |
|
333 | def update_file_index(self): | |
334 | log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s ' |
|
334 | log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s ' | |
335 | 'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys())) |
|
335 | 'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys())) | |
336 |
|
336 | |||
337 | idx = open_dir(self.index_location, indexname=self.indexname) |
|
337 | idx = open_dir(self.index_location, indexname=self.indexname) | |
338 | # The set of all paths in the index |
|
338 | # The set of all paths in the index | |
339 | indexed_paths = set() |
|
339 | indexed_paths = set() | |
340 | # The set of all paths we need to re-index |
|
340 | # The set of all paths we need to re-index | |
341 | to_index = set() |
|
341 | to_index = set() | |
342 |
|
342 | |||
343 | writer = idx.writer() |
|
343 | writer = idx.writer() | |
344 | writer_is_dirty = False |
|
344 | writer_is_dirty = False | |
345 | try: |
|
345 | try: | |
346 | with idx.reader() as reader: |
|
346 | with idx.reader() as reader: | |
347 |
|
347 | |||
348 | # Loop over the stored fields in the index |
|
348 | # Loop over the stored fields in the index | |
349 | for fields in reader.all_stored_fields(): |
|
349 | for fields in reader.all_stored_fields(): | |
350 | indexed_path = fields['path'] |
|
350 | indexed_path = fields['path'] | |
351 | indexed_repo_path = fields['repository'] |
|
351 | indexed_repo_path = fields['repository'] | |
352 | indexed_paths.add(indexed_path) |
|
352 | indexed_paths.add(indexed_path) | |
353 |
|
353 | |||
354 | if indexed_repo_path not in self.filtered_repo_update_paths: |

354 | if indexed_repo_path not in self.filtered_repo_update_paths: | |
355 | continue |
|
355 | continue | |
356 |
|
356 | |||
357 | repo = self.repo_paths[indexed_repo_path] |
|
357 | repo = self.repo_paths[indexed_repo_path] | |
358 |
|
358 | |||
359 | try: |
|
359 | try: | |
360 | node = self.get_node(repo, indexed_path) |
|
360 | node = self.get_node(repo, indexed_path) | |
361 | # Check if this file was changed since it was indexed |
|
361 | # Check if this file was changed since it was indexed | |
362 | indexed_time = fields['modtime'] |
|
362 | indexed_time = fields['modtime'] | |
363 | mtime = self.get_node_mtime(node) |
|
363 | mtime = self.get_node_mtime(node) | |
364 | if mtime > indexed_time: |
|
364 | if mtime > indexed_time: | |
365 | # The file has changed, delete it and add it to |
|
365 | # The file has changed, delete it and add it to | |
366 | # the list of files to reindex |
|
366 | # the list of files to reindex | |
367 | log.debug( |
|
367 | log.debug( | |
368 | 'adding to reindex list %s mtime: %s vs %s', |
|
368 | 'adding to reindex list %s mtime: %s vs %s', | |
369 | indexed_path, mtime, indexed_time |
|
369 | indexed_path, mtime, indexed_time | |
370 | ) |
|
370 | ) | |
371 | writer.delete_by_term('fileid', indexed_path) |
|
371 | writer.delete_by_term('fileid', indexed_path) | |
372 | writer_is_dirty = True |
|
372 | writer_is_dirty = True | |
373 |
|
373 | |||
374 | to_index.add(indexed_path) |
|
374 | to_index.add(indexed_path) | |
375 | except (ChangesetError, NodeDoesNotExistError): |
|
375 | except (ChangesetError, NodeDoesNotExistError): | |
376 | # This file was deleted since it was indexed |
|
376 | # This file was deleted since it was indexed | |
377 | log.debug('removing from index %s', indexed_path) |
|
377 | log.debug('removing from index %s', indexed_path) | |
378 | writer.delete_by_term('path', indexed_path) |
|
378 | writer.delete_by_term('path', indexed_path) | |
379 | writer_is_dirty = True |
|
379 | writer_is_dirty = True | |
380 |
|
380 | |||
381 | # Loop over the files in the filesystem |
|
381 | # Loop over the files in the filesystem | |
382 | # Assume we have a function that gathers the filenames of the |
|
382 | # Assume we have a function that gathers the filenames of the | |
383 | # documents to be indexed |
|
383 | # documents to be indexed | |
384 | ri_cnt_total = 0 # indexed |
|
384 | ri_cnt_total = 0 # indexed | |
385 | riwc_cnt_total = 0 # indexed with content |
|
385 | riwc_cnt_total = 0 # indexed with content | |
386 | for repo_name, repo in self.repo_paths.items(): |
|
386 | for repo_name, repo in self.repo_paths.items(): | |
387 | # skip indexing if there aren't any revisions |
|
387 | # skip indexing if there aren't any revisions | |
388 | if len(repo) < 1: |
|
388 | if len(repo) < 1: | |
389 | continue |
|
389 | continue | |
390 | ri_cnt = 0 # indexed |
|
390 | ri_cnt = 0 # indexed | |
391 | riwc_cnt = 0 # indexed with content |
|
391 | riwc_cnt = 0 # indexed with content | |
392 | for path in self.get_paths(repo): |
|
392 | for path in self.get_paths(repo): | |
393 | path = safe_unicode(path) |
|
393 | path = safe_unicode(path) | |
394 | if path in to_index or path not in indexed_paths: |
|
394 | if path in to_index or path not in indexed_paths: | |
395 |
|
395 | |||
396 | # This is either a file that's changed, or a new file |
|
396 | # This is either a file that's changed, or a new file | |
397 | # that wasn't indexed before. So index it! |
|
397 | # that wasn't indexed before. So index it! | |
398 | i, iwc = self.add_doc(writer, path, repo, repo_name) |
|
398 | i, iwc = self.add_doc(writer, path, repo, repo_name) | |
399 | writer_is_dirty = True |
|
399 | writer_is_dirty = True | |
400 | log.debug('re-indexing %s', path) |

400 | log.debug('re-indexing %s', path) | |
401 | ri_cnt += i |
|
401 | ri_cnt += i | |
402 | ri_cnt_total += 1 |
|
402 | ri_cnt_total += 1 | |
403 | riwc_cnt += iwc |
|
403 | riwc_cnt += iwc | |
404 | riwc_cnt_total += iwc |
|
404 | riwc_cnt_total += iwc | |
405 | log.debug('added %s files %s with content for repo %s', |
|
405 | log.debug('added %s files %s with content for repo %s', | |
406 | ri_cnt + riwc_cnt, riwc_cnt, repo.path |
|
406 | ri_cnt + riwc_cnt, riwc_cnt, repo.path | |
407 | ) |
|
407 | ) | |
408 | log.debug('indexed %s files in total and %s with content', |
|
408 | log.debug('indexed %s files in total and %s with content', | |
409 | ri_cnt_total, riwc_cnt_total |
|
409 | ri_cnt_total, riwc_cnt_total | |
410 | ) |
|
410 | ) | |
411 | finally: |
|
411 | finally: | |
412 | if writer_is_dirty: |
|
412 | if writer_is_dirty: | |
413 | log.debug('>> COMMITTING CHANGES TO FILE INDEX <<') |

413 | log.debug('>> COMMITTING CHANGES TO FILE INDEX <<') | |
414 | writer.commit(merge=True) |
|
414 | writer.commit(merge=True) | |
415 | log.debug('>>> FINISHED REBUILDING FILE INDEX <<<') |
|
415 | log.debug('>>> FINISHED REBUILDING FILE INDEX <<<') | |
416 | else: |
|
416 | else: | |
417 | log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<') |
|
417 | log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<') | |
418 | writer.cancel() |
|
418 | writer.cancel() | |
419 |
|
419 | |||
420 | def build_indexes(self): |
|
420 | def build_indexes(self): | |
421 | if os.path.exists(self.index_location): |
|
421 | if os.path.exists(self.index_location): | |
422 | log.debug('removing previous index') |
|
422 | log.debug('removing previous index') | |
423 | rmtree(self.index_location) |
|
423 | rmtree(self.index_location) | |
424 |
|
424 | |||
425 | if not os.path.exists(self.index_location): |
|
425 | if not os.path.exists(self.index_location): | |
426 | os.mkdir(self.index_location) |
|
426 | os.mkdir(self.index_location) | |
427 |
|
427 | |||
428 | chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA, |
|
428 | chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA, | |
429 | indexname=CHGSET_IDX_NAME) |
|
429 | indexname=CHGSET_IDX_NAME) | |
430 | chgset_idx_writer = chgset_idx.writer() |
|
430 | chgset_idx_writer = chgset_idx.writer() | |
431 |
|
431 | |||
432 | file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME) |
|
432 | file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME) | |
433 | file_idx_writer = file_idx.writer() |
|
433 | file_idx_writer = file_idx.writer() | |
434 | log.debug('BUILDING INDEX FOR EXTENSIONS %s ' |
|
434 | log.debug('BUILDING INDEX FOR EXTENSIONS %s ' | |
435 | 'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys())) |
|
435 | 'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys())) | |
436 |
|
436 | |||
437 | for repo_name, repo in self.repo_paths.items(): |
|
437 | for repo_name, repo in self.repo_paths.items(): | |
438 | # skip indexing if there aren't any revisions |
|
438 | # skip indexing if there aren't any revisions | |
439 | if len(repo) < 1: |
|
439 | if len(repo) < 1: | |
440 | continue |
|
440 | continue | |
441 |
|
441 | |||
442 | self.index_files(file_idx_writer, repo_name, repo) |
|
442 | self.index_files(file_idx_writer, repo_name, repo) | |
443 | self.index_changesets(chgset_idx_writer, repo_name, repo) |
|
443 | self.index_changesets(chgset_idx_writer, repo_name, repo) | |
444 |
|
444 | |||
445 | log.debug('>> COMMITTING CHANGES <<') |

445 | log.debug('>> COMMITTING CHANGES <<') | |
446 | file_idx_writer.commit(merge=True) |
|
446 | file_idx_writer.commit(merge=True) | |
447 | chgset_idx_writer.commit(merge=True) |
|
447 | chgset_idx_writer.commit(merge=True) | |
448 | log.debug('>>> FINISHED BUILDING INDEX <<<') |
|
448 | log.debug('>>> FINISHED BUILDING INDEX <<<') | |
449 |
|
449 | |||
450 | def update_indexes(self): |
|
450 | def update_indexes(self): | |
451 | self.update_file_index() |
|
451 | self.update_file_index() | |
452 | self.update_changeset_index() |
|
452 | self.update_changeset_index() | |
453 |
|
453 | |||
454 | def run(self, full_index=False): |
|
454 | def run(self, full_index=False): | |
455 | """Run daemon""" |
|
455 | """Run daemon""" | |
456 | if full_index or self.initial: |
|
456 | if full_index or self.initial: | |
457 | self.build_indexes() |
|
457 | self.build_indexes() | |
458 | else: |
|
458 | else: | |
459 | self.update_indexes() |
|
459 | self.update_indexes() |
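
Putting it together, a minimal direct invocation of the daemon could look like this (paths are placeholders; inside Kallithea the daemon is normally driven by the index-building paster command rather than instantiated by hand):

    daemon = WhooshIndexingDaemon(index_location='/srv/kallithea/index',
                                  repo_location='/srv/repos')
    # full build on the first run (self.initial), incremental afterwards
    daemon.run(full_index=False)
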
@@ -1,83 +1,83 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.cache_keys |
|
15 | kallithea.lib.paster_commands.cache_keys | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | cleanup-keys paster command for Kallithea |
|
18 | cleanup-keys paster command for Kallithea | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: mar 27, 2013 |
|
23 | :created_on: mar 27, 2013 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | import os |
|
30 | import os | |
31 | import sys |
|
31 | import sys | |
32 |
|
32 | |||
33 | from kallithea.model.meta import Session |
|
33 | from kallithea.model.meta import Session | |
34 | from kallithea.lib.utils import BasePasterCommand |
|
34 | from kallithea.lib.utils import BasePasterCommand | |
35 | from kallithea.model.db import CacheInvalidation |
|
35 | from kallithea.model.db import CacheInvalidation | |
36 |
|
36 | |||
37 | # Add location of top level folder to sys.path |
|
37 | # Add location of top level folder to sys.path | |
38 | from os.path import dirname as dn |

38 | from os.path import dirname | |
39 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
39 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
40 | sys.path.append(rc_path) |
|
40 | sys.path.append(rc_path) | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class Command(BasePasterCommand): |
|
43 | class Command(BasePasterCommand): | |
44 |
|
44 | |||
45 | max_args = 1 |
|
45 | max_args = 1 | |
46 | min_args = 1 |
|
46 | min_args = 1 | |
47 |
|
47 | |||
48 | usage = "CONFIG_FILE" |
|
48 | usage = "CONFIG_FILE" | |
49 | group_name = "Kallithea" |
|
49 | group_name = "Kallithea" | |
50 | takes_config_file = -1 |
|
50 | takes_config_file = -1 | |
51 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
51 | parser = BasePasterCommand.standard_parser(verbose=True) | |
52 | summary = "Cache keys utils" |
|
52 | summary = "Cache keys utils" | |
53 |
|
53 | |||
54 | def command(self): |
|
54 | def command(self): | |
55 | #get SqlAlchemy session |
|
55 | #get SqlAlchemy session | |
56 | self._init_session() |
|
56 | self._init_session() | |
57 |
|
57 | |||
58 | _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all() |
|
58 | _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all() | |
59 | if self.options.show: |
|
59 | if self.options.show: | |
60 | for c_obj in _caches: |
|
60 | for c_obj in _caches: | |
61 | print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active) |
|
61 | print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active) | |
62 | elif self.options.cleanup: |
|
62 | elif self.options.cleanup: | |
63 | for c_obj in _caches: |
|
63 | for c_obj in _caches: | |
64 | Session().delete(c_obj) |
|
64 | Session().delete(c_obj) | |
65 | print 'Removing key: %s' % (c_obj.cache_key) |
|
65 | print 'Removing key: %s' % (c_obj.cache_key) | |
66 | Session().commit() |
|
66 | Session().commit() | |
67 | else: |
|
67 | else: | |
68 | print 'Nothing done, exiting...' |
|
68 | print 'Nothing done, exiting...' | |
69 |
|
69 | |||
70 | def update_parser(self): |
|
70 | def update_parser(self): | |
71 | self.parser.add_option( |
|
71 | self.parser.add_option( | |
72 | '--show', |
|
72 | '--show', | |
73 | action='store_true', |
|
73 | action='store_true', | |
74 | dest='show', |
|
74 | dest='show', | |
75 | help=("show existing cache keys with together with status") |
|
75 | help=("show existing cache keys with together with status") | |
76 | ) |
|
76 | ) | |
77 |
|
77 | |||
78 | self.parser.add_option( |
|
78 | self.parser.add_option( | |
79 | '--cleanup', |
|
79 | '--cleanup', | |
80 | action="store_true", |
|
80 | action="store_true", | |
81 | dest="cleanup", |
|
81 | dest="cleanup", | |
82 | help="cleanup existing cache keys" |
|
82 | help="cleanup existing cache keys" | |
83 | ) |
|
83 | ) |
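
Assuming the command is registered under the cleanup-keys name used in the module docstring above, typical invocations would be 'paster cleanup-keys my.ini --show' to list the cache keys with their status, and 'paster cleanup-keys my.ini --cleanup' to delete them; 'my.ini' is a placeholder config file.
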
@@ -1,149 +1,149 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.cleanup |
|
15 | kallithea.lib.paster_commands.cleanup | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | cleanup-repos paster command for Kallithea |
|
18 | cleanup-repos paster command for Kallithea | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Jul 14, 2012 |
|
23 | :created_on: Jul 14, 2012 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | import os |
|
30 | import os | |
31 | import sys |
|
31 | import sys | |
32 | import re |
|
32 | import re | |
33 | import shutil |
|
33 | import shutil | |
34 | import datetime |
|
34 | import datetime | |
35 |
|
35 | |||
36 | from kallithea.lib.utils import BasePasterCommand, ask_ok, REMOVED_REPO_PAT |
|
36 | from kallithea.lib.utils import BasePasterCommand, ask_ok, REMOVED_REPO_PAT | |
37 | from kallithea.lib.utils2 import safe_str |
|
37 | from kallithea.lib.utils2 import safe_str | |
38 | from kallithea.model.db import Ui |
|
38 | from kallithea.model.db import Ui | |
39 |
|
39 | |||
40 | # Add location of top level folder to sys.path |
|
40 | # Add location of top level folder to sys.path | |
41 | from os.path import dirname as dn |

41 | from os.path import dirname | |
42 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
42 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
43 | sys.path.append(rc_path) |
|
43 | sys.path.append(rc_path) | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | class Command(BasePasterCommand): |
|
46 | class Command(BasePasterCommand): | |
47 |
|
47 | |||
48 | max_args = 1 |
|
48 | max_args = 1 | |
49 | min_args = 1 |
|
49 | min_args = 1 | |
50 |
|
50 | |||
51 | usage = "CONFIG_FILE" |
|
51 | usage = "CONFIG_FILE" | |
52 | group_name = "Kallithea" |
|
52 | group_name = "Kallithea" | |
53 | takes_config_file = -1 |
|
53 | takes_config_file = -1 | |
54 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
54 | parser = BasePasterCommand.standard_parser(verbose=True) | |
55 | summary = "Cleanup deleted repos" |
|
55 | summary = "Cleanup deleted repos" | |
56 |
|
56 | |||
57 | def _parse_older_than(self, val): |
|
57 | def _parse_older_than(self, val): | |
58 | regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?') |
|
58 | regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?') | |
59 | parts = regex.match(val) |
|
59 | parts = regex.match(val) | |
60 | if not parts: |
|
60 | if not parts: | |
61 | return |
|
61 | return | |
62 | parts = parts.groupdict() |
|
62 | parts = parts.groupdict() | |
63 | time_params = {} |
|
63 | time_params = {} | |
64 | for (name, param) in parts.iteritems(): |
|
64 | for (name, param) in parts.iteritems(): | |
65 | if param: |
|
65 | if param: | |
66 | time_params[name] = int(param) |
|
66 | time_params[name] = int(param) | |
67 | return datetime.timedelta(**time_params) |
|
67 | return datetime.timedelta(**time_params) | |
68 |
|
68 | |||
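
The regex above recognizes a compact duration syntax where every group is optional; note that because all groups are optional it also 'matches' arbitrary input as an empty timedelta. The same logic pulled out as a standalone illustration:

    import datetime
    import re

    regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?'
                       r'((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?')

    def parse_older_than(val):
        parts = regex.match(val)
        if not parts:
            return
        time_params = dict((name, int(param))
                           for name, param in parts.groupdict().iteritems()
                           if param)
        return datetime.timedelta(**time_params)

    print parse_older_than('30d')     # -> 30 days, 0:00:00
    print parse_older_than('2d12h')   # -> 2 days, 12:00:00
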
69 | def _extract_date(self, name): |
|
69 | def _extract_date(self, name): | |
70 | """ |
|
70 | """ | |
71 | Extract the date part from rm__<date> pattern of removed repos, |
|
71 | Extract the date part from rm__<date> pattern of removed repos, | |
72 | and convert it to datetime object |
|
72 | and convert it to datetime object | |
73 |
|
73 | |||
74 | :param name: |
|
74 | :param name: | |
75 | """ |
|
75 | """ | |
76 | date_part = name[4:19]  # 4:19 since we don't parse the sub-second part |

76 | date_part = name[4:19]  # 4:19 since we don't parse the sub-second part | |
77 | return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S') |
|
77 | return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S') | |
78 |
|
78 | |||
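
Removed repositories are renamed following REMOVED_REPO_PAT, roughly rm__<%Y%m%d_%H%M%S>_<sub-second part>__<name>, so the fixed slice [4:19] grabs exactly the 15-character timestamp. A worked example with a hypothetical directory name:

    import datetime

    name = 'rm__20140701_123456_000000__oldrepo'   # hypothetical
    date_part = name[4:19]                          # -> '20140701_123456'
    print datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
    # -> 2014-07-01 12:34:56
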
79 | def command(self): |
|
79 | def command(self): | |
80 | #get SqlAlchemy session |
|
80 | #get SqlAlchemy session | |
81 | self._init_session() |
|
81 | self._init_session() | |
82 |
|
82 | |||
83 | repos_location = Ui.get_repos_location() |
|
83 | repos_location = Ui.get_repos_location() | |
84 | to_remove = [] |
|
84 | to_remove = [] | |
85 | for dn_, dirs, f in os.walk(safe_str(repos_location)): |
|
85 | for dn_, dirs, f in os.walk(safe_str(repos_location)): | |
86 | alldirs = list(dirs) |
|
86 | alldirs = list(dirs) | |
87 | del dirs[:] |
|
87 | del dirs[:] | |
88 | if ('.hg' in alldirs or |
|
88 | if ('.hg' in alldirs or | |
89 | 'objects' in alldirs and ('refs' in alldirs or 'packed-refs' in f)): |
|
89 | 'objects' in alldirs and ('refs' in alldirs or 'packed-refs' in f)): | |
90 | continue |
|
90 | continue | |
91 | for loc in alldirs: |
|
91 | for loc in alldirs: | |
92 | if REMOVED_REPO_PAT.match(loc): |
|
92 | if REMOVED_REPO_PAT.match(loc): | |
93 | to_remove.append([os.path.join(dn_, loc), |
|
93 | to_remove.append([os.path.join(dn_, loc), | |
94 | self._extract_date(loc)]) |
|
94 | self._extract_date(loc)]) | |
95 | else: |
|
95 | else: | |
96 | dirs.append(loc) |
|
96 | dirs.append(loc) | |
97 |
|
97 | |||
98 | #filter older than (if present)! |
|
98 | #filter older than (if present)! | |
99 | now = datetime.datetime.now() |
|
99 | now = datetime.datetime.now() | |
100 | older_than = self.options.older_than |
|
100 | older_than = self.options.older_than | |
101 | if older_than: |
|
101 | if older_than: | |
102 | to_remove_filtered = [] |
|
102 | to_remove_filtered = [] | |
103 | older_than_date = self._parse_older_than(older_than) |
|
103 | older_than_date = self._parse_older_than(older_than) | |
104 | for name, date_ in to_remove: |
|
104 | for name, date_ in to_remove: | |
105 | repo_age = now - date_ |
|
105 | repo_age = now - date_ | |
106 | if repo_age > older_than_date: |
|
106 | if repo_age > older_than_date: | |
107 | to_remove_filtered.append([name, date_]) |
|
107 | to_remove_filtered.append([name, date_]) | |
108 |
|
108 | |||
109 | to_remove = to_remove_filtered |
|
109 | to_remove = to_remove_filtered | |
110 | print 'Removing %s deleted repos older than %s (%s)' \ |
|
110 | print 'Removing %s deleted repos older than %s (%s)' \ | |
111 | % (len(to_remove), older_than, older_than_date) |
|
111 | % (len(to_remove), older_than, older_than_date) | |
112 | else: |
|
112 | else: | |
113 | print 'Removing all %s deleted repos' % len(to_remove) |
|
113 | print 'Removing all %s deleted repos' % len(to_remove) | |
114 | if self.options.dont_ask or not to_remove: |
|
114 | if self.options.dont_ask or not to_remove: | |
115 | # don't ask just remove ! |
|
115 | # don't ask just remove ! | |
116 | remove = True |
|
116 | remove = True | |
117 | else: |
|
117 | else: | |
118 | remove = ask_ok('the following repositories will be deleted completely:\n%s\n' |
|
118 | remove = ask_ok('the following repositories will be deleted completely:\n%s\n' | |
119 | 'are you sure you want to remove them [y/n]?' |
|
119 | 'are you sure you want to remove them [y/n]?' | |
120 | % '\n'.join(['%s removed on %s' % (safe_str(x[0]), safe_str(x[1])) |
|
120 | % '\n'.join(['%s removed on %s' % (safe_str(x[0]), safe_str(x[1])) | |
121 | for x in to_remove])) |
|
121 | for x in to_remove])) | |
122 |
|
122 | |||
123 | if remove: |
|
123 | if remove: | |
124 | for path, date_ in to_remove: |
|
124 | for path, date_ in to_remove: | |
125 | print 'Removing repository %s' % path |
|
125 | print 'Removing repository %s' % path | |
126 | shutil.rmtree(path) |
|
126 | shutil.rmtree(path) | |
127 | else: |
|
127 | else: | |
128 | print 'Nothing done, exiting...' |
|
128 | print 'Nothing done, exiting...' | |
129 |
|
129 | |||
130 | def update_parser(self): |
|
130 | def update_parser(self): | |
131 | self.parser.add_option( |
|
131 | self.parser.add_option( | |
132 | '--older-than', |
|
132 | '--older-than', | |
133 | action='store', |
|
133 | action='store', | |
134 | dest='older_than', |
|
134 | dest='older_than', | |
135 | help=("only remove repos that have been removed " |
|
135 | help=("only remove repos that have been removed " | |
136 | "at least given time ago. " |
|
136 | "at least given time ago. " | |
137 | "The default is to remove all removed repositories. " |
|
137 | "The default is to remove all removed repositories. " | |
138 | "Possible suffixes: " |
|
138 | "Possible suffixes: " | |
139 | "d (days), h (hours), m (minutes), s (seconds). " |
|
139 | "d (days), h (hours), m (minutes), s (seconds). " | |
140 | "For example --older-than=30d deletes repositories " |
|
140 | "For example --older-than=30d deletes repositories " | |
141 | "removed more than 30 days ago.") |
|
141 | "removed more than 30 days ago.") | |
142 | ) |
|
142 | ) | |
143 |
|
143 | |||
144 | self.parser.add_option( |
|
144 | self.parser.add_option( | |
145 | '--dont-ask', |
|
145 | '--dont-ask', | |
146 | action="store_true", |
|
146 | action="store_true", | |
147 | dest="dont_ask", |
|
147 | dest="dont_ask", | |
148 | help="remove repositories without asking for confirmation." |
|
148 | help="remove repositories without asking for confirmation." | |
149 | ) |
|
149 | ) |
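
Assuming the command is registered under the cleanup-repos name used in the module docstring above, a typical invocation would be 'paster cleanup-repos my.ini --older-than=30d', which removes only repositories deleted more than 30 days ago, with '--dont-ask' skipping the confirmation prompt; 'my.ini' is a placeholder config file.
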
@@ -1,108 +1,108 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.install_iis |
|
15 | kallithea.lib.paster_commands.install_iis | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | IIS installation tools for Kallithea |
|
18 | IIS installation tools for Kallithea | |
19 | """ |
|
19 | """ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import os |
|
22 | import os | |
23 | import sys |
|
23 | import sys | |
24 | from paste.script.appinstall import AbstractInstallCommand |
|
24 | from paste.script.appinstall import AbstractInstallCommand | |
25 | from paste.script.command import BadCommand |
|
25 | from paste.script.command import BadCommand | |
26 |
|
26 | |||
27 | # Add location of top level folder to sys.path |
|
27 | # Add location of top level folder to sys.path | |
28 | from os.path import dirname as dn |

28 | from os.path import dirname | |
29 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
29 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
30 | sys.path.append(rc_path) |
|
30 | sys.path.append(rc_path) | |
31 |
|
31 | |||
32 | class Command(AbstractInstallCommand): |
|
32 | class Command(AbstractInstallCommand): | |
33 | default_verbosity = 1 |
|
33 | default_verbosity = 1 | |
34 | max_args = 1 |
|
34 | max_args = 1 | |
35 | min_args = 1 |
|
35 | min_args = 1 | |
36 | summary = 'Setup IIS given a config file' |
|
36 | summary = 'Setup IIS given a config file' | |
37 | usage = 'CONFIG_FILE' |
|
37 | usage = 'CONFIG_FILE' | |
38 |
|
38 | |||
39 | description = ''' |
|
39 | description = ''' | |
40 | Script for installing into IIS using isapi-wsgi. |
|
40 | Script for installing into IIS using isapi-wsgi. | |
41 | ''' |
|
41 | ''' | |
42 | parser = AbstractInstallCommand.standard_parser( |
|
42 | parser = AbstractInstallCommand.standard_parser( | |
43 | simulate=True, quiet=True, interactive=True) |
|
43 | simulate=True, quiet=True, interactive=True) | |
44 | parser.add_option('--virtualdir', |
|
44 | parser.add_option('--virtualdir', | |
45 | action='store', |
|
45 | action='store', | |
46 | dest='virtualdir', |
|
46 | dest='virtualdir', | |
47 | default='/', |
|
47 | default='/', | |
48 | help='The virtual folder to install into on IIS') |
|
48 | help='The virtual folder to install into on IIS') | |
49 |
|
49 | |||
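
Assuming the command is registered as install-iis, a typical invocation would be 'paster install-iis production.ini --virtualdir=/kallithea', generating a dispatch script for isapi-wsgi in the current working directory; both the config file name and the virtual directory are placeholders.
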
50 | def command(self): |
|
50 | def command(self): | |
51 | config_spec = self.args[0] |
|
51 | config_spec = self.args[0] | |
52 | if not config_spec.startswith('config:'): |
|
52 | if not config_spec.startswith('config:'): | |
53 | config_spec = 'config:' + config_spec |
|
53 | config_spec = 'config:' + config_spec | |
54 | config_file = config_spec[len('config:'):].split('#', 1)[0] |
|
54 | config_file = config_spec[len('config:'):].split('#', 1)[0] | |
55 | config_file = os.path.join(os.getcwd(), config_file) |
|
55 | config_file = os.path.join(os.getcwd(), config_file) | |
56 | try: |
|
56 | try: | |
57 | import isapi_wsgi |
|
57 | import isapi_wsgi | |
58 | except ImportError: |
|
58 | except ImportError: | |
59 | raise BadCommand('missing requirement: isapi-wsgi not installed') |
|
59 | raise BadCommand('missing requirement: isapi-wsgi not installed') | |
60 |
|
60 | |||
61 | file = '''\ |
|
61 | file = '''\ | |
62 | # Created by Kallithea install_iis |
|
62 | # Created by Kallithea install_iis | |
63 | import sys |
|
63 | import sys | |
64 |
|
64 | |||
65 | if hasattr(sys, "isapidllhandle"): |
|
65 | if hasattr(sys, "isapidllhandle"): | |
66 | import win32traceutil |
|
66 | import win32traceutil | |
67 |
|
67 | |||
68 | import isapi_wsgi |
|
68 | import isapi_wsgi | |
69 | import os |
|
69 | import os | |
70 |
|
70 | |||
71 | def __ExtensionFactory__(): |
|
71 | def __ExtensionFactory__(): | |
72 | from paste.deploy import loadapp |
|
72 | from paste.deploy import loadapp | |
73 | from paste.script.util.logging_config import fileConfig |
|
73 | from paste.script.util.logging_config import fileConfig | |
74 | fileConfig('%(inifile)s') |
|
74 | fileConfig('%(inifile)s') | |
75 | application = loadapp('config:%(inifile)s') |
|
75 | application = loadapp('config:%(inifile)s') | |
76 |
|
76 | |||
77 | def app(environ, start_response): |
|
77 | def app(environ, start_response): | |
78 | user = environ.get('REMOTE_USER', None) |
|
78 | user = environ.get('REMOTE_USER', None) | |
79 | if user is not None: |
|
79 | if user is not None: | |
80 | os.environ['REMOTE_USER'] = user |
|
80 | os.environ['REMOTE_USER'] = user | |
81 | return application(environ, start_response) |
|
81 | return application(environ, start_response) | |
82 |
|
82 | |||
83 | return isapi_wsgi.ISAPIThreadPoolHandler(app) |
|
83 | return isapi_wsgi.ISAPIThreadPoolHandler(app) | |
84 |
|
84 | |||
85 | if __name__=='__main__': |
|
85 | if __name__=='__main__': | |
86 | from isapi.install import * |
|
86 | from isapi.install import * | |
87 | params = ISAPIParameters() |
|
87 | params = ISAPIParameters() | |
88 | sm = [ScriptMapParams(Extension="*", Flags=0)] |
|
88 | sm = [ScriptMapParams(Extension="*", Flags=0)] | |
89 | vd = VirtualDirParameters(Name="%(virtualdir)s", |
|
89 | vd = VirtualDirParameters(Name="%(virtualdir)s", | |
90 | Description = "Kallithea", |
|
90 | Description = "Kallithea", | |
91 | ScriptMaps = sm, |
|
91 | ScriptMaps = sm, | |
92 | ScriptMapUpdate = "replace") |
|
92 | ScriptMapUpdate = "replace") | |
93 | params.VirtualDirs = [vd] |
|
93 | params.VirtualDirs = [vd] | |
94 | HandleCommandLine(params) |
|
94 | HandleCommandLine(params) | |
95 | ''' |
|
95 | ''' | |
96 |
|
96 | |||
97 | outdata = file % { |
|
97 | outdata = file % { | |
98 | 'inifile': config_file.replace('\\', '\\\\'), |
|
98 | 'inifile': config_file.replace('\\', '\\\\'), | |
99 | 'virtualdir': self.options.virtualdir |
|
99 | 'virtualdir': self.options.virtualdir | |
100 | } |
|
100 | } | |
101 |
|
101 | |||
102 | dispatchfile = os.path.join(os.getcwd(), 'dispatch.py') |
|
102 | dispatchfile = os.path.join(os.getcwd(), 'dispatch.py') | |
103 | self.ensure_file(dispatchfile, outdata, False) |
|
103 | self.ensure_file(dispatchfile, outdata, False) | |
104 | print 'Generating %s' % (dispatchfile,) |
|
104 | print 'Generating %s' % (dispatchfile,) | |
105 |
|
105 | |||
106 | print ('Run \'python "%s" install\' with administrative privileges ' |
|
106 | print ('Run \'python "%s" install\' with administrative privileges ' | |
107 | 'to generate the _dispatch.dll file and install it into the ' |
|
107 | 'to generate the _dispatch.dll file and install it into the ' | |
108 | 'default web site') % (dispatchfile,) |
|
108 | 'default web site') % (dispatchfile,) |
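
The dispatch template embedded above is the interesting part of this command: IIS authenticates the request and sets REMOTE_USER in the WSGI environ, and the generated app copies it into os.environ before delegating to the Paste-loaded application. A minimal standalone sketch of that wrapper pattern (make_remote_user_app is an illustrative name, not part of the generated file):

    import os

    def make_remote_user_app(application):
        # Propagate the REMOTE_USER set by IIS authentication from the
        # per-request WSGI environ into os.environ before delegating.
        def app(environ, start_response):
            user = environ.get('REMOTE_USER', None)
            if user is not None:
                os.environ['REMOTE_USER'] = user
            return application(environ, start_response)
        return app
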
@@ -1,74 +1,74 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.ishell |
|
15 | kallithea.lib.paster_commands.ishell | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | interactive shell paster command for Kallithea |
|
18 | interactive shell paster command for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 4, 2013 |
|
22 | :created_on: Apr 4, 2013 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 |
|
31 | |||
32 | from kallithea.lib.utils import BasePasterCommand |
|
32 | from kallithea.lib.utils import BasePasterCommand | |
33 |
|
33 | |||
34 | # Add location of top level folder to sys.path |
|
34 | # Add location of top level folder to sys.path | |
35 | from os.path import dirname as dn |
|
35 | from os.path import dirname | |
36 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
36 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
37 | sys.path.append(rc_path) |
|
37 | sys.path.append(rc_path) | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | class Command(BasePasterCommand): |
|
40 | class Command(BasePasterCommand): | |
41 |
|
41 | |||
42 | max_args = 1 |
|
42 | max_args = 1 | |
43 | min_args = 1 |
|
43 | min_args = 1 | |
44 |
|
44 | |||
45 | usage = "CONFIG_FILE" |
|
45 | usage = "CONFIG_FILE" | |
46 | group_name = "Kallithea" |
|
46 | group_name = "Kallithea" | |
47 | takes_config_file = -1 |
|
47 | takes_config_file = -1 | |
48 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
48 | parser = BasePasterCommand.standard_parser(verbose=True) | |
49 | summary = "Interactive shell" |
|
49 | summary = "Interactive shell" | |
50 |
|
50 | |||
51 | def command(self): |
|
51 | def command(self): | |
52 | #get SqlAlchemy session |
|
52 | #get SqlAlchemy session | |
53 | self._init_session() |
|
53 | self._init_session() | |
54 |
|
54 | |||
55 | # imports, used in IPython shell |
|
55 | # imports, used in IPython shell | |
56 | import os |
|
56 | import os | |
57 | import sys |
|
57 | import sys | |
58 | import time |
|
58 | import time | |
59 | import shutil |
|
59 | import shutil | |
60 | import datetime |
|
60 | import datetime | |
61 | from kallithea.model.db import * |
|
61 | from kallithea.model.db import * | |
62 |
|
62 | |||
63 | try: |
|
63 | try: | |
64 | from IPython import embed |
|
64 | from IPython import embed | |
65 | from IPython.config.loader import Config |
|
65 | from IPython.config.loader import Config | |
66 | cfg = Config() |
|
66 | cfg = Config() | |
67 | cfg.InteractiveShellEmbed.confirm_exit = False |
|
67 | cfg.InteractiveShellEmbed.confirm_exit = False | |
68 | embed(config=cfg, banner1="Kallithea IShell.") |
|
68 | embed(config=cfg, banner1="Kallithea IShell.") | |
69 | except ImportError: |
|
69 | except ImportError: | |
70 | print 'IPython installation is required for ishell' |
|
70 | print 'IPython installation is required for ishell' | |
71 | sys.exit(-1) |
|
71 | sys.exit(-1) | |
72 |
|
72 | |||
73 | def update_parser(self): |
|
73 | def update_parser(self): | |
74 | pass |
|
74 | pass |
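
The try/except above is the whole command: embed an IPython shell with the model classes pre-imported, or fail gracefully when IPython is missing. The embedding call itself, as a standalone sketch (this assumes an IPython version old enough to still ship IPython.config.loader; newer releases moved Config to traitlets.config):

    from IPython import embed
    from IPython.config.loader import Config  # traitlets.config.Config on newer IPython

    cfg = Config()
    # Don't ask "Do you really want to exit?" every time the shell closes.
    cfg.InteractiveShellEmbed.confirm_exit = False
    embed(config=cfg, banner1="Kallithea IShell.")
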
@@ -1,109 +1,109 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.make_index |
|
15 | kallithea.lib.paster_commands.make_index | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | make-index paster command for Kallithea |
|
18 | make-index paster command for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Aug 17, 2010 |
|
22 | :created_on: Aug 17, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 |
|
31 | |||
32 | from string import strip |
|
32 | from string import strip | |
33 | from kallithea.model.repo import RepoModel |
|
33 | from kallithea.model.repo import RepoModel | |
34 | from kallithea.lib.utils import BasePasterCommand, load_rcextensions |
|
34 | from kallithea.lib.utils import BasePasterCommand, load_rcextensions | |
35 |
|
35 | |||
36 | # Add location of top level folder to sys.path |
|
36 | # Add location of top level folder to sys.path | |
37 | from os.path import dirname as dn |
|
37 | from os.path import dirname | |
38 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
38 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
39 | sys.path.append(rc_path) |
|
39 | sys.path.append(rc_path) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | class Command(BasePasterCommand): |
|
42 | class Command(BasePasterCommand): | |
43 |
|
43 | |||
44 | max_args = 1 |
|
44 | max_args = 1 | |
45 | min_args = 1 |
|
45 | min_args = 1 | |
46 |
|
46 | |||
47 | usage = "CONFIG_FILE" |
|
47 | usage = "CONFIG_FILE" | |
48 | group_name = "Kallithea" |
|
48 | group_name = "Kallithea" | |
49 | takes_config_file = -1 |
|
49 | takes_config_file = -1 | |
50 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
50 | parser = BasePasterCommand.standard_parser(verbose=True) | |
51 | summary = "Creates or updates full text search index" |
|
51 | summary = "Creates or updates full text search index" | |
52 |
|
52 | |||
53 | def command(self): |
|
53 | def command(self): | |
54 | #get SqlAlchemy session |
|
54 | #get SqlAlchemy session | |
55 | self._init_session() |
|
55 | self._init_session() | |
56 | from pylons import config |
|
56 | from pylons import config | |
57 | index_location = config['index_dir'] |
|
57 | index_location = config['index_dir'] | |
58 | load_rcextensions(config['here']) |
|
58 | load_rcextensions(config['here']) | |
59 |
|
59 | |||
60 | repo_location = self.options.repo_location \ |
|
60 | repo_location = self.options.repo_location \ | |
61 | if self.options.repo_location else RepoModel().repos_path |
|
61 | if self.options.repo_location else RepoModel().repos_path | |
62 | repo_list = map(strip, self.options.repo_list.split(',')) \ |
|
62 | repo_list = map(strip, self.options.repo_list.split(',')) \ | |
63 | if self.options.repo_list else None |
|
63 | if self.options.repo_list else None | |
64 |
|
64 | |||
65 | repo_update_list = map(strip, self.options.repo_update_list.split(',')) \ |
|
65 | repo_update_list = map(strip, self.options.repo_update_list.split(',')) \ | |
66 | if self.options.repo_update_list else None |
|
66 | if self.options.repo_update_list else None | |
67 |
|
67 | |||
68 | #====================================================================== |
|
68 | #====================================================================== | |
69 | # WHOOSH DAEMON |
|
69 | # WHOOSH DAEMON | |
70 | #====================================================================== |
|
70 | #====================================================================== | |
71 | from kallithea.lib.pidlock import LockHeld, DaemonLock |
|
71 | from kallithea.lib.pidlock import LockHeld, DaemonLock | |
72 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon |
|
72 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon | |
73 | try: |
|
73 | try: | |
74 | l = DaemonLock(file_=os.path.join(dn(dn(index_location)), |
|
74 | l = DaemonLock(file_=os.path.join(dirname(dirname(index_location)), | |
75 | 'make_index.lock')) |
|
75 | 'make_index.lock')) | |
76 | WhooshIndexingDaemon(index_location=index_location, |
|
76 | WhooshIndexingDaemon(index_location=index_location, | |
77 | repo_location=repo_location, |
|
77 | repo_location=repo_location, | |
78 | repo_list=repo_list, |
|
78 | repo_list=repo_list, | |
79 | repo_update_list=repo_update_list) \ |
|
79 | repo_update_list=repo_update_list) \ | |
80 | .run(full_index=self.options.full_index) |
|
80 | .run(full_index=self.options.full_index) | |
81 | l.release() |
|
81 | l.release() | |
82 | except LockHeld: |
|
82 | except LockHeld: | |
83 | sys.exit(1) |
|
83 | sys.exit(1) | |
84 |
|
84 | |||
85 | def update_parser(self): |
|
85 | def update_parser(self): | |
86 | self.parser.add_option('--repo-location', |
|
86 | self.parser.add_option('--repo-location', | |
87 | action='store', |
|
87 | action='store', | |
88 | dest='repo_location', |
|
88 | dest='repo_location', | |
89 | help="Specifies repositories location to index OPTIONAL", |
|
89 | help="Specifies repositories location to index OPTIONAL", | |
90 | ) |
|
90 | ) | |
91 | self.parser.add_option('--index-only', |
|
91 | self.parser.add_option('--index-only', | |
92 | action='store', |
|
92 | action='store', | |
93 | dest='repo_list', |
|
93 | dest='repo_list', | |
94 | help="Specifies a comma separated list of repositories " |
|
94 | help="Specifies a comma separated list of repositories " | |
95 | "to build index on. If not given all repositories " |
|
95 | "to build index on. If not given all repositories " | |
96 | "are scanned for indexing. OPTIONAL", |
|
96 | "are scanned for indexing. OPTIONAL", | |
97 | ) |
|
97 | ) | |
98 | self.parser.add_option('--update-only', |
|
98 | self.parser.add_option('--update-only', | |
99 | action='store', |
|
99 | action='store', | |
100 | dest='repo_update_list', |
|
100 | dest='repo_update_list', | |
101 | help="Specifies a comma separated list of repositories " |
|
101 | help="Specifies a comma separated list of repositories " | |
102 | "to re-build index on. OPTIONAL", |
|
102 | "to re-build index on. OPTIONAL", | |
103 | ) |
|
103 | ) | |
104 | self.parser.add_option('-f', |
|
104 | self.parser.add_option('-f', | |
105 | action='store_true', |
|
105 | action='store_true', | |
106 | dest='full_index', |
|
106 | dest='full_index', | |
107 | help="Specifies that index should be made full i.e" |
|
107 | help="Specifies that index should be made full i.e" | |
108 | " destroy old and build from scratch", |
|
108 | " destroy old and build from scratch", | |
109 | default=False) |
|
109 | default=False) |
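
Only one indexer may run at a time: DaemonLock writes a lock file two directories above the index location, and a second invocation raises LockHeld and exits with status 1 instead of corrupting the index. The general shape of that guard, sketched with Kallithea's own pidlock helpers (run_indexing is a hypothetical placeholder for the WhooshIndexingDaemon run):

    import sys
    from kallithea.lib.pidlock import LockHeld, DaemonLock

    try:
        lock = DaemonLock(file_='make_index.lock')
        run_indexing()  # hypothetical placeholder for the actual indexing work
        lock.release()
    except LockHeld:
        sys.exit(1)  # another indexer already holds the lock
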
@@ -1,80 +1,80 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.make_rcextensions |
|
15 | kallithea.lib.paster_commands.make_rcextensions | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | make-rcext paster command for Kallithea |
|
18 | make-rcext paster command for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Mar 6, 2012 |
|
22 | :created_on: Mar 6, 2012 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 | import pkg_resources |
|
31 | import pkg_resources | |
32 |
|
32 | |||
33 | from kallithea.lib.utils import BasePasterCommand, ask_ok |
|
33 | from kallithea.lib.utils import BasePasterCommand, ask_ok | |
34 |
|
34 | |||
35 | # Add location of top level folder to sys.path |
|
35 | # Add location of top level folder to sys.path | |
36 | from os.path import dirname as dn |
|
36 | from os.path import dirname | |
37 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
37 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
38 | sys.path.append(rc_path) |
|
38 | sys.path.append(rc_path) | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | class Command(BasePasterCommand): |
|
41 | class Command(BasePasterCommand): | |
42 |
|
42 | |||
43 | max_args = 1 |
|
43 | max_args = 1 | |
44 | min_args = 1 |
|
44 | min_args = 1 | |
45 |
|
45 | |||
46 | group_name = "Kallithea" |
|
46 | group_name = "Kallithea" | |
47 | takes_config_file = -1 |
|
47 | takes_config_file = -1 | |
48 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
48 | parser = BasePasterCommand.standard_parser(verbose=True) | |
49 | summary = "Write template file for extending Kallithea in Python." |
|
49 | summary = "Write template file for extending Kallithea in Python." | |
50 | usage = "CONFIG_FILE" |
|
50 | usage = "CONFIG_FILE" | |
51 | description = '''\ |
|
51 | description = '''\ | |
52 | An rcextensions directory with an __init__.py file will be created next to |
|
52 | An rcextensions directory with an __init__.py file will be created next to | |
53 | the ini file. Local customizations in that file will survive upgrades. |
|
53 | the ini file. Local customizations in that file will survive upgrades. | |
54 | The file contains instructions on how it can be customized. |
|
54 | The file contains instructions on how it can be customized. | |
55 | ''' |
|
55 | ''' | |
56 |
|
56 | |||
57 | def command(self): |
|
57 | def command(self): | |
58 | from pylons import config |
|
58 | from pylons import config | |
59 |
|
59 | |||
60 | here = config['here'] |
|
60 | here = config['here'] | |
61 | content = pkg_resources.resource_string( |
|
61 | content = pkg_resources.resource_string( | |
62 | 'kallithea', os.path.join('config', 'rcextensions', '__init__.py') |
|
62 | 'kallithea', os.path.join('config', 'rcextensions', '__init__.py') | |
63 | ) |
|
63 | ) | |
64 | ext_file = os.path.join(here, 'rcextensions', '__init__.py') |
|
64 | ext_file = os.path.join(here, 'rcextensions', '__init__.py') | |
65 | if os.path.exists(ext_file): |
|
65 | if os.path.exists(ext_file): | |
66 | msg = ('Extension file already exists, do you want ' |
|
66 | msg = ('Extension file already exists, do you want ' | |
67 | 'to overwrite it ? [y/n]') |
|
67 | 'to overwrite it ? [y/n]') | |
68 | if not ask_ok(msg): |
|
68 | if not ask_ok(msg): | |
69 | print 'Nothing done, exiting...' |
|
69 | print 'Nothing done, exiting...' | |
70 | return |
|
70 | return | |
71 |
|
71 | |||
72 | dirname = os.path.dirname(ext_file) |
|
72 | dirname = os.path.dirname(ext_file) | |
73 | if not os.path.isdir(dirname): |
|
73 | if not os.path.isdir(dirname): | |
74 | os.makedirs(dirname) |
|
74 | os.makedirs(dirname) | |
75 | with open(ext_file, 'wb') as f: |
|
75 | with open(ext_file, 'wb') as f: | |
76 | f.write(content) |
|
76 | f.write(content) | |
77 | print 'Wrote new extensions file to %s' % ext_file |
|
77 | print 'Wrote new extensions file to %s' % ext_file | |
78 |
|
78 | |||
79 | def update_parser(self): |
|
79 | def update_parser(self): | |
80 | pass |
|
80 | pass |
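
The command boils down to copying a template that ships inside the installed kallithea package to a path next to the ini file, asking before overwriting. The pkg_resources part of that, as a minimal sketch (target_dir stands in for the directory derived from config['here']):

    import os
    import pkg_resources

    # Read the template bundled inside the installed kallithea package.
    content = pkg_resources.resource_string(
        'kallithea', os.path.join('config', 'rcextensions', '__init__.py'))

    # Write it out as a local, upgrade-safe customization point.
    target_dir = 'rcextensions'  # stand-in for os.path.join(config['here'], 'rcextensions')
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    with open(os.path.join(target_dir, '__init__.py'), 'wb') as f:
        f.write(content)
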
@@ -1,69 +1,69 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.repo_scan |
|
15 | kallithea.lib.paster_commands.repo_scan | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | repo-scan paster command for Kallithea |
|
18 | repo-scan paster command for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Feb 9, 2013 |
|
22 | :created_on: Feb 9, 2013 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 |
|
31 | |||
32 | from kallithea.model.scm import ScmModel |
|
32 | from kallithea.model.scm import ScmModel | |
33 | from kallithea.lib.utils import BasePasterCommand, repo2db_mapper |
|
33 | from kallithea.lib.utils import BasePasterCommand, repo2db_mapper | |
34 |
|
34 | |||
35 | # Add location of top level folder to sys.path |
|
35 | # Add location of top level folder to sys.path | |
36 | from os.path import dirname as dn |
|
36 | from os.path import dirname | |
37 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
37 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
38 | sys.path.append(rc_path) |
|
38 | sys.path.append(rc_path) | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | class Command(BasePasterCommand): |
|
41 | class Command(BasePasterCommand): | |
42 |
|
42 | |||
43 | max_args = 1 |
|
43 | max_args = 1 | |
44 | min_args = 1 |
|
44 | min_args = 1 | |
45 |
|
45 | |||
46 | usage = "CONFIG_FILE" |
|
46 | usage = "CONFIG_FILE" | |
47 | group_name = "Kallithea" |
|
47 | group_name = "Kallithea" | |
48 | takes_config_file = -1 |
|
48 | takes_config_file = -1 | |
49 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
49 | parser = BasePasterCommand.standard_parser(verbose=True) | |
50 | summary = "Rescan default location for new repositories" |
|
50 | summary = "Rescan default location for new repositories" | |
51 |
|
51 | |||
52 | def command(self): |
|
52 | def command(self): | |
53 | #get SqlAlchemy session |
|
53 | #get SqlAlchemy session | |
54 | self._init_session() |
|
54 | self._init_session() | |
55 | rm_obsolete = self.options.delete_obsolete |
|
55 | rm_obsolete = self.options.delete_obsolete | |
56 | print 'Now scanning root location for new repos ...' |
|
56 | print 'Now scanning root location for new repos ...' | |
57 | added, removed = repo2db_mapper(ScmModel().repo_scan(), |
|
57 | added, removed = repo2db_mapper(ScmModel().repo_scan(), | |
58 | remove_obsolete=rm_obsolete) |
|
58 | remove_obsolete=rm_obsolete) | |
59 | added = ', '.join(added) or '-' |
|
59 | added = ', '.join(added) or '-' | |
60 | removed = ', '.join(removed) or '-' |
|
60 | removed = ', '.join(removed) or '-' | |
61 | print 'Scan completed added: %s removed: %s' % (added, removed) |
|
61 | print 'Scan completed added: %s removed: %s' % (added, removed) | |
62 |
|
62 | |||
63 | def update_parser(self): |
|
63 | def update_parser(self): | |
64 | self.parser.add_option( |
|
64 | self.parser.add_option( | |
65 | '--delete-obsolete', |
|
65 | '--delete-obsolete', | |
66 | action='store_true', |
|
66 | action='store_true', | |
67 | help="Use this flag do delete repositories that are " |
|
67 | help="Use this flag do delete repositories that are " | |
68 | "present in Kallithea database but not on the filesystem", |
|
68 | "present in Kallithea database but not on the filesystem", | |
69 | ) |
|
69 | ) |
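
repo2db_mapper returns the names it added to and removed from the database; the command joins each list for display, substituting '-' when a list is empty. For example (illustrative values):

    added, removed = ['new-repo'], []
    print 'Scan completed added: %s removed: %s' % (
        ', '.join(added) or '-', ', '.join(removed) or '-')
    # Scan completed added: new-repo removed: -
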
@@ -1,131 +1,131 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.setup_db |
|
15 | kallithea.lib.paster_commands.setup_db | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Database setup paster command for Kallithea |
|
18 | Database setup paster command for Kallithea | |
19 | """ |
|
19 | """ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import os |
|
22 | import os | |
23 | import sys |
|
23 | import sys | |
24 | from paste.script.appinstall import AbstractInstallCommand |
|
24 | from paste.script.appinstall import AbstractInstallCommand | |
25 | from paste.script.command import BadCommand |
|
25 | from paste.script.command import BadCommand | |
26 | from paste.deploy import appconfig |
|
26 | from paste.deploy import appconfig | |
27 |
|
27 | |||
28 | # Add location of top level folder to sys.path |
|
28 | # Add location of top level folder to sys.path | |
29 | from os.path import dirname as dn |
|
29 | from os.path import dirname | |
30 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
30 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
31 | sys.path.append(rc_path) |
|
31 | sys.path.append(rc_path) | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | class Command(AbstractInstallCommand): |
|
34 | class Command(AbstractInstallCommand): | |
35 |
|
35 | |||
36 | default_verbosity = 1 |
|
36 | default_verbosity = 1 | |
37 | max_args = 1 |
|
37 | max_args = 1 | |
38 | min_args = 1 |
|
38 | min_args = 1 | |
39 | summary = "Setup an application, given a config file" |
|
39 | summary = "Setup an application, given a config file" | |
40 | usage = "CONFIG_FILE" |
|
40 | usage = "CONFIG_FILE" | |
41 | group_name = "Kallithea" |
|
41 | group_name = "Kallithea" | |
42 |
|
42 | |||
43 | description = """\ |
|
43 | description = """\ | |
44 | Setup Kallithea according to its configuration file. This is |
|
44 | Setup Kallithea according to its configuration file. This is | |
45 | the second part of a two-phase web application installation |
|
45 | the second part of a two-phase web application installation | |
46 | process (the first phase is prepare-app). The setup process |
|
46 | process (the first phase is prepare-app). The setup process | |
47 | consists of things like setting up databases and creating the super user |
|
47 | consists of things like setting up databases and creating the super user | |
48 | """ |
|
48 | """ | |
49 |
|
49 | |||
50 | parser = AbstractInstallCommand.standard_parser( |
|
50 | parser = AbstractInstallCommand.standard_parser( | |
51 | simulate=True, quiet=True, interactive=True) |
|
51 | simulate=True, quiet=True, interactive=True) | |
52 | parser.add_option('--user', |
|
52 | parser.add_option('--user', | |
53 | action='store', |
|
53 | action='store', | |
54 | dest='username', |
|
54 | dest='username', | |
55 | default=None, |
|
55 | default=None, | |
56 | help='Admin Username') |
|
56 | help='Admin Username') | |
57 | parser.add_option('--email', |
|
57 | parser.add_option('--email', | |
58 | action='store', |
|
58 | action='store', | |
59 | dest='email', |
|
59 | dest='email', | |
60 | default=None, |
|
60 | default=None, | |
61 | help='Admin Email') |
|
61 | help='Admin Email') | |
62 | parser.add_option('--password', |
|
62 | parser.add_option('--password', | |
63 | action='store', |
|
63 | action='store', | |
64 | dest='password', |
|
64 | dest='password', | |
65 | default=None, |
|
65 | default=None, | |
66 | help='Admin password min 6 chars') |
|
66 | help='Admin password min 6 chars') | |
67 | parser.add_option('--repos', |
|
67 | parser.add_option('--repos', | |
68 | action='store', |
|
68 | action='store', | |
69 | dest='repos_location', |
|
69 | dest='repos_location', | |
70 | default=None, |
|
70 | default=None, | |
71 | help='Absolute path to repositories location') |
|
71 | help='Absolute path to repositories location') | |
72 | parser.add_option('--name', |
|
72 | parser.add_option('--name', | |
73 | action='store', |
|
73 | action='store', | |
74 | dest='section_name', |
|
74 | dest='section_name', | |
75 | default=None, |
|
75 | default=None, | |
76 | help='The name of the section to set up (default: app:main)') |
|
76 | help='The name of the section to set up (default: app:main)') | |
77 | parser.add_option('--force-yes', |
|
77 | parser.add_option('--force-yes', | |
78 | action='store_true', |
|
78 | action='store_true', | |
79 | dest='force_ask', |
|
79 | dest='force_ask', | |
80 | default=None, |
|
80 | default=None, | |
81 | help='Force yes to every question') |
|
81 | help='Force yes to every question') | |
82 | parser.add_option('--force-no', |
|
82 | parser.add_option('--force-no', | |
83 | action='store_false', |
|
83 | action='store_false', | |
84 | dest='force_ask', |
|
84 | dest='force_ask', | |
85 | default=None, |
|
85 | default=None, | |
86 | help='Force no to every question') |
|
86 | help='Force no to every question') | |
87 | parser.add_option('--public-access', |
|
87 | parser.add_option('--public-access', | |
88 | action='store_true', |
|
88 | action='store_true', | |
89 | dest='public_access', |
|
89 | dest='public_access', | |
90 | default=None, |
|
90 | default=None, | |
91 | help='Enable public access on this installation (default)') |
|
91 | help='Enable public access on this installation (default)') | |
92 | parser.add_option('--no-public-access', |
|
92 | parser.add_option('--no-public-access', | |
93 | action='store_false', |
|
93 | action='store_false', | |
94 | dest='public_access', |
|
94 | dest='public_access', | |
95 | default=None, |
|
95 | default=None, | |
96 | help='Disable public access on this installation ') |
|
96 | help='Disable public access on this installation ') | |
97 |
|
97 | |||
98 | def command(self): |
|
98 | def command(self): | |
99 | config_spec = self.args[0] |
|
99 | config_spec = self.args[0] | |
100 | section = self.options.section_name |
|
100 | section = self.options.section_name | |
101 | if section is None: |
|
101 | if section is None: | |
102 | if '#' in config_spec: |
|
102 | if '#' in config_spec: | |
103 | config_spec, section = config_spec.split('#', 1) |
|
103 | config_spec, section = config_spec.split('#', 1) | |
104 | else: |
|
104 | else: | |
105 | section = 'main' |
|
105 | section = 'main' | |
106 | if not ':' in section: |
|
106 | if not ':' in section: | |
107 | plain_section = section |
|
107 | plain_section = section | |
108 | section = 'app:' + section |
|
108 | section = 'app:' + section | |
109 | else: |
|
109 | else: | |
110 | plain_section = section.split(':', 1)[0] |
|
110 | plain_section = section.split(':', 1)[0] | |
111 | if not config_spec.startswith('config:'): |
|
111 | if not config_spec.startswith('config:'): | |
112 | config_spec = 'config:' + config_spec |
|
112 | config_spec = 'config:' + config_spec | |
113 | if plain_section != 'main': |
|
113 | if plain_section != 'main': | |
114 | config_spec += '#' + plain_section |
|
114 | config_spec += '#' + plain_section | |
115 | config_file = config_spec[len('config:'):].split('#', 1)[0] |
|
115 | config_file = config_spec[len('config:'):].split('#', 1)[0] | |
116 | config_file = os.path.join(os.getcwd(), config_file) |
|
116 | config_file = os.path.join(os.getcwd(), config_file) | |
117 | self.logging_file_config(config_file) |
|
117 | self.logging_file_config(config_file) | |
118 | conf = appconfig(config_spec, relative_to=os.getcwd()) |
|
118 | conf = appconfig(config_spec, relative_to=os.getcwd()) | |
119 | ep_name = conf.context.entry_point_name |
|
119 | ep_name = conf.context.entry_point_name | |
120 | ep_group = conf.context.protocol |
|
120 | ep_group = conf.context.protocol | |
121 | dist = conf.context.distribution |
|
121 | dist = conf.context.distribution | |
122 | if dist is None: |
|
122 | if dist is None: | |
123 | raise BadCommand( |
|
123 | raise BadCommand( | |
124 | "The section %r is not the application (probably a filter). " |
|
124 | "The section %r is not the application (probably a filter). " | |
125 | "You should add #section_name, where section_name is the " |
|
125 | "You should add #section_name, where section_name is the " | |
126 | "section that configures your application" % plain_section) |
|
126 | "section that configures your application" % plain_section) | |
127 | installer = self.get_installer(dist, ep_group, ep_name) |
|
127 | installer = self.get_installer(dist, ep_group, ep_name) | |
128 | installer.setup_config( |
|
128 | installer.setup_config( | |
129 | self, config_file, section, self.sysconfig_install_vars(installer)) |
|
129 | self, config_file, section, self.sysconfig_install_vars(installer)) | |
130 | self.call_sysconfig_functions( |
|
130 | self.call_sysconfig_functions( | |
131 | 'post_setup_hook', installer, config_file) |
|
131 | 'post_setup_hook', installer, config_file) |
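
The first half of command() only normalizes the argument into a Paste config URI: a bare filename gains a 'config:' prefix, a bare section name gains an 'app:' prefix, and any non-main section is re-appended as a '#' fragment. Traced by hand for a hypothetical invocation with 'my.ini#other':

    config_spec = 'my.ini#other'                     # as given on the command line
    config_spec, section = config_spec.split('#', 1) # 'my.ini', 'other'
    plain_section = section                          # 'other' (no ':' in it)
    section = 'app:' + section                       # 'app:other'
    config_spec = 'config:' + config_spec            # 'config:my.ini'
    if plain_section != 'main':
        config_spec += '#' + plain_section           # 'config:my.ini#other'
    config_file = config_spec[len('config:'):].split('#', 1)[0]  # 'my.ini'
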
@@ -1,86 +1,86 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.paster_commands.update_repoinfo |
|
15 | kallithea.lib.paster_commands.update_repoinfo | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | update-repoinfo paster command for Kallithea |
|
18 | update-repoinfo paster command for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jul 14, 2012 |
|
22 | :created_on: Jul 14, 2012 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 | import string |
|
31 | import string | |
32 |
|
32 | |||
33 | from kallithea.lib.utils import BasePasterCommand |
|
33 | from kallithea.lib.utils import BasePasterCommand | |
34 | from kallithea.model.db import Repository |
|
34 | from kallithea.model.db import Repository | |
35 | from kallithea.model.repo import RepoModel |
|
35 | from kallithea.model.repo import RepoModel | |
36 | from kallithea.model.meta import Session |
|
36 | from kallithea.model.meta import Session | |
37 |
|
37 | |||
38 | # Add location of top level folder to sys.path |
|
38 | # Add location of top level folder to sys.path | |
39 | from os.path import dirname as dn |
|
39 | from os.path import dirname | |
40 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
40 | rc_path = dirname(dirname(dirname(os.path.realpath(__file__)))) | |
41 | sys.path.append(rc_path) |
|
41 | sys.path.append(rc_path) | |
42 |
|
42 | |||
43 |
|
43 | |||
44 | class Command(BasePasterCommand): |
|
44 | class Command(BasePasterCommand): | |
45 |
|
45 | |||
46 | max_args = 1 |
|
46 | max_args = 1 | |
47 | min_args = 1 |
|
47 | min_args = 1 | |
48 |
|
48 | |||
49 | usage = "CONFIG_FILE" |
|
49 | usage = "CONFIG_FILE" | |
50 | group_name = "Kallithea" |
|
50 | group_name = "Kallithea" | |
51 | takes_config_file = -1 |
|
51 | takes_config_file = -1 | |
52 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
52 | parser = BasePasterCommand.standard_parser(verbose=True) | |
53 | summary = "Updates repositories caches for last changeset" |
|
53 | summary = "Updates repositories caches for last changeset" | |
54 |
|
54 | |||
55 | def command(self): |
|
55 | def command(self): | |
56 | #get SqlAlchemy session |
|
56 | #get SqlAlchemy session | |
57 | self._init_session() |
|
57 | self._init_session() | |
58 |
|
58 | |||
59 | repo_update_list = map(string.strip, |
|
59 | repo_update_list = map(string.strip, | |
60 | self.options.repo_update_list.split(',')) \ |
|
60 | self.options.repo_update_list.split(',')) \ | |
61 | if self.options.repo_update_list else None |
|
61 | if self.options.repo_update_list else None | |
62 |
|
62 | |||
63 | if repo_update_list is not None: |
|
63 | if repo_update_list is not None: | |
64 | repo_list = list(Repository.query() \ |
|
64 | repo_list = list(Repository.query() \ | |
65 | .filter(Repository.repo_name.in_(repo_update_list))) |
|
65 | .filter(Repository.repo_name.in_(repo_update_list))) | |
66 | else: |
|
66 | else: | |
67 | repo_list = Repository.getAll() |
|
67 | repo_list = Repository.getAll() | |
68 | RepoModel.update_repoinfo(repositories=repo_list) |
|
68 | RepoModel.update_repoinfo(repositories=repo_list) | |
69 | Session().commit() |
|
69 | Session().commit() | |
70 |
|
70 | |||
71 | if self.options.invalidate_cache: |
|
71 | if self.options.invalidate_cache: | |
72 | for r in repo_list: |
|
72 | for r in repo_list: | |
73 | r.set_invalidate() |
|
73 | r.set_invalidate() | |
74 | print 'Updated cache for %s repositories' % (len(repo_list)) |
|
74 | print 'Updated cache for %s repositories' % (len(repo_list)) | |
75 |
|
75 | |||
76 | def update_parser(self): |
|
76 | def update_parser(self): | |
77 | self.parser.add_option('--update-only', |
|
77 | self.parser.add_option('--update-only', | |
78 | action='store', |
|
78 | action='store', | |
79 | dest='repo_update_list', |
|
79 | dest='repo_update_list', | |
80 | help="Specifies a comma separated list of repositories " |
|
80 | help="Specifies a comma separated list of repositories " | |
81 | "to update last commit info for. OPTIONAL") |
|
81 | "to update last commit info for. OPTIONAL") | |
82 | self.parser.add_option('--invalidate-cache', |
|
82 | self.parser.add_option('--invalidate-cache', | |
83 | action='store_true', |
|
83 | action='store_true', | |
84 | dest='invalidate_cache', |
|
84 | dest='invalidate_cache', | |
85 | help="Trigger cache invalidation event for repos. " |
|
85 | help="Trigger cache invalidation event for repos. " | |
86 | "OPTIONAL") |
|
86 | "OPTIONAL") |
@@ -1,875 +1,875 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.utils |
|
15 | kallithea.lib.utils | |
16 | ~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Utilities library for Kallithea |
|
18 | Utilities library for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 18, 2010 |
|
22 | :created_on: Apr 18, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import os |
|
28 | import os | |
29 | import re |
|
29 | import re | |
30 | import logging |
|
30 | import logging | |
31 | import datetime |
|
31 | import datetime | |
32 | import traceback |
|
32 | import traceback | |
33 | import paste |
|
33 | import paste | |
34 | import beaker |
|
34 | import beaker | |
35 | import tarfile |
|
35 | import tarfile | |
36 | import shutil |
|
36 | import shutil | |
37 | import decorator |
|
37 | import decorator | |
38 | import warnings |
|
38 | import warnings | |
39 | from os.path import abspath |
|
39 | from os.path import abspath | |
40 | from os.path import dirname as dn |
|
40 | from os.path import dirname | |
41 |
|
41 | |||
42 | from paste.script.command import Command, BadCommand |
|
42 | from paste.script.command import Command, BadCommand | |
43 |
|
43 | |||
44 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
44 | from webhelpers.text import collapse, remove_formatting, strip_tags | |
45 | from beaker.cache import _cache_decorate |
|
45 | from beaker.cache import _cache_decorate | |
46 |
|
46 | |||
47 | from kallithea import BRAND |
|
47 | from kallithea import BRAND | |
48 |
|
48 | |||
49 | from kallithea.lib.vcs.utils.hgcompat import ui, config |
|
49 | from kallithea.lib.vcs.utils.hgcompat import ui, config | |
50 | from kallithea.lib.vcs.utils.helpers import get_scm |
|
50 | from kallithea.lib.vcs.utils.helpers import get_scm | |
51 | from kallithea.lib.vcs.exceptions import VCSError |
|
51 | from kallithea.lib.vcs.exceptions import VCSError | |
52 |
|
52 | |||
53 | from kallithea.model import meta |
|
53 | from kallithea.model import meta | |
54 | from kallithea.model.db import Repository, User, Ui, \ |
|
54 | from kallithea.model.db import Repository, User, Ui, \ | |
55 | UserLog, RepoGroup, Setting, UserGroup |
|
55 | UserLog, RepoGroup, Setting, UserGroup | |
56 | from kallithea.model.meta import Session |
|
56 | from kallithea.model.meta import Session | |
57 | from kallithea.model.repo_group import RepoGroupModel |
|
57 | from kallithea.model.repo_group import RepoGroupModel | |
58 | from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser |
|
58 | from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser | |
59 | from kallithea.lib.vcs.utils.fakemod import create_module |
|
59 | from kallithea.lib.vcs.utils.fakemod import create_module | |
60 |
|
60 | |||
61 | log = logging.getLogger(__name__) |
|
61 | log = logging.getLogger(__name__) | |
62 |
|
62 | |||
63 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*') |
|
63 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*') | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | def recursive_replace(str_, replace=' '): |
|
66 | def recursive_replace(str_, replace=' '): | |
67 | """ |
|
67 | """ | |
68 | Recursive replace of given sign to just one instance |
|
68 | Recursive replace of given sign to just one instance | |
69 |
|
69 | |||
70 | :param str_: given string |
|
70 | :param str_: given string | |
71 | :param replace: char to find and replace multiple instances |
|
71 | :param replace: char to find and replace multiple instances | |
72 |
|
72 | |||
73 | Examples:: |
|
73 | Examples:: | |
74 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
74 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') | |
75 | 'Mighty-Mighty-Bo-sstones' |
|
75 | 'Mighty-Mighty-Bo-sstones' | |
76 | """ |
|
76 | """ | |
77 |
|
77 | |||
78 | if str_.find(replace * 2) == -1: |
|
78 | if str_.find(replace * 2) == -1: | |
79 | return str_ |
|
79 | return str_ | |
80 | else: |
|
80 | else: | |
81 | str_ = str_.replace(replace * 2, replace) |
|
81 | str_ = str_.replace(replace * 2, replace) | |
82 | return recursive_replace(str_, replace) |
|
82 | return recursive_replace(str_, replace) | |
83 |
|
83 | |||
84 |
|
84 | |||
85 | def repo_name_slug(value): |
|
85 | def repo_name_slug(value): | |
86 | """ |
|
86 | """ | |
87 | Return slug of name of repository |
|
87 | Return slug of name of repository | |
88 | This function is called on each creation/modification |
|
88 | This function is called on each creation/modification | |
89 | of repository to prevent bad names in repo |
|
89 | of repository to prevent bad names in repo | |
90 | """ |
|
90 | """ | |
91 |
|
91 | |||
92 | slug = remove_formatting(value) |
|
92 | slug = remove_formatting(value) | |
93 | slug = strip_tags(slug) |
|
93 | slug = strip_tags(slug) | |
94 |
|
94 | |||
95 | for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
95 | for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: | |
96 | slug = slug.replace(c, '-') |
|
96 | slug = slug.replace(c, '-') | |
97 | slug = recursive_replace(slug, '-') |
|
97 | slug = recursive_replace(slug, '-') | |
98 | slug = collapse(slug, '-') |
|
98 | slug = collapse(slug, '-') | |
99 | return slug |
|
99 | return slug | |
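
repo_name_slug first strips formatting and tags, then replaces every character from the blacklist (including spaces) with a dash and collapses any resulting runs. An illustrative trace:

    repo_name_slug('my repo!!name')
    # ' '  -> '-', '!' -> '-'         : 'my-repo--name'
    # recursive_replace collapses '--': 'my-repo-name'
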
100 |
|
100 | |||
101 |
|
101 | |||
102 | #============================================================================== |
|
102 | #============================================================================== | |
103 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
103 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS | |
104 | #============================================================================== |
|
104 | #============================================================================== | |
105 | def get_repo_slug(request): |
|
105 | def get_repo_slug(request): | |
106 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
106 | _repo = request.environ['pylons.routes_dict'].get('repo_name') | |
107 | if _repo: |
|
107 | if _repo: | |
108 | _repo = _repo.rstrip('/') |
|
108 | _repo = _repo.rstrip('/') | |
109 | return _repo |
|
109 | return _repo | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | def get_repo_group_slug(request): |
|
112 | def get_repo_group_slug(request): | |
113 | _group = request.environ['pylons.routes_dict'].get('group_name') |
|
113 | _group = request.environ['pylons.routes_dict'].get('group_name') | |
114 | if _group: |
|
114 | if _group: | |
115 | _group = _group.rstrip('/') |
|
115 | _group = _group.rstrip('/') | |
116 | return _group |
|
116 | return _group | |
117 |
|
117 | |||
118 |
|
118 | |||
119 | def get_user_group_slug(request): |
|
119 | def get_user_group_slug(request): | |
120 | _group = request.environ['pylons.routes_dict'].get('id') |
|
120 | _group = request.environ['pylons.routes_dict'].get('id') | |
121 | _group = UserGroup.get(_group) |
|
121 | _group = UserGroup.get(_group) | |
122 | if _group: |
|
122 | if _group: | |
123 | return _group.users_group_name |
|
123 | return _group.users_group_name | |
124 | return None |
|
124 | return None | |
125 |
|
125 | |||
126 |
|
126 | |||
127 | def _extract_id_from_repo_name(repo_name): |
|
127 | def _extract_id_from_repo_name(repo_name): | |
128 | if repo_name.startswith('/'): |
|
128 | if repo_name.startswith('/'): | |
129 | repo_name = repo_name.lstrip('/') |
|
129 | repo_name = repo_name.lstrip('/') | |
130 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
130 | by_id_match = re.match(r'^_(\d{1,})', repo_name) | |
131 | if by_id_match: |
|
131 | if by_id_match: | |
132 | return by_id_match.groups()[0] |
|
132 | return by_id_match.groups()[0] | |
133 |
|
133 | |||
134 |
|
134 | |||
135 | def get_repo_by_id(repo_name): |
|
135 | def get_repo_by_id(repo_name): | |
136 | """ |
|
136 | """ | |
137 | Extracts repo_name by id from special URLs. An example URL is _11/repo_name |
|
137 | Extracts repo_name by id from special URLs. An example URL is _11/repo_name |
138 |
|
138 | |||
139 | :param repo_name: |
|
139 | :param repo_name: | |
140 | :return: repo_name if matched else None |
|
140 | :return: repo_name if matched else None | |
141 | """ |
|
141 | """ | |
142 | _repo_id = _extract_id_from_repo_name(repo_name) |
|
142 | _repo_id = _extract_id_from_repo_name(repo_name) | |
143 | if _repo_id: |
|
143 | if _repo_id: | |
144 | from kallithea.model.db import Repository |
|
144 | from kallithea.model.db import Repository | |
145 | repo = Repository.get(_repo_id) |
|
145 | repo = Repository.get(_repo_id) | |
146 | if repo: |
|
146 | if repo: | |
147 | # TODO: return repo instead of reponame? or would that be a layering violation? |
|
147 | # TODO: return repo instead of reponame? or would that be a layering violation? | |
148 | return repo.repo_name |
|
148 | return repo.repo_name | |
149 | return None |
|
149 | return None | |
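
A minimal doctest-style sketch of the id extraction above (no database access; only the regular expression is exercised)::

    >>> _extract_id_from_repo_name('_11/repo_name')
    '11'
    >>> _extract_id_from_repo_name('/_5')
    '5'
    >>> _extract_id_from_repo_name('repo_name')  # no leading _<digits>: returns None
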
150 |
|
150 | |||
151 |
|
151 | |||
152 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): |
|
152 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): | |
153 | """ |
|
153 | """ | |
154 | Action logger for various actions made by users |
|
154 | Action logger for various actions made by users | |
155 |
|
155 | |||
156 | :param user: user that made this action, can be a unique username string or |
|
156 | :param user: user that made this action, can be a unique username string or | |
157 | object containing user_id attribute |
|
157 | object containing user_id attribute | |
158 | :param action: action to log, should be one of the predefined unique actions for |
|
158 | :param action: action to log, should be one of the predefined unique actions for |
159 | easy translations |
|
159 | easy translations | |
160 | :param repo: string name of the repository, or object containing repo_id, |
|
160 | :param repo: string name of the repository, or object containing repo_id, |
161 | that the action was made on |
|
161 | that the action was made on |
162 | :param ipaddr: optional IP address from which the action was made |
|
162 | :param ipaddr: optional IP address from which the action was made |
163 | :param sa: optional sqlalchemy session |
|
163 | :param sa: optional sqlalchemy session | |
164 |
|
164 | |||
165 | """ |
|
165 | """ | |
166 |
|
166 | |||
167 | if not sa: |
|
167 | if not sa: | |
168 | sa = meta.Session() |
|
168 | sa = meta.Session() | |
169 | # if we don't get an explicit IP address, try to get one from the registered user |
|
169 | # if we don't get an explicit IP address, try to get one from the registered user |
170 | # in the tmpl context var |
|
170 | # in the tmpl context var |
171 | if not ipaddr: |
|
171 | if not ipaddr: | |
172 | ipaddr = getattr(get_current_authuser(), 'ip_addr', '') |
|
172 | ipaddr = getattr(get_current_authuser(), 'ip_addr', '') | |
173 |
|
173 | |||
174 | if getattr(user, 'user_id', None): |
|
174 | if getattr(user, 'user_id', None): | |
175 | user_obj = User.get(user.user_id) |
|
175 | user_obj = User.get(user.user_id) | |
176 | elif isinstance(user, basestring): |
|
176 | elif isinstance(user, basestring): | |
177 | user_obj = User.get_by_username(user) |
|
177 | user_obj = User.get_by_username(user) | |
178 | else: |
|
178 | else: | |
179 | raise Exception('You have to provide a user object or a username') |
|
179 | raise Exception('You have to provide a user object or a username') | |
180 |
|
180 | |||
181 | if getattr(repo, 'repo_id', None): |
|
181 | if getattr(repo, 'repo_id', None): | |
182 | repo_obj = Repository.get(repo.repo_id) |
|
182 | repo_obj = Repository.get(repo.repo_id) | |
183 | repo_name = repo_obj.repo_name |
|
183 | repo_name = repo_obj.repo_name | |
184 | elif isinstance(repo, basestring): |
|
184 | elif isinstance(repo, basestring): | |
185 | repo_name = repo.lstrip('/') |
|
185 | repo_name = repo.lstrip('/') | |
186 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
186 | repo_obj = Repository.get_by_repo_name(repo_name) | |
187 | else: |
|
187 | else: | |
188 | repo_obj = None |
|
188 | repo_obj = None | |
189 | repo_name = u'' |
|
189 | repo_name = u'' | |
190 |
|
190 | |||
191 | user_log = UserLog() |
|
191 | user_log = UserLog() | |
192 | user_log.user_id = user_obj.user_id |
|
192 | user_log.user_id = user_obj.user_id | |
193 | user_log.username = user_obj.username |
|
193 | user_log.username = user_obj.username | |
194 | user_log.action = safe_unicode(action) |
|
194 | user_log.action = safe_unicode(action) | |
195 |
|
195 | |||
196 | user_log.repository = repo_obj |
|
196 | user_log.repository = repo_obj | |
197 | user_log.repository_name = repo_name |
|
197 | user_log.repository_name = repo_name | |
198 |
|
198 | |||
199 | user_log.action_date = datetime.datetime.now() |
|
199 | user_log.action_date = datetime.datetime.now() | |
200 | user_log.user_ip = ipaddr |
|
200 | user_log.user_ip = ipaddr | |
201 | sa.add(user_log) |
|
201 | sa.add(user_log) | |
202 |
|
202 | |||
203 | log.info('Logging action:%s on %s by user:%s ip:%s', |
|
203 | log.info('Logging action:%s on %s by user:%s ip:%s', | |
204 | action, safe_unicode(repo), user_obj, ipaddr) |
|
204 | action, safe_unicode(repo), user_obj, ipaddr) | |
205 | if commit: |
|
205 | if commit: | |
206 | sa.commit() |
|
206 | sa.commit() | |
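
A hypothetical call — the action key and names here are illustrative, not taken from the predefined action list::

    action_logger(user='admin', action='user_created_repo',
                  repo='group/myrepo', commit=True)
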
207 |
|
207 | |||
208 |
|
208 | |||
209 | def get_filesystem_repos(path, recursive=False, skip_removed_repos=True): |
|
209 | def get_filesystem_repos(path, recursive=False, skip_removed_repos=True): | |
210 | """ |
|
210 | """ | |
211 | Scans the given path for repos and yields (name, (type, path)) tuples |
|
211 | Scans the given path for repos and yields (name, (type, path)) tuples |
212 |
|
212 | |||
213 | :param path: path to scan for repositories |
|
213 | :param path: path to scan for repositories | |
214 | :param recursive: recursive search and return names with subdirs in front |
|
214 | :param recursive: recursive search and return names with subdirs in front | |
215 | """ |
|
215 | """ | |
216 |
|
216 | |||
217 | # remove ending slash for better results |
|
217 | # remove ending slash for better results | |
218 | path = safe_str(path.rstrip(os.sep)) |
|
218 | path = safe_str(path.rstrip(os.sep)) | |
219 | log.debug('now scanning in %s location recursive:%s...', path, recursive) |
|
219 | log.debug('now scanning in %s location recursive:%s...', path, recursive) | |
220 |
|
220 | |||
221 | def _get_repos(p): |
|
221 | def _get_repos(p): | |
222 | if not os.access(p, os.R_OK) or not os.access(p, os.X_OK): |
|
222 | if not os.access(p, os.R_OK) or not os.access(p, os.X_OK): | |
223 | log.warning('ignoring repo path without access: %s', p) |
|
223 | log.warning('ignoring repo path without access: %s', p) | |
224 | return |
|
224 | return | |
225 | if not os.access(p, os.W_OK): |
|
225 | if not os.access(p, os.W_OK): | |
226 | log.warning('repo path without write access: %s', p) |
|
226 | log.warning('repo path without write access: %s', p) | |
227 | for dirpath in os.listdir(p): |
|
227 | for dirpath in os.listdir(p): | |
228 | if os.path.isfile(os.path.join(p, dirpath)): |
|
228 | if os.path.isfile(os.path.join(p, dirpath)): | |
229 | continue |
|
229 | continue | |
230 | cur_path = os.path.join(p, dirpath) |
|
230 | cur_path = os.path.join(p, dirpath) | |
231 |
|
231 | |||
232 | # skip removed repos |
|
232 | # skip removed repos | |
233 | if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath): |
|
233 | if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath): | |
234 | continue |
|
234 | continue | |
235 |
|
235 | |||
236 | # skip .<something> dirs TODO: really? then we should prevent creating them ... |
|
236 | # skip .<something> dirs TODO: really? then we should prevent creating them ... |
237 | if dirpath.startswith('.'): |
|
237 | if dirpath.startswith('.'): | |
238 | continue |
|
238 | continue | |
239 |
|
239 | |||
240 | try: |
|
240 | try: | |
241 | scm_info = get_scm(cur_path) |
|
241 | scm_info = get_scm(cur_path) | |
242 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
242 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info | |
243 | except VCSError: |
|
243 | except VCSError: | |
244 | if not recursive: |
|
244 | if not recursive: | |
245 | continue |
|
245 | continue | |
246 | #check if this dir contains other repos for recursive scan |
|
246 | #check if this dir contains other repos for recursive scan | |
247 | rec_path = os.path.join(p, dirpath) |
|
247 | rec_path = os.path.join(p, dirpath) | |
248 | if not os.path.islink(rec_path) and os.path.isdir(rec_path): |
|
248 | if not os.path.islink(rec_path) and os.path.isdir(rec_path): | |
249 | for inner_scm in _get_repos(rec_path): |
|
249 | for inner_scm in _get_repos(rec_path): | |
250 | yield inner_scm |
|
250 | yield inner_scm | |
251 |
|
251 | |||
252 | return _get_repos(path) |
|
252 | return _get_repos(path) | |
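
A sketch of consuming the generator (base path illustrative); each item is the relative repo name plus the (type, path) info from get_scm::

    for name, (scm_type, scm_path) in get_filesystem_repos('/srv/repos',
                                                           recursive=True):
        print name, scm_type
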
253 |
|
253 | |||
254 |
|
254 | |||
255 | def is_valid_repo(repo_name, base_path, scm=None): |
|
255 | def is_valid_repo(repo_name, base_path, scm=None): | |
256 | """ |
|
256 | """ | |
257 | Returns True if the given path is a valid repository, False otherwise. |
|
257 | Returns True if the given path is a valid repository, False otherwise. |
258 | If the scm param is given, also check that the repository type matches |
|
258 | If the scm param is given, also check that the repository type matches |
259 | the given scm |
|
259 | the given scm |
260 |
|
260 | |||
261 | :param repo_name: |
|
261 | :param repo_name: | |
262 | :param base_path: |
|
262 | :param base_path: | |
263 | :param scm: |
|
263 | :param scm: | |
264 |
|
264 | |||
265 | :return True: if given path is a valid repository |
|
265 | :return True: if given path is a valid repository | |
266 | """ |
|
266 | """ | |
267 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
267 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) | |
268 |
|
268 | |||
269 | try: |
|
269 | try: | |
270 | scm_ = get_scm(full_path) |
|
270 | scm_ = get_scm(full_path) | |
271 | if scm: |
|
271 | if scm: | |
272 | return scm_[0] == scm |
|
272 | return scm_[0] == scm | |
273 | return True |
|
273 | return True | |
274 | except VCSError: |
|
274 | except VCSError: | |
275 | return False |
|
275 | return False | |
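
Hypothetical checks (paths illustrative)::

    is_valid_repo('group/myrepo', '/srv/repos')            # any repo type
    is_valid_repo('group/myrepo', '/srv/repos', scm='hg')  # must be Mercurial
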
276 |
|
276 | |||
277 |
|
277 | |||
278 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
278 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): | |
279 | """ |
|
279 | """ | |
280 | Returns True if the given path is a repository group, False otherwise |
|
280 | Returns True if the given path is a repository group, False otherwise |
281 |
|
281 | |||
282 | :param repo_group_name: |
|
282 | :param repo_group_name: |
283 | :param base_path: |
|
283 | :param base_path: | |
284 | """ |
|
284 | """ | |
285 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
285 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) | |
286 |
|
286 | |||
287 | # check if it's not a repo |
|
287 | # check if it's not a repo | |
288 | if is_valid_repo(repo_group_name, base_path): |
|
288 | if is_valid_repo(repo_group_name, base_path): | |
289 | return False |
|
289 | return False | |
290 |
|
290 | |||
291 | try: |
|
291 | try: | |
292 | # we need to check bare git repos at higher level |
|
292 | # we need to check bare git repos at higher level | |
293 | # since we might match branches/hooks/info/objects or possibly |
|
293 | # since we might match branches/hooks/info/objects or possibly |
294 | # other things inside bare git repo |
|
294 | # other things inside bare git repo | |
295 | get_scm(os.path.dirname(full_path)) |
|
295 | get_scm(os.path.dirname(full_path)) | |
296 | return False |
|
296 | return False | |
297 | except VCSError: |
|
297 | except VCSError: | |
298 | pass |
|
298 | pass | |
299 |
|
299 | |||
300 | # check if it's a valid path |
|
300 | # check if it's a valid path | |
301 | if skip_path_check or os.path.isdir(full_path): |
|
301 | if skip_path_check or os.path.isdir(full_path): | |
302 | return True |
|
302 | return True | |
303 |
|
303 | |||
304 | return False |
|
304 | return False | |
305 |
|
305 | |||
306 |
|
306 | |||
307 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
307 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): | |
308 | while True: |
|
308 | while True: | |
309 | ok = raw_input(prompt) |
|
309 | ok = raw_input(prompt) | |
310 | if ok in ('y', 'ye', 'yes'): |
|
310 | if ok in ('y', 'ye', 'yes'): | |
311 | return True |
|
311 | return True | |
312 | if ok in ('n', 'no', 'nop', 'nope'): |
|
312 | if ok in ('n', 'no', 'nop', 'nope'): | |
313 | return False |
|
313 | return False | |
314 | retries = retries - 1 |
|
314 | retries = retries - 1 | |
315 | if retries < 0: |
|
315 | if retries < 0: | |
316 | raise IOError |
|
316 | raise IOError | |
317 | print complaint |
|
317 | print complaint | |
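
Usage sketch::

    if ask_ok('Are you sure you want to continue? [y/n] '):
        proceed()  # hypothetical callback
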
318 |
|
318 | |||
319 | # section names taken from the Mercurial documentation |
|
319 | # section names taken from the Mercurial documentation |
320 | ui_sections = ['alias', 'auth', |
|
320 | ui_sections = ['alias', 'auth', | |
321 | 'decode/encode', 'defaults', |
|
321 | 'decode/encode', 'defaults', | |
322 | 'diff', 'email', |
|
322 | 'diff', 'email', | |
323 | 'extensions', 'format', |
|
323 | 'extensions', 'format', | |
324 | 'merge-patterns', 'merge-tools', |
|
324 | 'merge-patterns', 'merge-tools', | |
325 | 'hooks', 'http_proxy', |
|
325 | 'hooks', 'http_proxy', | |
326 | 'smtp', 'patch', |
|
326 | 'smtp', 'patch', | |
327 | 'paths', 'profiling', |
|
327 | 'paths', 'profiling', | |
328 | 'server', 'trusted', |
|
328 | 'server', 'trusted', | |
329 | 'ui', 'web', ] |
|
329 | 'ui', 'web', ] | |
330 |
|
330 | |||
331 |
|
331 | |||
332 | def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True): |
|
332 | def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True): | |
333 | """ |
|
333 | """ | |
334 | A function that reads hgrc files or the database |
|
334 | A function that reads hgrc files or the database |
335 | and makes a Mercurial ui object from the read options |
|
335 | and makes a Mercurial ui object from the read options |
336 |
|
336 | |||
337 | :param path: path to mercurial config file |
|
337 | :param path: path to mercurial config file | |
338 | :param checkpaths: check the path |
|
338 | :param checkpaths: check the path | |
339 | :param read_from: read from 'file' or 'db' |
|
339 | :param read_from: read from 'file' or 'db' | |
340 | """ |
|
340 | """ | |
341 |
|
341 | |||
342 | baseui = ui.ui() |
|
342 | baseui = ui.ui() | |
343 |
|
343 | |||
344 | # clean the baseui object |
|
344 | # clean the baseui object | |
345 | baseui._ocfg = config.config() |
|
345 | baseui._ocfg = config.config() | |
346 | baseui._ucfg = config.config() |
|
346 | baseui._ucfg = config.config() | |
347 | baseui._tcfg = config.config() |
|
347 | baseui._tcfg = config.config() | |
348 |
|
348 | |||
349 | if read_from == 'file': |
|
349 | if read_from == 'file': | |
350 | if not os.path.isfile(path): |
|
350 | if not os.path.isfile(path): | |
351 | log.debug('hgrc file is not present at %s, skipping...', path) |
|
351 | log.debug('hgrc file is not present at %s, skipping...', path) | |
352 | return False |
|
352 | return False | |
353 | log.debug('reading hgrc from %s', path) |
|
353 | log.debug('reading hgrc from %s', path) | |
354 | cfg = config.config() |
|
354 | cfg = config.config() | |
355 | cfg.read(path) |
|
355 | cfg.read(path) | |
356 | for section in ui_sections: |
|
356 | for section in ui_sections: | |
357 | for k, v in cfg.items(section): |
|
357 | for k, v in cfg.items(section): | |
358 | log.debug('setting ui from file: [%s] %s=%s', section, k, v) |
|
358 | log.debug('setting ui from file: [%s] %s=%s', section, k, v) |
359 | baseui.setconfig(safe_str(section), safe_str(k), safe_str(v)) |
|
359 | baseui.setconfig(safe_str(section), safe_str(k), safe_str(v)) | |
360 |
|
360 | |||
361 | elif read_from == 'db': |
|
361 | elif read_from == 'db': | |
362 | sa = meta.Session() |
|
362 | sa = meta.Session() | |
363 | ret = sa.query(Ui).all() |
|
363 | ret = sa.query(Ui).all() | |
364 |
|
364 | |||
365 | hg_ui = ret |
|
365 | hg_ui = ret | |
366 | for ui_ in hg_ui: |
|
366 | for ui_ in hg_ui: | |
367 | if ui_.ui_active: |
|
367 | if ui_.ui_active: | |
368 | ui_val = safe_str(ui_.ui_value) |
|
368 | ui_val = safe_str(ui_.ui_value) | |
369 | if ui_.ui_section == 'hooks' and BRAND != 'kallithea' and ui_val.startswith('python:' + BRAND + '.lib.hooks.'): |
|
369 | if ui_.ui_section == 'hooks' and BRAND != 'kallithea' and ui_val.startswith('python:' + BRAND + '.lib.hooks.'): | |
370 | ui_val = ui_val.replace('python:' + BRAND + '.lib.hooks.', 'python:kallithea.lib.hooks.') |
|
370 | ui_val = ui_val.replace('python:' + BRAND + '.lib.hooks.', 'python:kallithea.lib.hooks.') | |
371 | log.debug('setting ui from db: [%s] %s=%s', ui_.ui_section, |
|
371 | log.debug('setting ui from db: [%s] %s=%s', ui_.ui_section, |
372 | ui_.ui_key, ui_val) |
|
372 | ui_.ui_key, ui_val) | |
373 | baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key), |
|
373 | baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key), | |
374 | ui_val) |
|
374 | ui_val) | |
375 | if ui_.ui_key == 'push_ssl': |
|
375 | if ui_.ui_key == 'push_ssl': | |
376 | # force set push_ssl requirement to False, kallithea |
|
376 | # force set push_ssl requirement to False, kallithea | |
377 | # handles that |
|
377 | # handles that | |
378 | baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key), |
|
378 | baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key), | |
379 | False) |
|
379 | False) | |
380 | if clear_session: |
|
380 | if clear_session: | |
381 | meta.Session.remove() |
|
381 | meta.Session.remove() | |
382 |
|
382 | |||
383 | # prevent interactive questions for ssh password / passphrase |
|
383 | # prevent interactive questions for ssh password / passphrase | |
384 | ssh = baseui.config('ui', 'ssh', default='ssh') |
|
384 | ssh = baseui.config('ui', 'ssh', default='ssh') | |
385 | baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh) |
|
385 | baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh) | |
386 |
|
386 | |||
387 | return baseui |
|
387 | return baseui | |
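
Hypothetical calls covering both sources (the hgrc path is illustrative)::

    baseui = make_ui('db', clear_session=False)
    baseui = make_ui('file', path='/srv/repos/.hgrc')
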
388 |
|
388 | |||
389 |
|
389 | |||
390 | def set_app_settings(config): |
|
390 | def set_app_settings(config): | |
391 | """ |
|
391 | """ | |
392 | Updates pylons config with new settings from database |
|
392 | Updates pylons config with new settings from database | |
393 |
|
393 | |||
394 | :param config: |
|
394 | :param config: | |
395 | """ |
|
395 | """ | |
396 | hgsettings = Setting.get_app_settings() |
|
396 | hgsettings = Setting.get_app_settings() | |
397 |
|
397 | |||
398 | for k, v in hgsettings.items(): |
|
398 | for k, v in hgsettings.items(): | |
399 | config[k] = v |
|
399 | config[k] = v | |
400 |
|
400 | |||
401 |
|
401 | |||
402 | def set_vcs_config(config): |
|
402 | def set_vcs_config(config): | |
403 | """ |
|
403 | """ | |
404 | Patch VCS config with some Kallithea specific stuff |
|
404 | Patch VCS config with some Kallithea specific stuff | |
405 |
|
405 | |||
406 | :param config: kallithea.CONFIG |
|
406 | :param config: kallithea.CONFIG | |
407 | """ |
|
407 | """ | |
408 | from kallithea.lib.vcs import conf |
|
408 | from kallithea.lib.vcs import conf | |
409 | from kallithea.lib.utils2 import aslist |
|
409 | from kallithea.lib.utils2 import aslist | |
410 | conf.settings.BACKENDS = { |
|
410 | conf.settings.BACKENDS = { | |
411 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', |
|
411 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', | |
412 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', |
|
412 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', | |
413 | } |
|
413 | } | |
414 |
|
414 | |||
415 | conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git') |
|
415 | conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git') | |
416 | conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip() |
|
416 | conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip() | |
417 | conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding', |
|
417 | conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding', | |
418 | 'utf8'), sep=',') |
|
418 | 'utf8'), sep=',') | |
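
A sketch of the ini keys this reads (values illustrative)::

    git_path = /usr/bin/git
    git_rev_filter = --branches --tags
    default_encoding = utf8
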
419 |
|
419 | |||
420 |
|
420 | |||
421 | def set_indexer_config(config): |
|
421 | def set_indexer_config(config): | |
422 | """ |
|
422 | """ | |
423 | Update Whoosh index mapping |
|
423 | Update Whoosh index mapping | |
424 |
|
424 | |||
425 | :param config: kallithea.CONFIG |
|
425 | :param config: kallithea.CONFIG | |
426 | """ |
|
426 | """ | |
427 | from kallithea.config import conf |
|
427 | from kallithea.config import conf | |
428 |
|
428 | |||
429 | log.debug('adding extra into INDEX_EXTENSIONS') |
|
429 | log.debug('adding extra into INDEX_EXTENSIONS') | |
430 | conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', ''))) |
|
430 | conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', ''))) |
431 |
|
431 | |||
432 | log.debug('adding extra into INDEX_FILENAMES') |
|
432 | log.debug('adding extra into INDEX_FILENAMES') | |
433 | conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', ''))) |
|
433 | conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', ''))) |
434 |
|
434 | |||
435 |
|
435 | |||
436 | def map_groups(path): |
|
436 | def map_groups(path): | |
437 | """ |
|
437 | """ | |
438 | Given a full path to a repository, create all nested groups that this |
|
438 | Given a full path to a repository, create all nested groups that this | |
439 | repo is inside. This function creates parent-child relationships between |
|
439 | repo is inside. This function creates parent-child relationships between | |
440 | groups and creates default perms for all new groups. |
|
440 | groups and creates default perms for all new groups. | |
441 |
|
441 | |||
442 | :param path: full path to repository |
|
442 | :param path: full path to repository |
443 | """ |
|
443 | """ | |
444 | sa = meta.Session() |
|
444 | sa = meta.Session() | |
445 | groups = path.split(Repository.url_sep()) |
|
445 | groups = path.split(Repository.url_sep()) | |
446 | parent = None |
|
446 | parent = None | |
447 | group = None |
|
447 | group = None | |
448 |
|
448 | |||
449 | # last element is repo in nested groups structure |
|
449 | # last element is repo in nested groups structure | |
450 | groups = groups[:-1] |
|
450 | groups = groups[:-1] | |
451 | rgm = RepoGroupModel(sa) |
|
451 | rgm = RepoGroupModel(sa) | |
452 | owner = User.get_first_admin() |
|
452 | owner = User.get_first_admin() | |
453 | for lvl, group_name in enumerate(groups): |
|
453 | for lvl, group_name in enumerate(groups): | |
454 | group_name = u'/'.join(groups[:lvl] + [group_name]) |
|
454 | group_name = u'/'.join(groups[:lvl] + [group_name]) | |
455 | group = RepoGroup.get_by_group_name(group_name) |
|
455 | group = RepoGroup.get_by_group_name(group_name) | |
456 | desc = '%s group' % group_name |
|
456 | desc = '%s group' % group_name | |
457 |
|
457 | |||
458 | # skip folders that are now removed repos |
|
458 | # skip folders that are now removed repos | |
459 | if REMOVED_REPO_PAT.match(group_name): |
|
459 | if REMOVED_REPO_PAT.match(group_name): | |
460 | break |
|
460 | break | |
461 |
|
461 | |||
462 | if group is None: |
|
462 | if group is None: | |
463 | log.debug('creating group level: %s group_name: %s', |
|
463 | log.debug('creating group level: %s group_name: %s', | |
464 | lvl, group_name) |
|
464 | lvl, group_name) | |
465 | group = RepoGroup(group_name, parent) |
|
465 | group = RepoGroup(group_name, parent) | |
466 | group.group_description = desc |
|
466 | group.group_description = desc | |
467 | group.user = owner |
|
467 | group.user = owner | |
468 | sa.add(group) |
|
468 | sa.add(group) | |
469 | perm_obj = rgm._create_default_perms(group) |
|
469 | perm_obj = rgm._create_default_perms(group) | |
470 | sa.add(perm_obj) |
|
470 | sa.add(perm_obj) | |
471 | sa.flush() |
|
471 | sa.flush() | |
472 |
|
472 | |||
473 | parent = group |
|
473 | parent = group | |
474 | return group |
|
474 | return group | |
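
For a hypothetical path u'projects/web/myrepo', this creates the groups 'projects' and 'projects/web' if missing and returns the innermost one::

    group = map_groups(u'projects/web/myrepo')
    # group.group_name == u'projects/web'
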
475 |
|
475 | |||
476 |
|
476 | |||
477 | def repo2db_mapper(initial_repo_list, remove_obsolete=False, |
|
477 | def repo2db_mapper(initial_repo_list, remove_obsolete=False, | |
478 | install_git_hooks=False, user=None, overwrite_git_hooks=False): |
|
478 | install_git_hooks=False, user=None, overwrite_git_hooks=False): | |
479 | """ |
|
479 | """ | |
480 | Maps all repos given in initial_repo_list; non-existing repositories |
|
480 | Maps all repos given in initial_repo_list; non-existing repositories |
481 | are created. If remove_obsolete is True, it also checks for db entries |
|
481 | are created. If remove_obsolete is True, it also checks for db entries |
482 | that are not in initial_repo_list and removes them. |
|
482 | that are not in initial_repo_list and removes them. |
483 |
|
483 | |||
484 | :param initial_repo_list: list of repositories found by scanning methods |
|
484 | :param initial_repo_list: list of repositories found by scanning methods | |
485 | :param remove_obsolete: check for obsolete entries in database |
|
485 | :param remove_obsolete: check for obsolete entries in database | |
486 | :param install_git_hooks: if this is True, also check and install git hook |
|
486 | :param install_git_hooks: if this is True, also check and install git hook | |
487 | for a repo if missing |
|
487 | for a repo if missing | |
488 | :param overwrite_git_hooks: if this is True, overwrite any existing git hooks |
|
488 | :param overwrite_git_hooks: if this is True, overwrite any existing git hooks | |
489 | that may be encountered (even if user-deployed) |
|
489 | that may be encountered (even if user-deployed) | |
490 | """ |
|
490 | """ | |
491 | from kallithea.model.repo import RepoModel |
|
491 | from kallithea.model.repo import RepoModel | |
492 | from kallithea.model.scm import ScmModel |
|
492 | from kallithea.model.scm import ScmModel | |
493 | sa = meta.Session() |
|
493 | sa = meta.Session() | |
494 | repo_model = RepoModel() |
|
494 | repo_model = RepoModel() | |
495 | if user is None: |
|
495 | if user is None: | |
496 | user = User.get_first_admin() |
|
496 | user = User.get_first_admin() | |
497 | added = [] |
|
497 | added = [] | |
498 |
|
498 | |||
499 | # creation defaults |
|
499 | # creation defaults |
500 | defs = Setting.get_default_repo_settings(strip_prefix=True) |
|
500 | defs = Setting.get_default_repo_settings(strip_prefix=True) | |
501 | enable_statistics = defs.get('repo_enable_statistics') |
|
501 | enable_statistics = defs.get('repo_enable_statistics') | |
502 | enable_locking = defs.get('repo_enable_locking') |
|
502 | enable_locking = defs.get('repo_enable_locking') | |
503 | enable_downloads = defs.get('repo_enable_downloads') |
|
503 | enable_downloads = defs.get('repo_enable_downloads') | |
504 | private = defs.get('repo_private') |
|
504 | private = defs.get('repo_private') | |
505 |
|
505 | |||
506 | for name, repo in initial_repo_list.items(): |
|
506 | for name, repo in initial_repo_list.items(): | |
507 | group = map_groups(name) |
|
507 | group = map_groups(name) | |
508 | unicode_name = safe_unicode(name) |
|
508 | unicode_name = safe_unicode(name) | |
509 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
509 | db_repo = repo_model.get_by_repo_name(unicode_name) | |
510 | # found a repo that is on the filesystem but not in the Kallithea database |
|
510 | # found a repo that is on the filesystem but not in the Kallithea database |
511 | if not db_repo: |
|
511 | if not db_repo: | |
512 | log.info('repository %s not found, creating now', name) |
|
512 | log.info('repository %s not found, creating now', name) | |
513 | added.append(name) |
|
513 | added.append(name) | |
514 | desc = (repo.description |
|
514 | desc = (repo.description | |
515 | if repo.description != 'unknown' |
|
515 | if repo.description != 'unknown' | |
516 | else '%s repository' % name) |
|
516 | else '%s repository' % name) | |
517 |
|
517 | |||
518 | new_repo = repo_model._create_repo( |
|
518 | new_repo = repo_model._create_repo( | |
519 | repo_name=name, |
|
519 | repo_name=name, | |
520 | repo_type=repo.alias, |
|
520 | repo_type=repo.alias, | |
521 | description=desc, |
|
521 | description=desc, | |
522 | repo_group=getattr(group, 'group_id', None), |
|
522 | repo_group=getattr(group, 'group_id', None), | |
523 | owner=user, |
|
523 | owner=user, | |
524 | enable_locking=enable_locking, |
|
524 | enable_locking=enable_locking, | |
525 | enable_downloads=enable_downloads, |
|
525 | enable_downloads=enable_downloads, | |
526 | enable_statistics=enable_statistics, |
|
526 | enable_statistics=enable_statistics, | |
527 | private=private, |
|
527 | private=private, | |
528 | state=Repository.STATE_CREATED |
|
528 | state=Repository.STATE_CREATED | |
529 | ) |
|
529 | ) | |
530 | sa.commit() |
|
530 | sa.commit() | |
531 | # we just added that repo; make sure it has its git hooks |
|
531 | # we just added that repo; make sure it has its git hooks |
532 | # installed and its server info updated |
|
532 | # installed and its server info updated |
533 | if new_repo.repo_type == 'git': |
|
533 | if new_repo.repo_type == 'git': | |
534 | git_repo = new_repo.scm_instance |
|
534 | git_repo = new_repo.scm_instance | |
535 | ScmModel().install_git_hooks(git_repo) |
|
535 | ScmModel().install_git_hooks(git_repo) | |
536 | # update repository server-info |
|
536 | # update repository server-info | |
537 | log.debug('Running update server info') |
|
537 | log.debug('Running update server info') | |
538 | git_repo._update_server_info() |
|
538 | git_repo._update_server_info() | |
539 | new_repo.update_changeset_cache() |
|
539 | new_repo.update_changeset_cache() | |
540 | elif install_git_hooks: |
|
540 | elif install_git_hooks: | |
541 | if db_repo.repo_type == 'git': |
|
541 | if db_repo.repo_type == 'git': | |
542 | ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks) |
|
542 | ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks) | |
543 |
|
543 | |||
544 | removed = [] |
|
544 | removed = [] | |
545 | # remove from database those repositories that are not in the filesystem |
|
545 | # remove from database those repositories that are not in the filesystem | |
546 | unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list) |
|
546 | unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list) | |
547 | for repo in sa.query(Repository).all(): |
|
547 | for repo in sa.query(Repository).all(): | |
548 | if repo.repo_name not in unicode_initial_repo_list: |
|
548 | if repo.repo_name not in unicode_initial_repo_list: | |
549 | if remove_obsolete: |
|
549 | if remove_obsolete: | |
550 | log.debug("Removing non-existing repository found in db `%s`", |
|
550 | log.debug("Removing non-existing repository found in db `%s`", | |
551 | repo.repo_name) |
|
551 | repo.repo_name) | |
552 | try: |
|
552 | try: | |
553 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) |
|
553 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) | |
554 | sa.commit() |
|
554 | sa.commit() | |
555 | except Exception: |
|
555 | except Exception: | |
556 | # don't block further removals on error |
|
556 | # don't block further removals on error |
557 | log.error(traceback.format_exc()) |
|
557 | log.error(traceback.format_exc()) | |
558 | sa.rollback() |
|
558 | sa.rollback() | |
559 | removed.append(repo.repo_name) |
|
559 | removed.append(repo.repo_name) | |
560 | return added, removed |
|
560 | return added, removed | |
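
A sketch of a full rescan, assuming ScmModel().repo_scan() returns the name-to-repo mapping expected here::

    from kallithea.model.scm import ScmModel
    repos = ScmModel().repo_scan()  # name -> vcs repository instance
    added, removed = repo2db_mapper(repos, remove_obsolete=True)
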
561 |
|
561 | |||
562 |
|
562 | |||
563 | # set cache regions for beaker so celery can utilise them |
|
563 | # set cache regions for beaker so celery can utilise them |
564 | def add_cache(settings): |
|
564 | def add_cache(settings): | |
565 | cache_settings = {'regions': None} |
|
565 | cache_settings = {'regions': None} | |
566 | for key in settings.keys(): |
|
566 | for key in settings.keys(): | |
567 | for prefix in ['beaker.cache.', 'cache.']: |
|
567 | for prefix in ['beaker.cache.', 'cache.']: | |
568 | if key.startswith(prefix): |
|
568 | if key.startswith(prefix): | |
569 | name = key.split(prefix)[1].strip() |
|
569 | name = key.split(prefix)[1].strip() | |
570 | cache_settings[name] = settings[key].strip() |
|
570 | cache_settings[name] = settings[key].strip() | |
571 | if cache_settings['regions']: |
|
571 | if cache_settings['regions']: | |
572 | for region in cache_settings['regions'].split(','): |
|
572 | for region in cache_settings['regions'].split(','): | |
573 | region = region.strip() |
|
573 | region = region.strip() | |
574 | region_settings = {} |
|
574 | region_settings = {} | |
575 | for key, value in cache_settings.items(): |
|
575 | for key, value in cache_settings.items(): | |
576 | if key.startswith(region): |
|
576 | if key.startswith(region): | |
577 | region_settings[key.split('.')[1]] = value |
|
577 | region_settings[key.split('.')[1]] = value | |
578 | region_settings['expire'] = int(region_settings.get('expire', |
|
578 | region_settings['expire'] = int(region_settings.get('expire', | |
579 | 60)) |
|
579 | 60)) | |
580 | region_settings.setdefault('lock_dir', |
|
580 | region_settings.setdefault('lock_dir', | |
581 | cache_settings.get('lock_dir')) |
|
581 | cache_settings.get('lock_dir')) | |
582 | region_settings.setdefault('data_dir', |
|
582 | region_settings.setdefault('data_dir', | |
583 | cache_settings.get('data_dir')) |
|
583 | cache_settings.get('data_dir')) | |
584 |
|
584 | |||
585 | if 'type' not in region_settings: |
|
585 | if 'type' not in region_settings: | |
586 | region_settings['type'] = cache_settings.get('type', |
|
586 | region_settings['type'] = cache_settings.get('type', | |
587 | 'memory') |
|
587 | 'memory') | |
588 | beaker.cache.cache_regions[region] = region_settings |
|
588 | beaker.cache.cache_regions[region] = region_settings | |
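
A sketch of the beaker settings this parses (keys and values illustrative)::

    add_cache({
        'beaker.cache.regions': 'short_term, long_term',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '60',
        'beaker.cache.long_term.type': 'file',
        'beaker.cache.lock_dir': '/tmp/cache/lock',
        'beaker.cache.data_dir': '/tmp/cache/data',
    })  # populates beaker.cache.cache_regions for both regions
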
589 |
|
589 | |||
590 |
|
590 | |||
591 | def load_rcextensions(root_path): |
|
591 | def load_rcextensions(root_path): | |
592 | import kallithea |
|
592 | import kallithea | |
593 | from kallithea.config import conf |
|
593 | from kallithea.config import conf | |
594 |
|
594 | |||
595 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
595 | path = os.path.join(root_path, 'rcextensions', '__init__.py') | |
596 | if os.path.isfile(path): |
|
596 | if os.path.isfile(path): | |
597 | rcext = create_module('rc', path) |
|
597 | rcext = create_module('rc', path) | |
598 | EXT = kallithea.EXTENSIONS = rcext |
|
598 | EXT = kallithea.EXTENSIONS = rcext | |
599 | log.debug('Found rcextensions now loading %s...', rcext) |
|
599 | log.debug('Found rcextensions now loading %s...', rcext) | |
600 |
|
600 | |||
601 | # Additional mappings that are not present in the pygments lexers |
|
601 | # Additional mappings that are not present in the pygments lexers | |
602 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
602 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) | |
603 |
|
603 | |||
604 | #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) |
|
604 | #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) | |
605 |
|
605 | |||
606 | if getattr(EXT, 'INDEX_EXTENSIONS', []): |
|
606 | if getattr(EXT, 'INDEX_EXTENSIONS', []): | |
607 | log.debug('setting custom INDEX_EXTENSIONS') |
|
607 | log.debug('setting custom INDEX_EXTENSIONS') |
608 | conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) |
|
608 | conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) | |
609 |
|
609 | |||
610 | #ADDITIONAL MAPPINGS |
|
610 | #ADDITIONAL MAPPINGS | |
611 | log.debug('adding extra into INDEX_EXTENSIONS') |
|
611 | log.debug('adding extra into INDEX_EXTENSIONS') | |
612 | conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) |
|
612 | conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) | |
613 |
|
613 | |||
614 | # automatically check if the module is missing any data; set defaults if it is |
|
614 | # automatically check if the module is missing any data; set defaults if it is |
615 | # this will help auto-update new features of the rcextensions module |
|
615 | # this will help auto-update new features of the rcextensions module |
616 | #from kallithea.config import rcextensions |
|
616 | #from kallithea.config import rcextensions | |
617 | #for k in dir(rcextensions): |
|
617 | #for k in dir(rcextensions): | |
618 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
618 | # if not k.startswith('_') and not hasattr(EXT, k): | |
619 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
619 | # setattr(EXT, k, getattr(rcextensions, k)) | |
620 |
|
620 | |||
621 |
|
621 | |||
622 | def get_custom_lexer(extension): |
|
622 | def get_custom_lexer(extension): | |
623 | """ |
|
623 | """ | |
624 | returns a custom lexer if it's defined in rcextensions module, or None |
|
624 | returns a custom lexer if it's defined in rcextensions module, or None | |
625 | if there's no custom lexer defined |
|
625 | if there's no custom lexer defined | |
626 | """ |
|
626 | """ | |
627 | import kallithea |
|
627 | import kallithea | |
628 | from pygments import lexers |
|
628 | from pygments import lexers | |
629 | # check if this extension is mapped to another lexer |
|
629 | # check if this extension is mapped to another lexer |
630 | if kallithea.EXTENSIONS and extension in kallithea.EXTENSIONS.EXTRA_LEXERS: |
|
630 | if kallithea.EXTENSIONS and extension in kallithea.EXTENSIONS.EXTRA_LEXERS: | |
631 | _lexer_name = kallithea.EXTENSIONS.EXTRA_LEXERS[extension] |
|
631 | _lexer_name = kallithea.EXTENSIONS.EXTRA_LEXERS[extension] | |
632 | return lexers.get_lexer_by_name(_lexer_name) |
|
632 | return lexers.get_lexer_by_name(_lexer_name) | |
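
A sketch of the matching rcextensions entry (the mapping is illustrative)::

    # in rcextensions/__init__.py
    EXTRA_LEXERS = {'ws': 'text'}

    lexer = get_custom_lexer('ws')  # pygments text lexer, or None if unmapped
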
633 |
|
633 | |||
634 |
|
634 | |||
635 | #============================================================================== |
|
635 | #============================================================================== | |
636 | # TEST FUNCTIONS AND CREATORS |
|
636 | # TEST FUNCTIONS AND CREATORS | |
637 | #============================================================================== |
|
637 | #============================================================================== | |
638 | def create_test_index(repo_location, config, full_index): |
|
638 | def create_test_index(repo_location, config, full_index): | |
639 | """ |
|
639 | """ | |
640 | Makes default test index |
|
640 | Makes default test index | |
641 |
|
641 | |||
642 | :param config: test config |
|
642 | :param config: test config | |
643 | :param full_index: |
|
643 | :param full_index: | |
644 | """ |
|
644 | """ | |
645 |
|
645 | |||
646 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon |
|
646 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon | |
647 | from kallithea.lib.pidlock import DaemonLock, LockHeld |
|
647 | from kallithea.lib.pidlock import DaemonLock, LockHeld | |
648 |
|
648 | |||
649 | repo_location = repo_location |
|
649 | repo_location = repo_location | |
650 |
|
650 | |||
651 | index_location = os.path.join(config['app_conf']['index_dir']) |
|
651 | index_location = os.path.join(config['app_conf']['index_dir']) | |
652 | if not os.path.exists(index_location): |
|
652 | if not os.path.exists(index_location): | |
653 | os.makedirs(index_location) |
|
653 | os.makedirs(index_location) | |
654 |
|
654 | |||
655 | try: |
|
655 | try: | |
656 | l = DaemonLock(file_=os.path.join(dn(index_location), 'make_index.lock')) |
|
656 | l = DaemonLock(file_=os.path.join(dirname(index_location), 'make_index.lock')) | |
657 | WhooshIndexingDaemon(index_location=index_location, |
|
657 | WhooshIndexingDaemon(index_location=index_location, | |
658 | repo_location=repo_location) \ |
|
658 | repo_location=repo_location) \ | |
659 | .run(full_index=full_index) |
|
659 | .run(full_index=full_index) | |
660 | l.release() |
|
660 | l.release() | |
661 | except LockHeld: |
|
661 | except LockHeld: | |
662 | pass |
|
662 | pass | |
663 |
|
663 | |||
664 |
|
664 | |||
665 | def create_test_env(repos_test_path, config): |
|
665 | def create_test_env(repos_test_path, config): | |
666 | """ |
|
666 | """ | |
667 | Makes a fresh database and |
|
667 | Makes a fresh database and |
668 | installs the test repositories into a tmp dir |
|
668 | installs the test repositories into a tmp dir |
669 | """ |
|
669 | """ | |
670 | from kallithea.lib.db_manage import DbManage |
|
670 | from kallithea.lib.db_manage import DbManage | |
671 | from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH |
|
671 | from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH | |
672 |
|
672 | |||
673 | # PART ONE create db |
|
673 | # PART ONE create db | |
674 | dbconf = config['sqlalchemy.db1.url'] |
|
674 | dbconf = config['sqlalchemy.db1.url'] | |
675 | log.debug('making test db %s', dbconf) |
|
675 | log.debug('making test db %s', dbconf) | |
676 |
|
676 | |||
677 | # create test dir if it doesn't exist |
|
677 | # create test dir if it doesn't exist | |
678 | if not os.path.isdir(repos_test_path): |
|
678 | if not os.path.isdir(repos_test_path): | |
679 | log.debug('Creating testdir %s', repos_test_path) |
|
679 | log.debug('Creating testdir %s', repos_test_path) | |
680 | os.makedirs(repos_test_path) |
|
680 | os.makedirs(repos_test_path) | |
681 |
|
681 | |||
682 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], |
|
682 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], | |
683 | tests=True) |
|
683 | tests=True) | |
684 | dbmanage.create_tables(override=True) |
|
684 | dbmanage.create_tables(override=True) | |
685 | # for tests dynamically set new root paths based on generated content |
|
685 | # for tests dynamically set new root paths based on generated content | |
686 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) |
|
686 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) | |
687 | dbmanage.create_default_user() |
|
687 | dbmanage.create_default_user() | |
688 | dbmanage.admin_prompt() |
|
688 | dbmanage.admin_prompt() | |
689 | dbmanage.create_permissions() |
|
689 | dbmanage.create_permissions() | |
690 | dbmanage.populate_default_permissions() |
|
690 | dbmanage.populate_default_permissions() | |
691 | Session().commit() |
|
691 | Session().commit() | |
692 | # PART TWO make test repo |
|
692 | # PART TWO make test repo | |
693 | log.debug('making test vcs repositories') |
|
693 | log.debug('making test vcs repositories') | |
694 |
|
694 | |||
695 | idx_path = config['app_conf']['index_dir'] |
|
695 | idx_path = config['app_conf']['index_dir'] | |
696 | data_path = config['app_conf']['cache_dir'] |
|
696 | data_path = config['app_conf']['cache_dir'] | |
697 |
|
697 | |||
698 | #clean index and data |
|
698 | #clean index and data | |
699 | if idx_path and os.path.exists(idx_path): |
|
699 | if idx_path and os.path.exists(idx_path): | |
700 | log.debug('remove %s', idx_path) |
|
700 | log.debug('remove %s', idx_path) | |
701 | shutil.rmtree(idx_path) |
|
701 | shutil.rmtree(idx_path) | |
702 |
|
702 | |||
703 | if data_path and os.path.exists(data_path): |
|
703 | if data_path and os.path.exists(data_path): | |
704 | log.debug('remove %s', data_path) |
|
704 | log.debug('remove %s', data_path) | |
705 | shutil.rmtree(data_path) |
|
705 | shutil.rmtree(data_path) | |
706 |
|
706 | |||
707 | #CREATE DEFAULT TEST REPOS |
|
707 | #CREATE DEFAULT TEST REPOS | |
708 | cur_dir = dn(dn(abspath(__file__))) |
|
708 | cur_dir = dirname(dirname(abspath(__file__))) | |
709 | tar = tarfile.open(os.path.join(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz")) |
|
709 | tar = tarfile.open(os.path.join(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz")) | |
710 | tar.extractall(os.path.join(TESTS_TMP_PATH, HG_REPO)) |
|
710 | tar.extractall(os.path.join(TESTS_TMP_PATH, HG_REPO)) | |
711 | tar.close() |
|
711 | tar.close() | |
712 |
|
712 | |||
713 | cur_dir = dn(dn(abspath(__file__))) |
|
713 | cur_dir = dirname(dirname(abspath(__file__))) | |
714 | tar = tarfile.open(os.path.join(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz")) |
|
714 | tar = tarfile.open(os.path.join(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz")) | |
715 | tar.extractall(os.path.join(TESTS_TMP_PATH, GIT_REPO)) |
|
715 | tar.extractall(os.path.join(TESTS_TMP_PATH, GIT_REPO)) | |
716 | tar.close() |
|
716 | tar.close() | |
717 |
|
717 | |||
718 | #LOAD VCS test stuff |
|
718 | #LOAD VCS test stuff | |
719 | from kallithea.tests.vcs import setup_package |
|
719 | from kallithea.tests.vcs import setup_package | |
720 | setup_package() |
|
720 | setup_package() | |
721 |
|
721 | |||
722 |
|
722 | |||
723 | #============================================================================== |
|
723 | #============================================================================== | |
724 | # PASTER COMMANDS |
|
724 | # PASTER COMMANDS | |
725 | #============================================================================== |
|
725 | #============================================================================== | |
726 | class BasePasterCommand(Command): |
|
726 | class BasePasterCommand(Command): | |
727 | """ |
|
727 | """ | |
728 | Abstract Base Class for paster commands. |
|
728 | Abstract Base Class for paster commands. | |
729 |
|
729 | |||
730 | The celery commands are somewhat aggressive about loading |
|
730 | The celery commands are somewhat aggressive about loading | |
731 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
731 | celery.conf, and since our module sets the `CELERY_LOADER` | |
732 | environment variable to our loader, we have to bootstrap a bit and |
|
732 | environment variable to our loader, we have to bootstrap a bit and | |
733 | make sure we've had a chance to load the pylons config off of the |
|
733 | make sure we've had a chance to load the pylons config off of the | |
734 | command line, otherwise everything fails. |
|
734 | command line, otherwise everything fails. | |
735 | """ |
|
735 | """ | |
736 | min_args = 1 |
|
736 | min_args = 1 | |
737 | min_args_error = "Please provide a paster config file as an argument." |
|
737 | min_args_error = "Please provide a paster config file as an argument." | |
738 | takes_config_file = 1 |
|
738 | takes_config_file = 1 | |
739 | requires_config_file = True |
|
739 | requires_config_file = True | |
740 |
|
740 | |||
741 | def run(self, args): |
|
741 | def run(self, args): | |
742 | """ |
|
742 | """ | |
743 | Overrides Command.run |
|
743 | Overrides Command.run | |
744 |
|
744 | |||
745 | Checks for a config file argument and loads it. |
|
745 | Checks for a config file argument and loads it. | |
746 | """ |
|
746 | """ | |
747 | if len(args) < self.min_args: |
|
747 | if len(args) < self.min_args: | |
748 | raise BadCommand( |
|
748 | raise BadCommand( | |
749 | self.min_args_error % {'min_args': self.min_args, |
|
749 | self.min_args_error % {'min_args': self.min_args, | |
750 | 'actual_args': len(args)}) |
|
750 | 'actual_args': len(args)}) | |
751 |
|
751 | |||
752 | # Decrement because we're going to lob off the first argument. |
|
752 | # Decrement because we're going to lob off the first argument. | |
753 | # @@ This is hacky |
|
753 | # @@ This is hacky | |
754 | self.min_args -= 1 |
|
754 | self.min_args -= 1 | |
755 | self.bootstrap_config(args[0]) |
|
755 | self.bootstrap_config(args[0]) | |
756 | self.update_parser() |
|
756 | self.update_parser() | |
757 | return super(BasePasterCommand, self).run(args[1:]) |
|
757 | return super(BasePasterCommand, self).run(args[1:]) | |
758 |
|
758 | |||
759 | def update_parser(self): |
|
759 | def update_parser(self): | |
760 | """ |
|
760 | """ | |
761 | Abstract method. Allows for the class's parser to be updated |
|
761 | Abstract method. Allows for the class's parser to be updated | |
762 | before the superclass's `run` method is called. Necessary to |
|
762 | before the superclass's `run` method is called. Necessary to | |
763 | allow options/arguments to be passed through to the underlying |
|
763 | allow options/arguments to be passed through to the underlying | |
764 | celery command. |
|
764 | celery command. | |
765 | """ |
|
765 | """ | |
766 | raise NotImplementedError("Abstract Method.") |
|
766 | raise NotImplementedError("Abstract Method.") | |
767 |
|
767 | |||
768 | def bootstrap_config(self, conf): |
|
768 | def bootstrap_config(self, conf): | |
769 | """ |
|
769 | """ | |
770 | Loads the pylons configuration. |
|
770 | Loads the pylons configuration. | |
771 | """ |
|
771 | """ | |
772 | from pylons import config as pylonsconfig |
|
772 | from pylons import config as pylonsconfig | |
773 |
|
773 | |||
774 | self.path_to_ini_file = os.path.realpath(conf) |
|
774 | self.path_to_ini_file = os.path.realpath(conf) | |
775 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) |
|
775 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) | |
776 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
|
776 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) | |
777 |
|
777 | |||
778 | def _init_session(self): |
|
778 | def _init_session(self): | |
779 | """ |
|
779 | """ | |
780 | Inits SqlAlchemy Session |
|
780 | Inits SqlAlchemy Session | |
781 | """ |
|
781 | """ | |
782 | logging.config.fileConfig(self.path_to_ini_file) |
|
782 | logging.config.fileConfig(self.path_to_ini_file) | |
783 |
|
783 | |||
784 | from pylons import config |
|
784 | from pylons import config | |
785 | from kallithea.model import init_model |
|
785 | from kallithea.model import init_model | |
786 | from kallithea.lib.utils2 import engine_from_config |
|
786 | from kallithea.lib.utils2 import engine_from_config | |
787 | add_cache(config) |
|
787 | add_cache(config) | |
788 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
788 | engine = engine_from_config(config, 'sqlalchemy.db1.') | |
789 | init_model(engine) |
|
789 | init_model(engine) | |
790 |
|
790 | |||
791 |
|
791 | |||
792 | def check_git_version(): |
|
792 | def check_git_version(): | |
793 | """ |
|
793 | """ | |
794 | Checks what version of git is installed on the system, and issues a warning |
|
794 | Checks what version of git is installed on the system, and issues a warning |
795 | if it's too old for Kallithea to work properly. |
|
795 | if it's too old for Kallithea to work properly. | |
796 | """ |
|
796 | """ | |
797 | from kallithea import BACKENDS |
|
797 | from kallithea import BACKENDS | |
798 | from kallithea.lib.vcs.backends.git.repository import GitRepository |
|
798 | from kallithea.lib.vcs.backends.git.repository import GitRepository | |
799 | from kallithea.lib.vcs.conf import settings |
|
799 | from kallithea.lib.vcs.conf import settings | |
800 | from distutils.version import StrictVersion |
|
800 | from distutils.version import StrictVersion | |
801 |
|
801 | |||
802 | if 'git' not in BACKENDS: |
|
802 | if 'git' not in BACKENDS: | |
803 | return None |
|
803 | return None | |
804 |
|
804 | |||
805 | stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True, |
|
805 | stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True, | |
806 | _safe=True) |
|
806 | _safe=True) | |
807 |
|
807 | |||
808 | m = re.search(r"\d+\.\d+\.\d+", stdout) |
|
808 | m = re.search(r"\d+\.\d+\.\d+", stdout) |
809 | if m: |
|
809 | if m: | |
810 | ver = StrictVersion(m.group(0)) |
|
810 | ver = StrictVersion(m.group(0)) | |
811 | else: |
|
811 | else: | |
812 | ver = StrictVersion('0.0.0') |
|
812 | ver = StrictVersion('0.0.0') | |
813 |
|
813 | |||
814 | req_ver = StrictVersion('1.7.4') |
|
814 | req_ver = StrictVersion('1.7.4') | |
815 |
|
815 | |||
816 | log.debug('Git executable: "%s" version %s detected: %s', |
|
816 | log.debug('Git executable: "%s" version %s detected: %s', | |
817 | settings.GIT_EXECUTABLE_PATH, ver, stdout) |
|
817 | settings.GIT_EXECUTABLE_PATH, ver, stdout) | |
818 | if stderr: |
|
818 | if stderr: | |
819 | log.warning('Error detecting git version: %r', stderr) |
|
819 | log.warning('Error detecting git version: %r', stderr) | |
820 | elif ver < req_ver: |
|
820 | elif ver < req_ver: | |
821 | log.warning('Kallithea detected git version %s, which is too old ' |
|
821 | log.warning('Kallithea detected git version %s, which is too old ' | |
822 | 'for the system to function properly. ' |
|
822 | 'for the system to function properly. ' | |
823 | 'Please upgrade to version %s or later.' % (ver, req_ver)) |
|
823 | 'Please upgrade to version %s or later.' % (ver, req_ver)) | |
824 | return ver |
|
824 | return ver | |
825 |
|
825 | |||
826 |
|
826 | |||
827 | @decorator.decorator |
|
827 | @decorator.decorator | |
828 | def jsonify(func, *args, **kwargs): |
|
828 | def jsonify(func, *args, **kwargs): | |
829 | """Action decorator that formats output for JSON |
|
829 | """Action decorator that formats output for JSON | |
830 |
|
830 | |||
831 | Given a function that will return content, this decorator will turn |
|
831 | Given a function that will return content, this decorator will turn | |
832 | the result into JSON, with a content-type of 'application/json' and |
|
832 | the result into JSON, with a content-type of 'application/json' and | |
833 | output it. |
|
833 | output it. | |
834 |
|
834 | |||
835 | """ |
|
835 | """ | |
836 | from pylons.decorators.util import get_pylons |
|
836 | from pylons.decorators.util import get_pylons | |
837 | from kallithea.lib.compat import json |
|
837 | from kallithea.lib.compat import json | |
838 | pylons = get_pylons(args) |
|
838 | pylons = get_pylons(args) | |
839 | pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8' |
|
839 | pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8' | |
840 | data = func(*args, **kwargs) |
|
840 | data = func(*args, **kwargs) | |
841 | if isinstance(data, (list, tuple)): |
|
841 | if isinstance(data, (list, tuple)): | |
842 | msg = "JSON responses with Array envelopes are susceptible to " \ |
|
842 | msg = "JSON responses with Array envelopes are susceptible to " \ | |
843 | "cross-site data leak attacks, see " \ |
|
843 | "cross-site data leak attacks, see " \ | |
844 | "http://wiki.pylonshq.com/display/pylonsfaq/Warnings" |
|
844 | "http://wiki.pylonshq.com/display/pylonsfaq/Warnings" | |
845 | warnings.warn(msg, Warning, 2) |
|
845 | warnings.warn(msg, Warning, 2) | |
846 | log.warning(msg) |
|
846 | log.warning(msg) | |
847 | log.debug("Returning JSON wrapped action output") |
|
847 | log.debug("Returning JSON wrapped action output") | |
848 | return json.dumps(data, encoding='utf-8') |
|
848 | return json.dumps(data, encoding='utf-8') | |
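
Hypothetical controller usage — the controller and base class are illustrative; returning a dict avoids the array-envelope warning above::

    class RepoApiController(BaseController):
        @jsonify
        def repo_info(self):
            return {'name': 'group/myrepo', 'type': 'hg'}
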
849 |
|
849 | |||
850 |
|
850 | |||
851 | def conditional_cache(region, prefix, condition, func): |
|
851 | def conditional_cache(region, prefix, condition, func): | |
852 | """ |
|
852 | """ | |
853 |
|
853 | |||
854 | Conditional caching function, use like:: |
|
854 | Conditional caching function, use like:: |
855 | def _c(arg): |
|
855 | def _c(arg): | |
856 | #heavy computation function |
|
856 | #heavy computation function | |
857 | return data |
|
857 | return data | |
858 |
|
858 | |||
859 | # depending on condition, the computation is wrapped in cache or not |
|
859 | # depending on condition, the computation is wrapped in cache or not |
860 | compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func) |
|
860 | compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func) | |
861 | return compute(arg) |
|
861 | return compute(arg) | |
862 |
|
862 | |||
863 | :param region: name of cache region |
|
863 | :param region: name of cache region | |
864 | :param prefix: cache region prefix |
|
864 | :param prefix: cache region prefix | |
865 | :param condition: condition for cache to be triggered, and return data cached |
|
865 | :param condition: condition for cache to be triggered, and return data cached | |
866 | :param func: wrapped heavy function to compute |
|
866 | :param func: wrapped heavy function to compute | |
867 |
|
867 | |||
868 | """ |
|
868 | """ | |
869 | wrapped = func |
|
869 | wrapped = func | |
870 | if condition: |
|
870 | if condition: | |
871 | log.debug('conditional_cache: True, wrapping call of ' |
|
871 | log.debug('conditional_cache: True, wrapping call of ' | |
872 | 'func: %s into %s region cache' % (region, func)) |
|
872 | 'func: %s into %s region cache' % (region, func)) | |
873 | wrapped = _cache_decorate((prefix,), None, None, region)(func) |
|
873 | wrapped = _cache_decorate((prefix,), None, None, region)(func) | |
874 |
|
874 | |||
875 | return wrapped |
|
875 | return wrapped |
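
As a hedged usage sketch of `jsonify` (the controller class and action name below are made up; it assumes a Pylons-style controller such as Kallithea's `BaseController`), returning a dict rather than a list also avoids the Array-envelope warning above::

    class ApiDemoController(BaseController):  # hypothetical controller

        @jsonify
        def user_count(self):
            # serialized to '{"status": "ok", "count": 3}' and sent
            # with Content-Type: application/json; charset=utf-8
            return {'status': 'ok', 'count': 3}
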
@@ -1,299 +1,299 @@
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Helpers for fixture generation
"""
import os
import time
from kallithea.tests import *
from kallithea.model.db import Repository, User, RepoGroup, UserGroup
from kallithea.model.meta import Session
from kallithea.model.repo import RepoModel
from kallithea.model.user import UserModel
from kallithea.model.repo_group import RepoGroupModel
from kallithea.model.user_group import UserGroupModel
from kallithea.model.gist import GistModel
from kallithea.model.scm import ScmModel
from kallithea.lib.vcs.backends.base import EmptyChangeset
+from os.path import dirname

-dn = os.path.dirname
-FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
+FIXTURES = os.path.join(dirname(dirname(os.path.abspath(__file__))), 'tests', 'fixtures')


def error_function(*args, **kwargs):
    raise Exception('Total Crash !')


class Fixture(object):

    def __init__(self):
        pass

    def anon_access(self, status):
        """
        Context manager for controlling anonymous access.
        Anon access will be set and committed, but restored again when exiting the block.

        Usage:

        fixture = Fixture()
        with fixture.anon_access(False):
            stuff
        """

        class context(object):
            def __enter__(self):
                anon = User.get_default_user()
                self._before = anon.active
                anon.active = status
                Session().add(anon)
                Session().commit()
                time.sleep(1.5)  # hack: wait for beaker sql_cache_short to expire

            def __exit__(self, exc_type, exc_val, exc_tb):
                anon = User.get_default_user()
                anon.active = self._before
                Session().add(anon)
                Session().commit()

        return context()

    def _get_repo_create_params(self, **custom):
        defs = dict(
            repo_name=None,
            repo_type='hg',
            clone_uri='',
            repo_group=u'-1',
            repo_description=u'DESC',
            repo_private=False,
            repo_landing_rev='rev:tip',
            repo_copy_permissions=False,
            repo_state=Repository.STATE_CREATED,
        )
        defs.update(custom)
        if 'repo_name_full' not in custom:
            defs.update({'repo_name_full': defs['repo_name']})

        # fix the repo name if passed as repo_name_full
        if defs['repo_name']:
            defs['repo_name'] = defs['repo_name'].split('/')[-1]

        return defs

    def _get_group_create_params(self, **custom):
        defs = dict(
            group_name=None,
            group_description=u'DESC',
            group_parent_id=None,
            perms_updates=[],
            perms_new=[],
            enable_locking=False,
            recursive=False
        )
        defs.update(custom)

        return defs

    def _get_user_create_params(self, name, **custom):
        defs = dict(
            username=name,
            password='qweqwe',
            email='%s+test@example.com' % name,
            firstname=u'TestUser',
            lastname=u'Test',
            active=True,
            admin=False,
            extern_type='internal',
            extern_name=None
        )
        defs.update(custom)

        return defs

    def _get_user_group_create_params(self, name, **custom):
        defs = dict(
            users_group_name=name,
            user_group_description=u'DESC',
            users_group_active=True,
            user_group_data={},
        )
        defs.update(custom)

        return defs

    def create_repo(self, name, **kwargs):
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            r = Repository.get_by_repo_name(name)
            if r:
                return r

        if isinstance(kwargs.get('repo_group'), RepoGroup):
            kwargs['repo_group'] = kwargs['repo_group'].group_id

        form_data = self._get_repo_create_params(repo_name=name, **kwargs)
        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        RepoModel().create(form_data, cur_user)
        Session().commit()
        ScmModel().mark_for_invalidation(name)
        return Repository.get_by_repo_name(name)

    def create_fork(self, repo_to_fork, fork_name, **kwargs):
        repo_to_fork = Repository.get_by_repo_name(repo_to_fork)

        form_data = self._get_repo_create_params(repo_name=fork_name,
                                                 fork_parent_id=repo_to_fork,
                                                 repo_type=repo_to_fork.repo_type,
                                                 **kwargs)
        form_data['update_after_clone'] = False

        #TODO: fix it !!
        form_data['description'] = form_data['repo_description']
        form_data['private'] = form_data['repo_private']
        form_data['landing_rev'] = form_data['repo_landing_rev']

        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        RepoModel().create_fork(form_data, cur_user=owner)
        Session().commit()
        ScmModel().mark_for_invalidation(fork_name)
        r = Repository.get_by_repo_name(fork_name)
        assert r
        return r

    def destroy_repo(self, repo_name, **kwargs):
        RepoModel().delete(repo_name, **kwargs)
        Session().commit()

    def create_repo_group(self, name, **kwargs):
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            gr = RepoGroup.get_by_group_name(group_name=name)
            if gr:
                return gr
        form_data = self._get_group_create_params(group_name=name, **kwargs)
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        gr = RepoGroupModel().create(
            group_name=form_data['group_name'],
            group_description=form_data['group_name'],
            owner=owner, parent=form_data['group_parent_id'])
        Session().commit()
        gr = RepoGroup.get_by_group_name(gr.group_name)
        return gr

    def destroy_repo_group(self, repogroupid):
        RepoGroupModel().delete(repogroupid)
        Session().commit()

    def create_user(self, name, **kwargs):
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            user = User.get_by_username(name)
            if user:
                return user
        form_data = self._get_user_create_params(name, **kwargs)
        user = UserModel().create(form_data)
        Session().commit()
        user = User.get_by_username(user.username)
        return user

    def destroy_user(self, userid):
        UserModel().delete(userid)
        Session().commit()

    def create_user_group(self, name, **kwargs):
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            gr = UserGroup.get_by_group_name(group_name=name)
            if gr:
                return gr
        form_data = self._get_user_group_create_params(name, **kwargs)
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        user_group = UserGroupModel().create(
            name=form_data['users_group_name'],
            description=form_data['user_group_description'],
            owner=owner, active=form_data['users_group_active'],
            group_data=form_data['user_group_data'])
        Session().commit()
        user_group = UserGroup.get_by_group_name(user_group.users_group_name)
        return user_group

    def destroy_user_group(self, usergroupid):
        UserGroupModel().delete(user_group=usergroupid, force=True)
        Session().commit()

    def create_gist(self, **kwargs):
        form_data = {
            'description': u'new-gist',
            'owner': TEST_USER_ADMIN_LOGIN,
            'gist_type': GistModel.cls.GIST_PUBLIC,
            'lifetime': -1,
            'gist_mapping': {'filename1.txt': {'content': 'hello world'}, }
        }
        form_data.update(kwargs)
        gist = GistModel().create(
            description=form_data['description'], owner=form_data['owner'],
            gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
            lifetime=form_data['lifetime']
        )
        Session().commit()

        return gist

    def destroy_gists(self, gistid=None):
        for g in GistModel.cls.get_all():
            if gistid:
                if gistid == g.gist_access_id:
                    GistModel().delete(g)
            else:
                GistModel().delete(g)
        Session().commit()

    def load_resource(self, resource_name, strip=True):
        with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
            source = f.read()
            if strip:
                source = source.strip()

        return source

    def commit_change(self, repo, filename, content, message, vcs_type, parent=None, newfile=False):
        repo = Repository.get_by_repo_name(repo)
        _cs = parent
        if not parent:
            _cs = EmptyChangeset(alias=vcs_type)

        if newfile:
            nodes = {
                filename: {
                    'content': content
                }
            }
            cs = ScmModel().create_nodes(
                user=TEST_USER_ADMIN_LOGIN, repo=repo,
                message=message,
                nodes=nodes,
                parent_cs=_cs,
                author=TEST_USER_ADMIN_LOGIN,
            )
        else:
            cs = ScmModel().commit_change(
                repo=repo.scm_instance, repo_name=repo.repo_name,
                cs=parent, user=TEST_USER_ADMIN_LOGIN,
                author=TEST_USER_ADMIN_LOGIN,
                message=message,
                content=content,
                f_path=filename
            )
        return cs
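
A hedged usage sketch of the `Fixture` helper (it assumes an initialized test database and the `TEST_USER_ADMIN_LOGIN` constant from `kallithea.tests`; the repository names are made up)::

    fixture = Fixture()
    repo = fixture.create_repo(u'demo_repo', repo_type='hg')
    fork = fixture.create_fork(u'demo_repo', u'demo_repo_fork')

    with fixture.anon_access(False):
        pass  # requests issued here see anonymous access disabled

    fixture.destroy_repo(u'demo_repo_fork')
    fixture.destroy_repo(u'demo_repo')
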
@@ -1,149 +1,149 @@
import os
import csv
import datetime
from kallithea.tests import *
from kallithea.model.db import UserLog
from kallithea.model.meta import Session
from kallithea.lib.utils2 import safe_unicode
+from os.path import dirname

-dn = os.path.dirname
-FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'fixtures')
+FIXTURES = os.path.join(dirname(dirname(os.path.abspath(__file__))), 'fixtures')


class TestAdminController(TestController):

    @classmethod
    def setup_class(cls):
        UserLog.query().delete()
        Session().commit()

        def strptime(val):
            fmt = '%Y-%m-%d %H:%M:%S'
            if '.' not in val:
                return datetime.datetime.strptime(val, fmt)

            nofrag, frag = val.split(".")
            date = datetime.datetime.strptime(nofrag, fmt)

            frag = frag[:6]  # truncate to microseconds
            frag += (6 - len(frag)) * '0'  # add 0s
            return date.replace(microsecond=int(frag))

        with open(os.path.join(FIXTURES, 'journal_dump.csv')) as f:
            for row in csv.DictReader(f):
                ul = UserLog()
                for k, v in row.iteritems():
                    v = safe_unicode(v)
                    if k == 'action_date':
                        v = strptime(v)
                    if k in ['user_id', 'repository_id']:
                        # nullable due to FK problems
                        v = None
                    setattr(ul, k, v)
                Session().add(ul)
            Session().commit()

    @classmethod
    def teardown_class(cls):
        UserLog.query().delete()
        Session().commit()

    def test_index(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index'))
        response.mustcontain('Admin Journal')

    def test_filter_all_entries(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',))
        response.mustcontain('2034 Entries')

    def test_filter_journal_filter_exact_match_on_repository(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:xxx'))
        response.mustcontain('3 Entries')

    def test_filter_journal_filter_exact_match_on_repository_CamelCase(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:XxX'))
        response.mustcontain('3 Entries')

    def test_filter_journal_filter_wildcard_on_repository(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:*test*'))
        response.mustcontain('862 Entries')

    def test_filter_journal_filter_prefix_on_repository(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:test*'))
        response.mustcontain('257 Entries')

    def test_filter_journal_filter_prefix_on_repository_CamelCase(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:Test*'))
        response.mustcontain('257 Entries')

    def test_filter_journal_filter_prefix_on_repository_and_user(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:test* AND username:demo'))
        response.mustcontain('130 Entries')

    def test_filter_journal_filter_prefix_on_repository_or_other_repo(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='repository:test* OR repository:xxx'))
        response.mustcontain('260 Entries')  # 257 + 3

    def test_filter_journal_filter_exact_match_on_username(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='username:demo'))
        response.mustcontain('1087 Entries')

    def test_filter_journal_filter_exact_match_on_username_camelCase(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='username:DemO'))
        response.mustcontain('1087 Entries')

    def test_filter_journal_filter_wildcard_on_username(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='username:*test*'))
        response.mustcontain('100 Entries')

    def test_filter_journal_filter_prefix_on_username(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='username:demo*'))
        response.mustcontain('1101 Entries')

    def test_filter_journal_filter_prefix_on_user_or_other_user(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='username:demo OR username:volcan'))
        response.mustcontain('1095 Entries')  # 1087 + 8

    def test_filter_journal_filter_wildcard_on_action(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='action:*pull_request*'))
        response.mustcontain('187 Entries')

    def test_filter_journal_filter_on_date(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='date:20121010'))
        response.mustcontain('47 Entries')

    def test_filter_journal_filter_on_date_2(self):
        self.log_user()
        response = self.app.get(url(controller='admin/admin', action='index',
                                    filter='date:20121020'))
        response.mustcontain('17 Entries')
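
The `strptime` helper in `setup_class` above is needed because `datetime.datetime.strptime` with `%f` expects exactly six fractional digits; the padding turns a fragment of any length into microseconds. A standalone sketch of the same arithmetic, with a made-up timestamp::

    import datetime

    val = '2012-10-10 12:00:00.1234'
    nofrag, frag = val.split('.')                # frag == '1234'
    date = datetime.datetime.strptime(nofrag, '%Y-%m-%d %H:%M:%S')
    frag = frag[:6]                              # truncate to at most six digits
    frag += (6 - len(frag)) * '0'                # pad: '1234' -> '123400'
    print date.replace(microsecond=int(frag))    # 2012-10-10 12:00:00.123400
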
@@ -1,221 +1,221 @@
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.tests.scripts.manual_test_concurrency
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Test suite for making push/pull operations

This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Dec 30, 2010
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.

"""

import os
import sys
import shutil
import logging
from os.path import dirname

from tempfile import _RandomNameSequence
from subprocess import Popen, PIPE

from paste.deploy import appconfig
from sqlalchemy import engine_from_config

from kallithea.lib.utils import add_cache
from kallithea.model import init_model
from kallithea.model import meta
from kallithea.model.db import User, Repository, Ui
from kallithea.lib.auth import get_crypt_password

from kallithea.tests import HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
from kallithea.config.environment import load_environment

-rel_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
+rel_path = dirname(dirname(dirname(dirname(os.path.abspath(__file__)))))
conf = appconfig('config:development.ini', relative_to=rel_path)
load_environment(conf.global_conf, conf.local_conf)

add_cache(conf)

USER = TEST_USER_ADMIN_LOGIN
PASS = TEST_USER_ADMIN_PASS
HOST = 'server.local'
METHOD = 'pull'
DEBUG = True
log = logging.getLogger(__name__)


class Command(object):

    def __init__(self, cwd):
        self.cwd = cwd

    def execute(self, cmd, *args):
        """Runs command on the system with given ``args``.
        """

        command = cmd + ' ' + ' '.join(args)
        log.debug('Executing %s', command)
        if DEBUG:
            print command
        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
        stdout, stderr = p.communicate()
        if DEBUG:
            print stdout, stderr
        return stdout, stderr


def get_session():
    engine = engine_from_config(conf, 'sqlalchemy.db1.')
    init_model(engine)
    sa = meta.Session
    return sa


def create_test_user(force=True):
    print 'creating test user'
    sa = get_session()

    user = sa.query(User).filter(User.username == USER).scalar()

    if force and user is not None:
        print 'removing current user'
        for repo in sa.query(Repository).filter(Repository.user == user).all():
            sa.delete(repo)
        sa.delete(user)
        sa.commit()

    if user is None or force:
        print 'creating new one'
        new_usr = User()
        new_usr.username = USER
        new_usr.password = get_crypt_password(PASS)
        new_usr.email = 'mail@example.com'
        new_usr.name = 'test'
        new_usr.lastname = 'lasttestname'
        new_usr.active = True
        new_usr.admin = True
        sa.add(new_usr)
        sa.commit()

    print 'done'


def create_test_repo(force=True):
    print 'creating test repo'
    from kallithea.model.repo import RepoModel
    sa = get_session()

    user = sa.query(User).filter(User.username == USER).scalar()
    if user is None:
        raise Exception('user not found')

    repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()

    if repo is None:
        print 'repo not found creating'

        form_data = {'repo_name': HG_REPO,
                     'repo_type': 'hg',
                     'private': False,
                     'clone_uri': ''}
        rm = RepoModel(sa)
        rm.base_path = '/home/hg'
        rm.create(form_data, user)

    print 'done'


def set_anonymous_access(enable=True):
    sa = get_session()
    user = sa.query(User).filter(User.username == 'default').one()
    user.active = enable
    sa.add(user)
    sa.commit()


def get_anonymous_access():
    sa = get_session()
    return sa.query(User).filter(User.username == 'default').one().active


#==============================================================================
# TESTS
#==============================================================================
def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD,
                                seq=None, backend='hg'):
    cwd = path = os.path.join(Ui.get_by_key('paths', '/').ui_value, repo)

    if seq is None:
        seq = _RandomNameSequence().next()

    try:
        shutil.rmtree(path, ignore_errors=True)
        os.makedirs(path)
        #print 'made dirs %s' % os.path.join(path)
    except OSError:
        raise

    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
                {'user': USER,
                 'pass': PASS,
                 'host': HOST,
                 'cloned_repo': repo, }

    dest = path + seq
    if method == 'pull':
        stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
    else:
        stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
    print stdout, 'sdasdsadsa'
    if not no_errors:
        if backend == 'hg':
            assert """adding file changes""" in stdout, 'no messages about cloning'
            assert """abort""" not in stderr, 'got error from clone'
        elif backend == 'git':
            assert """Cloning into""" in stdout, 'no messages about cloning'

if __name__ == '__main__':
    try:
        create_test_user(force=False)
        seq = None
        import time

        try:
            METHOD = sys.argv[3]
        except IndexError:
            pass

        try:
            backend = sys.argv[4]
        except IndexError:
            backend = 'hg'

        if METHOD == 'pull':
            seq = _RandomNameSequence().next()
            test_clone_with_credentials(repo=sys.argv[1], method='clone',
                                        seq=seq, backend=backend)
        s = time.time()
        for i in range(1, int(sys.argv[2]) + 1):
            print 'take', i
            test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
                                        seq=seq, backend=backend)
        print 'time taken %.3f' % (time.time() - s)
    except Exception as e:
        sys.exit('stop on %s' % e)
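
Judging from the `sys.argv` handling above, the script is driven from the command line: argv[1] is the repository name, argv[2] the number of passes, argv[3] the optional method and argv[4] the optional backend. A hedged example invocation (the repository name and pass count are made up)::

    python kallithea/tests/scripts/manual_test_concurrency.py test_repo 20 pull hg
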
@@ -1,190 +1,190 b'' | |||||
1 | #!/usr/bin/env python2 |
|
1 | #!/usr/bin/env python2 | |
2 | # -*- coding: utf-8 -*- |
|
2 | # -*- coding: utf-8 -*- | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU General Public License as published by |
|
4 | # it under the terms of the GNU General Public License as published by | |
5 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | # the Free Software Foundation, either version 3 of the License, or | |
6 | # (at your option) any later version. |
|
6 | # (at your option) any later version. | |
7 | # |
|
7 | # | |
8 | # This program is distributed in the hope that it will be useful, |
|
8 | # This program is distributed in the hope that it will be useful, | |
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
11 | # GNU General Public License for more details. |
|
11 | # GNU General Public License for more details. | |
12 | # |
|
12 | # | |
13 | # You should have received a copy of the GNU General Public License |
|
13 | # You should have received a copy of the GNU General Public License | |
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
15 | """ |
|
15 | """ | |
16 | kallithea.tests.scripts.manual_test_crawler |
|
16 | kallithea.tests.scripts.manual_test_crawler | |
17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
18 |
|
18 | |||
19 | Test for crawling a project for memory usage |
|
19 | Test for crawling a project for memory usage | |
20 | This should be runned just as regular script together |
|
20 | This should be runned just as regular script together | |
21 | with a watch script that will show memory usage. |
|
21 | with a watch script that will show memory usage. | |
22 |
|
22 | |||
23 | watch -n1 ./kallithea/tests/mem_watch |
|
23 | watch -n1 ./kallithea/tests/mem_watch | |
24 |
|
24 | |||
25 | This file was forked by the Kallithea project in July 2014. |
|
25 | This file was forked by the Kallithea project in July 2014. | |
26 | Original author and date, and relevant copyright and licensing information is below: |
|
26 | Original author and date, and relevant copyright and licensing information is below: | |
27 | :created_on: Apr 21, 2010 |
|
27 | :created_on: Apr 21, 2010 | |
28 | :author: marcink |
|
28 | :author: marcink | |
29 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
29 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
30 | :license: GPLv3, see LICENSE.md for more details. |
|
30 | :license: GPLv3, see LICENSE.md for more details. | |
31 | """ |
|
31 | """ | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | import cookielib |
|
34 | import cookielib | |
35 | import urllib |
|
35 | import urllib | |
36 | import urllib2 |
|
36 | import urllib2 | |
37 | import time |
|
37 | import time | |
38 | import os |
|
38 | import os | |
39 | import sys |
|
39 | import sys | |
40 | import tempfile |
|
40 | import tempfile | |
41 |
from os.path import dirname |
|
41 | from os.path import dirname | |
42 |
|
42 | |||
43 | __here__ = os.path.abspath(__file__) |
|
43 | __here__ = os.path.abspath(__file__) | |
44 | __root__ = dn(dn(dn(__here__))) |
|
44 | __root__ = dirname(dirname(dirname(__here__))) | |
45 | sys.path.append(__root__) |
|
45 | sys.path.append(__root__) | |
46 |
|
46 | |||
47 | from kallithea.lib import vcs |
|
47 | from kallithea.lib import vcs | |
48 | from kallithea.lib.compat import OrderedSet |
|
48 | from kallithea.lib.compat import OrderedSet | |
49 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
49 | from kallithea.lib.vcs.exceptions import RepositoryError | |
50 |
|
50 | |||
51 | PASES = 3 |
|
51 | PASES = 3 | |
52 | HOST = 'http://127.0.0.1' |
|
52 | HOST = 'http://127.0.0.1' | |
53 | PORT = 5000 |
|
53 | PORT = 5000 | |
54 | BASE_URI = '%s:%s/' % (HOST, PORT) |
|
54 | BASE_URI = '%s:%s/' % (HOST, PORT) | |
55 |
|
55 | |||
56 | if len(sys.argv) == 2: |
|
56 | if len(sys.argv) == 2: | |
57 | BASE_URI = sys.argv[1] |
|
57 | BASE_URI = sys.argv[1] | |
58 |
|
58 | |||
59 | if not BASE_URI.endswith('/'): |
|
59 | if not BASE_URI.endswith('/'): | |
60 | BASE_URI += '/' |
|
60 | BASE_URI += '/' | |
61 |
|
61 | |||
62 | print 'Crawling @ %s' % BASE_URI |
|
62 | print 'Crawling @ %s' % BASE_URI | |
63 | BASE_URI += '%s' |
|
63 | BASE_URI += '%s' | |
64 | PROJECT_PATH = os.path.join('/', 'home', 'username', 'repos') |
|
64 | PROJECT_PATH = os.path.join('/', 'home', 'username', 'repos') | |
65 | PROJECTS = [ |
|
65 | PROJECTS = [ | |
66 | #'linux-magx-pbranch', |
|
66 | #'linux-magx-pbranch', | |
67 | 'CPython', |
|
67 | 'CPython', | |
68 | 'kallithea', |
|
68 | 'kallithea', | |
69 | ] |
|
69 | ] | |
70 |
|
70 | |||
71 |
|
71 | |||
72 | cj = cookielib.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt')) |
|
72 | cj = cookielib.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt')) | |
73 | o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) |
|
73 | o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) | |
74 | o.addheaders = [ |
|
74 | o.addheaders = [ | |
75 | ('User-agent', 'kallithea-crawler'), |
|
75 | ('User-agent', 'kallithea-crawler'), | |
76 | ('Accept-Language', 'en - us, en;q = 0.5') |
|
76 | ('Accept-Language', 'en - us, en;q = 0.5') | |
77 | ] |
|
77 | ] | |
78 |
|
78 | |||
79 | urllib2.install_opener(o) |
|
79 | urllib2.install_opener(o) | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | def _get_repo(proj): |
|
82 | def _get_repo(proj): | |
83 | if isinstance(proj, basestring): |
|
83 | if isinstance(proj, basestring): | |
84 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj)) |
|
84 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj)) | |
85 | proj = proj |
|
85 | proj = proj | |
86 | else: |
|
86 | else: | |
87 | repo = proj |
|
87 | repo = proj | |
88 | proj = repo.name |
|
88 | proj = repo.name | |
89 |
|
89 | |||
90 | return repo, proj |
|
90 | return repo, proj | |
91 |
|
91 | |||
92 |
|
92 | |||
def test_changelog_walk(proj, pages=100):
    repo, proj = _get_repo(proj)

    total_time = 0
    page = '/'.join((proj, 'changelog',))
    for i in range(1, pages + 1):
        full_uri = (BASE_URI % page) + '?' + urllib.urlencode({'page': i})
        s = time.time()
        f = o.open(full_uri)

        assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri)

        size = len(f.read())
        e = time.time() - s
        total_time += e
        print 'visited %s size:%s req:%.3f s' % (full_uri, size, e)

    print 'total_time', total_time
    print 'average on req', total_time / float(pages)


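# Visit the changeset view for every changeset in the repository, up to
# `limit` changesets, timing each request.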
def test_changeset_walk(proj, limit=None):
    repo, proj = _get_repo(proj)

    print 'processing', os.path.join(PROJECT_PATH, proj)
    total_time = 0

    cnt = 0
    for i in repo:
        cnt += 1
        raw_cs = '/'.join((proj, 'changeset', i.raw_id))
        full_uri = (BASE_URI % raw_cs)
        print '%s visiting %s %s' % (cnt, full_uri, i)
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
        print '%s visited %s %s size:%s req:%.3f s' % (cnt, full_uri, i, size, e)
        if limit and cnt == limit:
            break

    print 'total_time', total_time
    print 'average on req', total_time / float(cnt)


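# Collect every directory and file path reachable from the tip changeset,
# then visit the corresponding files view for each path, up to `limit`.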
def test_files_walk(proj, limit=100):
    repo, proj = _get_repo(proj)

    print 'processing', os.path.join(PROJECT_PATH, proj)
    total_time = 0

    paths_ = OrderedSet([''])
    try:
        tip = repo.get_changeset('tip')
        for topnode, dirs, files in tip.walk('/'):
            for d in dirs:
                paths_.add(d.path)
                for f in d:
                    paths_.add(f.path)

            for f in files:
                paths_.add(f.path)

    except RepositoryError:
        pass

    cnt = 0
    for f in paths_:
        cnt += 1
        file_path = '/'.join((proj, 'files', 'tip', f))
        full_uri = (BASE_URI % file_path)
        print '%s visiting %s' % (cnt, full_uri)
        s = time.time()
        res = o.open(full_uri)
        size = len(res.read())
        e = time.time() - s
        total_time += e
        print '%s visited OK size:%s req:%.3f s' % (cnt, size, e)
        if limit and cnt == limit:
            break

    print 'total_time', total_time
    print 'average on req', total_time / float(cnt)


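# Crawl every configured project PASSES times, exercising the changelog,
# changeset, and files views in turn.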
if __name__ == '__main__':
    for path in PROJECTS:
        repo = vcs.get_repo(os.path.join(PROJECT_PATH, path))
        for i in range(PASSES):
            print 'PASS %s/%s' % (i + 1, PASSES)
            test_changelog_walk(repo, pages=80)
            test_changeset_walk(repo, limit=100)
            test_files_walk(repo, limit=100)
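
# Usage sketch (the script filename is assumed here): run with no arguments
# to crawl the default BASE_URI, or pass the root URI of the instance to
# benchmark, e.g.:
#   python test_crawler.py http://127.0.0.1:8080/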