Fixed methods for checking if a path in routes is a repo...
marcink -
r1505:bb6ba744 beta
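In short, check_repo_fast previously answered "is this directory free?" and returned False when the path already existed, which is why routing.py negated it (return not cr(...)). After this commit it answers "is this path a valid repository?" directly, and a new check_repos_group_fast recognizes repository-group directories. Consolidated from the hunks below into one readable sketch (module boilerplate omitted; a summary of the diff, not standalone verified code), the two helpers now read roughly:

import os

from vcs.utils.helpers import get_scm
from vcs.exceptions import VCSError


def check_repo_fast(repo_name, base_path):
    """Return True if base_path/repo_name is a valid repository."""
    full_path = os.path.join(base_path, repo_name)
    try:
        get_scm(full_path)  # raises VCSError when no repository is found there
        return True
    except VCSError:
        return False


def check_repos_group_fast(repos_group_name, base_path):
    """Return True if the path is a repos group: a directory that is not a repo."""
    full_path = os.path.join(base_path, repos_group_name)
    # a valid repository is never treated as a group
    if check_repo_fast(repos_group_name, base_path):
        return False
    # any other existing directory counts as a repositories group
    return os.path.isdir(full_path)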
@@ -1,423 +1,441 @@
1 """
1 """
2 Routes configuration
2 Routes configuration
3
3
4 The more specific and detailed routes should be defined first so they
4 The more specific and detailed routes should be defined first so they
5 may take precedence over the more generic routes. For more information
5 may take precedence over the more generic routes. For more information
6 refer to the routes manual at http://routes.groovie.org/docs/
6 refer to the routes manual at http://routes.groovie.org/docs/
7 """
7 """
8 from __future__ import with_statement
8 from __future__ import with_statement
9 from routes import Mapper
9 from routes import Mapper
-from rhodecode.lib.utils import check_repo_fast as cr
12 # prefix for non repository related links needs to be prefixed with `/`
12 # prefix for non repository related links needs to be prefixed with `/`
13 ADMIN_PREFIX = '/_admin'
13 ADMIN_PREFIX = '/_admin'
14
14
15
15
16 def make_map(config):
16 def make_map(config):
17 """Create, configure and return the routes Mapper"""
17 """Create, configure and return the routes Mapper"""
18 rmap = Mapper(directory=config['pylons.paths']['controllers'],
18 rmap = Mapper(directory=config['pylons.paths']['controllers'],
19 always_scan=config['debug'])
19 always_scan=config['debug'])
20 rmap.minimization = False
20 rmap.minimization = False
21 rmap.explicit = False
21 rmap.explicit = False
22
22
+from rhodecode.lib.utils import check_repo_fast
+from rhodecode.lib.utils import check_repos_group_fast
+
 def check_repo(environ, match_dict):
     """
     check for valid repository for proper 404 handling
+
     :param environ:
     :param match_dict:
     """
+
     repo_name = match_dict.get('repo_name')
-    return not cr(repo_name, config['base_path'])
+    return check_repo_fast(repo_name, config['base_path'])
+
+def check_group(environ, match_dict):
+    """
+    check for valid repositories group for proper 404 handling
+
+    :param environ:
+    :param match_dict:
+    """
+    repos_group_name = match_dict.get('group_name')
+
+    return check_repos_group_fast(repos_group_name, config['base_path'])

33 def check_int(environ, match_dict):
49 def check_int(environ, match_dict):
34 return match_dict.get('id').isdigit()
50 return match_dict.get('id').isdigit()
35
51
36
37
38
39 # The ErrorController route (handles 404/500 error pages); it should
52 # The ErrorController route (handles 404/500 error pages); it should
40 # likely stay at the top, ensuring it can always be resolved
53 # likely stay at the top, ensuring it can always be resolved
41 rmap.connect('/error/{action}', controller='error')
54 rmap.connect('/error/{action}', controller='error')
42 rmap.connect('/error/{action}/{id}', controller='error')
55 rmap.connect('/error/{action}/{id}', controller='error')
43
56
44 #==========================================================================
57 #==========================================================================
45 # CUSTOM ROUTES HERE
58 # CUSTOM ROUTES HERE
46 #==========================================================================
59 #==========================================================================
47
60
48 #MAIN PAGE
61 #MAIN PAGE
49 rmap.connect('home', '/', controller='home', action='index')
62 rmap.connect('home', '/', controller='home', action='index')
50 rmap.connect('repo_switcher', '/repos', controller='home',
63 rmap.connect('repo_switcher', '/repos', controller='home',
51 action='repo_switcher')
64 action='repo_switcher')
52 rmap.connect('bugtracker',
65 rmap.connect('bugtracker',
53 "http://bitbucket.org/marcinkuzminski/rhodecode/issues",
66 "http://bitbucket.org/marcinkuzminski/rhodecode/issues",
54 _static=True)
67 _static=True)
55 rmap.connect('rhodecode_official', "http://rhodecode.org", _static=True)
68 rmap.connect('rhodecode_official', "http://rhodecode.org", _static=True)
56
69
57 #ADMIN REPOSITORY REST ROUTES
70 #ADMIN REPOSITORY REST ROUTES
58 with rmap.submapper(path_prefix=ADMIN_PREFIX,
71 with rmap.submapper(path_prefix=ADMIN_PREFIX,
59 controller='admin/repos') as m:
72 controller='admin/repos') as m:
60 m.connect("repos", "/repos",
73 m.connect("repos", "/repos",
61 action="create", conditions=dict(method=["POST"]))
74 action="create", conditions=dict(method=["POST"]))
62 m.connect("repos", "/repos",
75 m.connect("repos", "/repos",
63 action="index", conditions=dict(method=["GET"]))
76 action="index", conditions=dict(method=["GET"]))
64 m.connect("formatted_repos", "/repos.{format}",
77 m.connect("formatted_repos", "/repos.{format}",
65 action="index",
78 action="index",
66 conditions=dict(method=["GET"]))
79 conditions=dict(method=["GET"]))
67 m.connect("new_repo", "/repos/new",
80 m.connect("new_repo", "/repos/new",
68 action="new", conditions=dict(method=["GET"]))
81 action="new", conditions=dict(method=["GET"]))
69 m.connect("formatted_new_repo", "/repos/new.{format}",
82 m.connect("formatted_new_repo", "/repos/new.{format}",
70 action="new", conditions=dict(method=["GET"]))
83 action="new", conditions=dict(method=["GET"]))
71 m.connect("/repos/{repo_name:.*}",
84 m.connect("/repos/{repo_name:.*}",
72 action="update", conditions=dict(method=["PUT"],
85 action="update", conditions=dict(method=["PUT"],
73 function=check_repo))
86 function=check_repo))
74 m.connect("/repos/{repo_name:.*}",
87 m.connect("/repos/{repo_name:.*}",
75 action="delete", conditions=dict(method=["DELETE"],
88 action="delete", conditions=dict(method=["DELETE"],
76 function=check_repo))
89 function=check_repo))
77 m.connect("edit_repo", "/repos/{repo_name:.*}/edit",
90 m.connect("edit_repo", "/repos/{repo_name:.*}/edit",
78 action="edit", conditions=dict(method=["GET"],
91 action="edit", conditions=dict(method=["GET"],
79 function=check_repo))
92 function=check_repo))
80 m.connect("formatted_edit_repo", "/repos/{repo_name:.*}.{format}/edit",
93 m.connect("formatted_edit_repo", "/repos/{repo_name:.*}.{format}/edit",
81 action="edit", conditions=dict(method=["GET"],
94 action="edit", conditions=dict(method=["GET"],
82 function=check_repo))
95 function=check_repo))
83 m.connect("repo", "/repos/{repo_name:.*}",
96 m.connect("repo", "/repos/{repo_name:.*}",
84 action="show", conditions=dict(method=["GET"],
97 action="show", conditions=dict(method=["GET"],
85 function=check_repo))
98 function=check_repo))
86 m.connect("formatted_repo", "/repos/{repo_name:.*}.{format}",
99 m.connect("formatted_repo", "/repos/{repo_name:.*}.{format}",
87 action="show", conditions=dict(method=["GET"],
100 action="show", conditions=dict(method=["GET"],
88 function=check_repo))
101 function=check_repo))
89 #ajax delete repo perm user
102 #ajax delete repo perm user
90 m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*}",
103 m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*}",
91 action="delete_perm_user", conditions=dict(method=["DELETE"],
104 action="delete_perm_user", conditions=dict(method=["DELETE"],
92 function=check_repo))
105 function=check_repo))
93 #ajax delete repo perm users_group
106 #ajax delete repo perm users_group
94 m.connect('delete_repo_users_group',
107 m.connect('delete_repo_users_group',
95 "/repos_delete_users_group/{repo_name:.*}",
108 "/repos_delete_users_group/{repo_name:.*}",
96 action="delete_perm_users_group",
109 action="delete_perm_users_group",
97 conditions=dict(method=["DELETE"], function=check_repo))
110 conditions=dict(method=["DELETE"], function=check_repo))
98
111
99 #settings actions
112 #settings actions
100 m.connect('repo_stats', "/repos_stats/{repo_name:.*}",
113 m.connect('repo_stats', "/repos_stats/{repo_name:.*}",
101 action="repo_stats", conditions=dict(method=["DELETE"],
114 action="repo_stats", conditions=dict(method=["DELETE"],
102 function=check_repo))
115 function=check_repo))
103 m.connect('repo_cache', "/repos_cache/{repo_name:.*}",
116 m.connect('repo_cache', "/repos_cache/{repo_name:.*}",
104 action="repo_cache", conditions=dict(method=["DELETE"],
117 action="repo_cache", conditions=dict(method=["DELETE"],
105 function=check_repo))
118 function=check_repo))
106 m.connect('repo_public_journal',
119 m.connect('repo_public_journal',
107 "/repos_public_journal/{repo_name:.*}",
120 "/repos_public_journal/{repo_name:.*}",
108 action="repo_public_journal", conditions=dict(method=["PUT"],
121 action="repo_public_journal", conditions=dict(method=["PUT"],
109 function=check_repo))
122 function=check_repo))
110 m.connect('repo_pull', "/repo_pull/{repo_name:.*}",
123 m.connect('repo_pull', "/repo_pull/{repo_name:.*}",
111 action="repo_pull", conditions=dict(method=["PUT"],
124 action="repo_pull", conditions=dict(method=["PUT"],
112 function=check_repo))
125 function=check_repo))
113
126
114 with rmap.submapper(path_prefix=ADMIN_PREFIX,
127 with rmap.submapper(path_prefix=ADMIN_PREFIX,
115 controller='admin/repos_groups') as m:
128 controller='admin/repos_groups') as m:
116 m.connect("repos_groups", "/repos_groups",
129 m.connect("repos_groups", "/repos_groups",
117 action="create", conditions=dict(method=["POST"]))
130 action="create", conditions=dict(method=["POST"]))
118 m.connect("repos_groups", "/repos_groups",
131 m.connect("repos_groups", "/repos_groups",
119 action="index", conditions=dict(method=["GET"]))
132 action="index", conditions=dict(method=["GET"]))
120 m.connect("formatted_repos_groups", "/repos_groups.{format}",
133 m.connect("formatted_repos_groups", "/repos_groups.{format}",
121 action="index", conditions=dict(method=["GET"]))
134 action="index", conditions=dict(method=["GET"]))
122 m.connect("new_repos_group", "/repos_groups/new",
135 m.connect("new_repos_group", "/repos_groups/new",
123 action="new", conditions=dict(method=["GET"]))
136 action="new", conditions=dict(method=["GET"]))
124 m.connect("formatted_new_repos_group", "/repos_groups/new.{format}",
137 m.connect("formatted_new_repos_group", "/repos_groups/new.{format}",
125 action="new", conditions=dict(method=["GET"]))
138 action="new", conditions=dict(method=["GET"]))
126 m.connect("update_repos_group", "/repos_groups/{id}",
139 m.connect("update_repos_group", "/repos_groups/{id}",
127 action="update", conditions=dict(method=["PUT"],
140 action="update", conditions=dict(method=["PUT"],
128 function=check_int))
141 function=check_int))
129 m.connect("delete_repos_group", "/repos_groups/{id}",
142 m.connect("delete_repos_group", "/repos_groups/{id}",
130 action="delete", conditions=dict(method=["DELETE"],
143 action="delete", conditions=dict(method=["DELETE"],
131 function=check_int))
144 function=check_int))
132 m.connect("edit_repos_group", "/repos_groups/{id}/edit",
145 m.connect("edit_repos_group", "/repos_groups/{id}/edit",
133 action="edit", conditions=dict(method=["GET"],
146 action="edit", conditions=dict(method=["GET"],
134 function=check_int))
147 function=check_int))
135 m.connect("formatted_edit_repos_group",
148 m.connect("formatted_edit_repos_group",
136 "/repos_groups/{id}.{format}/edit",
149 "/repos_groups/{id}.{format}/edit",
137 action="edit", conditions=dict(method=["GET"],
150 action="edit", conditions=dict(method=["GET"],
138 function=check_int))
151 function=check_int))
139 m.connect("repos_group", "/repos_groups/{id}",
152 m.connect("repos_group", "/repos_groups/{id}",
140 action="show", conditions=dict(method=["GET"],
153 action="show", conditions=dict(method=["GET"],
141 function=check_int))
154 function=check_int))
142 m.connect("formatted_repos_group", "/repos_groups/{id}.{format}",
155 m.connect("formatted_repos_group", "/repos_groups/{id}.{format}",
143 action="show", conditions=dict(method=["GET"],
156 action="show", conditions=dict(method=["GET"],
144 function=check_int))
157 function=check_int))
145
158
146 #ADMIN USER REST ROUTES
159 #ADMIN USER REST ROUTES
147 with rmap.submapper(path_prefix=ADMIN_PREFIX,
160 with rmap.submapper(path_prefix=ADMIN_PREFIX,
148 controller='admin/users') as m:
161 controller='admin/users') as m:
149 m.connect("users", "/users",
162 m.connect("users", "/users",
150 action="create", conditions=dict(method=["POST"]))
163 action="create", conditions=dict(method=["POST"]))
151 m.connect("users", "/users",
164 m.connect("users", "/users",
152 action="index", conditions=dict(method=["GET"]))
165 action="index", conditions=dict(method=["GET"]))
153 m.connect("formatted_users", "/users.{format}",
166 m.connect("formatted_users", "/users.{format}",
154 action="index", conditions=dict(method=["GET"]))
167 action="index", conditions=dict(method=["GET"]))
155 m.connect("new_user", "/users/new",
168 m.connect("new_user", "/users/new",
156 action="new", conditions=dict(method=["GET"]))
169 action="new", conditions=dict(method=["GET"]))
157 m.connect("formatted_new_user", "/users/new.{format}",
170 m.connect("formatted_new_user", "/users/new.{format}",
158 action="new", conditions=dict(method=["GET"]))
171 action="new", conditions=dict(method=["GET"]))
159 m.connect("update_user", "/users/{id}",
172 m.connect("update_user", "/users/{id}",
160 action="update", conditions=dict(method=["PUT"]))
173 action="update", conditions=dict(method=["PUT"]))
161 m.connect("delete_user", "/users/{id}",
174 m.connect("delete_user", "/users/{id}",
162 action="delete", conditions=dict(method=["DELETE"]))
175 action="delete", conditions=dict(method=["DELETE"]))
163 m.connect("edit_user", "/users/{id}/edit",
176 m.connect("edit_user", "/users/{id}/edit",
164 action="edit", conditions=dict(method=["GET"]))
177 action="edit", conditions=dict(method=["GET"]))
165 m.connect("formatted_edit_user",
178 m.connect("formatted_edit_user",
166 "/users/{id}.{format}/edit",
179 "/users/{id}.{format}/edit",
167 action="edit", conditions=dict(method=["GET"]))
180 action="edit", conditions=dict(method=["GET"]))
168 m.connect("user", "/users/{id}",
181 m.connect("user", "/users/{id}",
169 action="show", conditions=dict(method=["GET"]))
182 action="show", conditions=dict(method=["GET"]))
170 m.connect("formatted_user", "/users/{id}.{format}",
183 m.connect("formatted_user", "/users/{id}.{format}",
171 action="show", conditions=dict(method=["GET"]))
184 action="show", conditions=dict(method=["GET"]))
172
185
173 #EXTRAS USER ROUTES
186 #EXTRAS USER ROUTES
174 m.connect("user_perm", "/users_perm/{id}",
187 m.connect("user_perm", "/users_perm/{id}",
175 action="update_perm", conditions=dict(method=["PUT"]))
188 action="update_perm", conditions=dict(method=["PUT"]))
176
189
177 #ADMIN USERS REST ROUTES
190 #ADMIN USERS REST ROUTES
178 with rmap.submapper(path_prefix=ADMIN_PREFIX,
191 with rmap.submapper(path_prefix=ADMIN_PREFIX,
179 controller='admin/users_groups') as m:
192 controller='admin/users_groups') as m:
180 m.connect("users_groups", "/users_groups",
193 m.connect("users_groups", "/users_groups",
181 action="create", conditions=dict(method=["POST"]))
194 action="create", conditions=dict(method=["POST"]))
182 m.connect("users_groups", "/users_groups",
195 m.connect("users_groups", "/users_groups",
183 action="index", conditions=dict(method=["GET"]))
196 action="index", conditions=dict(method=["GET"]))
184 m.connect("formatted_users_groups", "/users_groups.{format}",
197 m.connect("formatted_users_groups", "/users_groups.{format}",
185 action="index", conditions=dict(method=["GET"]))
198 action="index", conditions=dict(method=["GET"]))
186 m.connect("new_users_group", "/users_groups/new",
199 m.connect("new_users_group", "/users_groups/new",
187 action="new", conditions=dict(method=["GET"]))
200 action="new", conditions=dict(method=["GET"]))
188 m.connect("formatted_new_users_group", "/users_groups/new.{format}",
201 m.connect("formatted_new_users_group", "/users_groups/new.{format}",
189 action="new", conditions=dict(method=["GET"]))
202 action="new", conditions=dict(method=["GET"]))
190 m.connect("update_users_group", "/users_groups/{id}",
203 m.connect("update_users_group", "/users_groups/{id}",
191 action="update", conditions=dict(method=["PUT"]))
204 action="update", conditions=dict(method=["PUT"]))
192 m.connect("delete_users_group", "/users_groups/{id}",
205 m.connect("delete_users_group", "/users_groups/{id}",
193 action="delete", conditions=dict(method=["DELETE"]))
206 action="delete", conditions=dict(method=["DELETE"]))
194 m.connect("edit_users_group", "/users_groups/{id}/edit",
207 m.connect("edit_users_group", "/users_groups/{id}/edit",
195 action="edit", conditions=dict(method=["GET"]))
208 action="edit", conditions=dict(method=["GET"]))
196 m.connect("formatted_edit_users_group",
209 m.connect("formatted_edit_users_group",
197 "/users_groups/{id}.{format}/edit",
210 "/users_groups/{id}.{format}/edit",
198 action="edit", conditions=dict(method=["GET"]))
211 action="edit", conditions=dict(method=["GET"]))
199 m.connect("users_group", "/users_groups/{id}",
212 m.connect("users_group", "/users_groups/{id}",
200 action="show", conditions=dict(method=["GET"]))
213 action="show", conditions=dict(method=["GET"]))
201 m.connect("formatted_users_group", "/users_groups/{id}.{format}",
214 m.connect("formatted_users_group", "/users_groups/{id}.{format}",
202 action="show", conditions=dict(method=["GET"]))
215 action="show", conditions=dict(method=["GET"]))
203
216
204 #EXTRAS USER ROUTES
217 #EXTRAS USER ROUTES
205 m.connect("users_group_perm", "/users_groups_perm/{id}",
218 m.connect("users_group_perm", "/users_groups_perm/{id}",
206 action="update_perm", conditions=dict(method=["PUT"]))
219 action="update_perm", conditions=dict(method=["PUT"]))
207
220
208 #ADMIN GROUP REST ROUTES
221 #ADMIN GROUP REST ROUTES
209 rmap.resource('group', 'groups',
222 rmap.resource('group', 'groups',
210 controller='admin/groups', path_prefix=ADMIN_PREFIX)
223 controller='admin/groups', path_prefix=ADMIN_PREFIX)
211
224
212 #ADMIN PERMISSIONS REST ROUTES
225 #ADMIN PERMISSIONS REST ROUTES
213 rmap.resource('permission', 'permissions',
226 rmap.resource('permission', 'permissions',
214 controller='admin/permissions', path_prefix=ADMIN_PREFIX)
227 controller='admin/permissions', path_prefix=ADMIN_PREFIX)
215
228
216 ##ADMIN LDAP SETTINGS
229 ##ADMIN LDAP SETTINGS
217 rmap.connect('ldap_settings', '%s/ldap' % ADMIN_PREFIX,
230 rmap.connect('ldap_settings', '%s/ldap' % ADMIN_PREFIX,
218 controller='admin/ldap_settings', action='ldap_settings',
231 controller='admin/ldap_settings', action='ldap_settings',
219 conditions=dict(method=["POST"]))
232 conditions=dict(method=["POST"]))
220
233
221 rmap.connect('ldap_home', '%s/ldap' % ADMIN_PREFIX,
234 rmap.connect('ldap_home', '%s/ldap' % ADMIN_PREFIX,
222 controller='admin/ldap_settings')
235 controller='admin/ldap_settings')
223
236
224 #ADMIN SETTINGS REST ROUTES
237 #ADMIN SETTINGS REST ROUTES
225 with rmap.submapper(path_prefix=ADMIN_PREFIX,
238 with rmap.submapper(path_prefix=ADMIN_PREFIX,
226 controller='admin/settings') as m:
239 controller='admin/settings') as m:
227 m.connect("admin_settings", "/settings",
240 m.connect("admin_settings", "/settings",
228 action="create", conditions=dict(method=["POST"]))
241 action="create", conditions=dict(method=["POST"]))
229 m.connect("admin_settings", "/settings",
242 m.connect("admin_settings", "/settings",
230 action="index", conditions=dict(method=["GET"]))
243 action="index", conditions=dict(method=["GET"]))
231 m.connect("formatted_admin_settings", "/settings.{format}",
244 m.connect("formatted_admin_settings", "/settings.{format}",
232 action="index", conditions=dict(method=["GET"]))
245 action="index", conditions=dict(method=["GET"]))
233 m.connect("admin_new_setting", "/settings/new",
246 m.connect("admin_new_setting", "/settings/new",
234 action="new", conditions=dict(method=["GET"]))
247 action="new", conditions=dict(method=["GET"]))
235 m.connect("formatted_admin_new_setting", "/settings/new.{format}",
248 m.connect("formatted_admin_new_setting", "/settings/new.{format}",
236 action="new", conditions=dict(method=["GET"]))
249 action="new", conditions=dict(method=["GET"]))
237 m.connect("/settings/{setting_id}",
250 m.connect("/settings/{setting_id}",
238 action="update", conditions=dict(method=["PUT"]))
251 action="update", conditions=dict(method=["PUT"]))
239 m.connect("/settings/{setting_id}",
252 m.connect("/settings/{setting_id}",
240 action="delete", conditions=dict(method=["DELETE"]))
253 action="delete", conditions=dict(method=["DELETE"]))
241 m.connect("admin_edit_setting", "/settings/{setting_id}/edit",
254 m.connect("admin_edit_setting", "/settings/{setting_id}/edit",
242 action="edit", conditions=dict(method=["GET"]))
255 action="edit", conditions=dict(method=["GET"]))
243 m.connect("formatted_admin_edit_setting",
256 m.connect("formatted_admin_edit_setting",
244 "/settings/{setting_id}.{format}/edit",
257 "/settings/{setting_id}.{format}/edit",
245 action="edit", conditions=dict(method=["GET"]))
258 action="edit", conditions=dict(method=["GET"]))
246 m.connect("admin_setting", "/settings/{setting_id}",
259 m.connect("admin_setting", "/settings/{setting_id}",
247 action="show", conditions=dict(method=["GET"]))
260 action="show", conditions=dict(method=["GET"]))
248 m.connect("formatted_admin_setting", "/settings/{setting_id}.{format}",
261 m.connect("formatted_admin_setting", "/settings/{setting_id}.{format}",
249 action="show", conditions=dict(method=["GET"]))
262 action="show", conditions=dict(method=["GET"]))
250 m.connect("admin_settings_my_account", "/my_account",
263 m.connect("admin_settings_my_account", "/my_account",
251 action="my_account", conditions=dict(method=["GET"]))
264 action="my_account", conditions=dict(method=["GET"]))
252 m.connect("admin_settings_my_account_update", "/my_account_update",
265 m.connect("admin_settings_my_account_update", "/my_account_update",
253 action="my_account_update", conditions=dict(method=["PUT"]))
266 action="my_account_update", conditions=dict(method=["PUT"]))
254 m.connect("admin_settings_create_repository", "/create_repository",
267 m.connect("admin_settings_create_repository", "/create_repository",
255 action="create_repository", conditions=dict(method=["GET"]))
268 action="create_repository", conditions=dict(method=["GET"]))
256
269
257
270
258 #ADMIN MAIN PAGES
271 #ADMIN MAIN PAGES
259 with rmap.submapper(path_prefix=ADMIN_PREFIX,
272 with rmap.submapper(path_prefix=ADMIN_PREFIX,
260 controller='admin/admin') as m:
273 controller='admin/admin') as m:
261 m.connect('admin_home', '', action='index')
274 m.connect('admin_home', '', action='index')
262 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
275 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
263 action='add_repo')
276 action='add_repo')
264
277
265 #==========================================================================
278 #==========================================================================
266 # API V1
279 # API V1
267 #==========================================================================
280 #==========================================================================
268 with rmap.submapper(path_prefix=ADMIN_PREFIX,
281 with rmap.submapper(path_prefix=ADMIN_PREFIX,
269 controller='api/api') as m:
282 controller='api/api') as m:
270 m.connect('api', '/api')
283 m.connect('api', '/api')
271
284
272
285
273 #USER JOURNAL
286 #USER JOURNAL
274 rmap.connect('journal', '%s/journal' % ADMIN_PREFIX, controller='journal')
287 rmap.connect('journal', '%s/journal' % ADMIN_PREFIX, controller='journal')
275
288
276 rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
289 rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
277 controller='journal', action="public_journal")
290 controller='journal', action="public_journal")
278
291
279 rmap.connect('public_journal_rss', '%s/public_journal_rss' % ADMIN_PREFIX,
292 rmap.connect('public_journal_rss', '%s/public_journal_rss' % ADMIN_PREFIX,
280 controller='journal', action="public_journal_rss")
293 controller='journal', action="public_journal_rss")
281
294
282 rmap.connect('public_journal_atom',
295 rmap.connect('public_journal_atom',
283 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
296 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
284 action="public_journal_atom")
297 action="public_journal_atom")
285
298
286 rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
299 rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
287 controller='journal', action='toggle_following',
300 controller='journal', action='toggle_following',
288 conditions=dict(method=["POST"]))
301 conditions=dict(method=["POST"]))
289
302
290 #SEARCH
303 #SEARCH
291 rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
304 rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
292 rmap.connect('search_repo', '%s/search/{search_repo:.*}' % ADMIN_PREFIX,
305 rmap.connect('search_repo', '%s/search/{search_repo:.*}' % ADMIN_PREFIX,
293 controller='search')
306 controller='search')
294
307
295 #LOGIN/LOGOUT/REGISTER/SIGN IN
308 #LOGIN/LOGOUT/REGISTER/SIGN IN
296 rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
309 rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
297 rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
310 rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
298 action='logout')
311 action='logout')
299
312
300 rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
313 rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
301 action='register')
314 action='register')
302
315
303 rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
316 rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
304 controller='login', action='password_reset')
317 controller='login', action='password_reset')
305
318
306 rmap.connect('reset_password_confirmation',
319 rmap.connect('reset_password_confirmation',
307 '%s/password_reset_confirmation' % ADMIN_PREFIX,
320 '%s/password_reset_confirmation' % ADMIN_PREFIX,
308 controller='login', action='password_reset_confirmation')
321 controller='login', action='password_reset_confirmation')
309
322
310 #FEEDS
323 #FEEDS
311 rmap.connect('rss_feed_home', '/{repo_name:.*}/feed/rss',
324 rmap.connect('rss_feed_home', '/{repo_name:.*}/feed/rss',
312 controller='feed', action='rss',
325 controller='feed', action='rss',
313 conditions=dict(function=check_repo))
326 conditions=dict(function=check_repo))
314
327
315 rmap.connect('atom_feed_home', '/{repo_name:.*}/feed/atom',
328 rmap.connect('atom_feed_home', '/{repo_name:.*}/feed/atom',
316 controller='feed', action='atom',
329 controller='feed', action='atom',
317 conditions=dict(function=check_repo))
330 conditions=dict(function=check_repo))
318
331
319 #==========================================================================
332 #==========================================================================
320 # REPOSITORY ROUTES
333 # REPOSITORY ROUTES
321 #==========================================================================
334 #==========================================================================
+rmap.connect('summary_home', '/{repo_name:.*}',
+             controller='summary',
+             conditions=dict(function=check_repo))
+
+# rmap.connect('repo_group_home', '/{group_name:.*}',
+#              controller='admin/repos_groups',action="show_by_name",
+#              conditions=dict(function=check_group))
+
322 rmap.connect('changeset_home', '/{repo_name:.*}/changeset/{revision}',
343 rmap.connect('changeset_home', '/{repo_name:.*}/changeset/{revision}',
323 controller='changeset', revision='tip',
344 controller='changeset', revision='tip',
324 conditions=dict(function=check_repo))
345 conditions=dict(function=check_repo))
325
346
326 rmap.connect('raw_changeset_home',
347 rmap.connect('raw_changeset_home',
327 '/{repo_name:.*}/raw-changeset/{revision}',
348 '/{repo_name:.*}/raw-changeset/{revision}',
328 controller='changeset', action='raw_changeset',
349 controller='changeset', action='raw_changeset',
329 revision='tip', conditions=dict(function=check_repo))
350 revision='tip', conditions=dict(function=check_repo))
330
351
-rmap.connect('summary_home', '/{repo_name:.*}',
-             controller='summary', conditions=dict(function=check_repo))
-
334 rmap.connect('summary_home', '/{repo_name:.*}/summary',
352 rmap.connect('summary_home', '/{repo_name:.*}/summary',
335 controller='summary', conditions=dict(function=check_repo))
353 controller='summary', conditions=dict(function=check_repo))
336
354
337 rmap.connect('shortlog_home', '/{repo_name:.*}/shortlog',
355 rmap.connect('shortlog_home', '/{repo_name:.*}/shortlog',
338 controller='shortlog', conditions=dict(function=check_repo))
356 controller='shortlog', conditions=dict(function=check_repo))
339
357
340 rmap.connect('branches_home', '/{repo_name:.*}/branches',
358 rmap.connect('branches_home', '/{repo_name:.*}/branches',
341 controller='branches', conditions=dict(function=check_repo))
359 controller='branches', conditions=dict(function=check_repo))
342
360
343 rmap.connect('tags_home', '/{repo_name:.*}/tags',
361 rmap.connect('tags_home', '/{repo_name:.*}/tags',
344 controller='tags', conditions=dict(function=check_repo))
362 controller='tags', conditions=dict(function=check_repo))
345
363
346 rmap.connect('changelog_home', '/{repo_name:.*}/changelog',
364 rmap.connect('changelog_home', '/{repo_name:.*}/changelog',
347 controller='changelog', conditions=dict(function=check_repo))
365 controller='changelog', conditions=dict(function=check_repo))
348
366
349 rmap.connect('changelog_details', '/{repo_name:.*}/changelog_details/{cs}',
367 rmap.connect('changelog_details', '/{repo_name:.*}/changelog_details/{cs}',
350 controller='changelog', action='changelog_details',
368 controller='changelog', action='changelog_details',
351 conditions=dict(function=check_repo))
369 conditions=dict(function=check_repo))
352
370
353 rmap.connect('files_home', '/{repo_name:.*}/files/{revision}/{f_path:.*}',
371 rmap.connect('files_home', '/{repo_name:.*}/files/{revision}/{f_path:.*}',
354 controller='files', revision='tip', f_path='',
372 controller='files', revision='tip', f_path='',
355 conditions=dict(function=check_repo))
373 conditions=dict(function=check_repo))
356
374
357 rmap.connect('files_diff_home', '/{repo_name:.*}/diff/{f_path:.*}',
375 rmap.connect('files_diff_home', '/{repo_name:.*}/diff/{f_path:.*}',
358 controller='files', action='diff', revision='tip', f_path='',
376 controller='files', action='diff', revision='tip', f_path='',
359 conditions=dict(function=check_repo))
377 conditions=dict(function=check_repo))
360
378
361 rmap.connect('files_rawfile_home',
379 rmap.connect('files_rawfile_home',
362 '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}',
380 '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}',
363 controller='files', action='rawfile', revision='tip',
381 controller='files', action='rawfile', revision='tip',
364 f_path='', conditions=dict(function=check_repo))
382 f_path='', conditions=dict(function=check_repo))
365
383
366 rmap.connect('files_raw_home',
384 rmap.connect('files_raw_home',
367 '/{repo_name:.*}/raw/{revision}/{f_path:.*}',
385 '/{repo_name:.*}/raw/{revision}/{f_path:.*}',
368 controller='files', action='raw', revision='tip', f_path='',
386 controller='files', action='raw', revision='tip', f_path='',
369 conditions=dict(function=check_repo))
387 conditions=dict(function=check_repo))
370
388
371 rmap.connect('files_annotate_home',
389 rmap.connect('files_annotate_home',
372 '/{repo_name:.*}/annotate/{revision}/{f_path:.*}',
390 '/{repo_name:.*}/annotate/{revision}/{f_path:.*}',
373 controller='files', action='annotate', revision='tip',
391 controller='files', action='annotate', revision='tip',
374 f_path='', conditions=dict(function=check_repo))
392 f_path='', conditions=dict(function=check_repo))
375
393
376 rmap.connect('files_edit_home',
394 rmap.connect('files_edit_home',
377 '/{repo_name:.*}/edit/{revision}/{f_path:.*}',
395 '/{repo_name:.*}/edit/{revision}/{f_path:.*}',
378 controller='files', action='edit', revision='tip',
396 controller='files', action='edit', revision='tip',
379 f_path='', conditions=dict(function=check_repo))
397 f_path='', conditions=dict(function=check_repo))
380
398
381 rmap.connect('files_add_home',
399 rmap.connect('files_add_home',
382 '/{repo_name:.*}/add/{revision}/{f_path:.*}',
400 '/{repo_name:.*}/add/{revision}/{f_path:.*}',
383 controller='files', action='add', revision='tip',
401 controller='files', action='add', revision='tip',
384 f_path='', conditions=dict(function=check_repo))
402 f_path='', conditions=dict(function=check_repo))
385
403
386 rmap.connect('files_archive_home', '/{repo_name:.*}/archive/{fname}',
404 rmap.connect('files_archive_home', '/{repo_name:.*}/archive/{fname}',
387 controller='files', action='archivefile',
405 controller='files', action='archivefile',
388 conditions=dict(function=check_repo))
406 conditions=dict(function=check_repo))
389
407
390 rmap.connect('files_nodelist_home',
408 rmap.connect('files_nodelist_home',
391 '/{repo_name:.*}/nodelist/{revision}/{f_path:.*}',
409 '/{repo_name:.*}/nodelist/{revision}/{f_path:.*}',
392 controller='files', action='nodelist',
410 controller='files', action='nodelist',
393 conditions=dict(function=check_repo))
411 conditions=dict(function=check_repo))
394
412
395 rmap.connect('repo_settings_delete', '/{repo_name:.*}/settings',
413 rmap.connect('repo_settings_delete', '/{repo_name:.*}/settings',
396 controller='settings', action="delete",
414 controller='settings', action="delete",
397 conditions=dict(method=["DELETE"], function=check_repo))
415 conditions=dict(method=["DELETE"], function=check_repo))
398
416
399 rmap.connect('repo_settings_update', '/{repo_name:.*}/settings',
417 rmap.connect('repo_settings_update', '/{repo_name:.*}/settings',
400 controller='settings', action="update",
418 controller='settings', action="update",
401 conditions=dict(method=["PUT"], function=check_repo))
419 conditions=dict(method=["PUT"], function=check_repo))
402
420
403 rmap.connect('repo_settings_home', '/{repo_name:.*}/settings',
421 rmap.connect('repo_settings_home', '/{repo_name:.*}/settings',
404 controller='settings', action='index',
422 controller='settings', action='index',
405 conditions=dict(function=check_repo))
423 conditions=dict(function=check_repo))
406
424
407 rmap.connect('repo_fork_create_home', '/{repo_name:.*}/fork',
425 rmap.connect('repo_fork_create_home', '/{repo_name:.*}/fork',
408 controller='settings', action='fork_create',
426 controller='settings', action='fork_create',
409 conditions=dict(function=check_repo, method=["POST"]))
427 conditions=dict(function=check_repo, method=["POST"]))
410
428
411 rmap.connect('repo_fork_home', '/{repo_name:.*}/fork',
429 rmap.connect('repo_fork_home', '/{repo_name:.*}/fork',
412 controller='settings', action='fork',
430 controller='settings', action='fork',
413 conditions=dict(function=check_repo))
431 conditions=dict(function=check_repo))
414
432
415 rmap.connect('repo_followers_home', '/{repo_name:.*}/followers',
433 rmap.connect('repo_followers_home', '/{repo_name:.*}/followers',
416 controller='followers', action='followers',
434 controller='followers', action='followers',
417 conditions=dict(function=check_repo))
435 conditions=dict(function=check_repo))
418
436
419 rmap.connect('repo_forks_home', '/{repo_name:.*}/forks',
437 rmap.connect('repo_forks_home', '/{repo_name:.*}/forks',
420 controller='forks', action='forks',
438 controller='forks', action='forks',
421 conditions=dict(function=check_repo))
439 conditions=dict(function=check_repo))
422
440
423 return rmap
441 return rmap
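The second hunk below touches rhodecode.lib.utils (per its docstring). One routing detail from the file above is worth spelling out first: the catch-all summary_home route ('/{repo_name:.*}') now sits ahead of more specific repository routes such as changeset_home, so it is presumably the check_repo condition that keeps it from swallowing URLs like '/{repo_name}/changeset/{revision}'; when a condition function returns False, Routes rejects that match and tries the later patterns. A minimal sketch of that fallthrough, using an illustrative stand-in condition (fake_check and the 'projectx' name are assumptions, not code from this changeset):

from routes import Mapper


def fake_check(environ, match_dict):
    # stand-in for check_repo: pretend only 'projectx' is a valid repository
    return match_dict.get('repo_name') == 'projectx'

rmap = Mapper()
rmap.minimization = False

# guarded catch-all first, more specific pattern second, as in the new routing.py
rmap.connect('summary_home', '/{repo_name:.*}',
             controller='summary',
             conditions=dict(function=fake_check))
rmap.connect('changeset_home', '/{repo_name:.*}/changeset/{revision}',
             controller='changeset', revision='tip',
             conditions=dict(function=fake_check))

# '/projectx'               should resolve to summary_home, since the condition
#                           accepts 'projectx'
# '/projectx/changeset/tip' should resolve to changeset_home: the catch-all
#                           matches first with repo_name='projectx/changeset/tip',
#                           the condition rejects it, and matching falls through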
@@ -1,611 +1,611 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import logging
27 import logging
28 import datetime
28 import datetime
29 import traceback
29 import traceback
30 import paste
30 import paste
31 import beaker
31 import beaker
32 from os.path import dirname as dn, join as jn
32 from os.path import dirname as dn, join as jn
33
33
34 from paste.script.command import Command, BadCommand
34 from paste.script.command import Command, BadCommand
35
35
-from UserDict import DictMixin
-
-from mercurial import ui, config, hg
-from mercurial.error import RepoError
+from mercurial import ui, config
 
 from webhelpers.text import collapse, remove_formatting, strip_tags
 
+from vcs import get_backend
 from vcs.backends.base import BaseChangeset
 from vcs.utils.lazy import LazyProperty
-from vcs import get_backend
+from vcs.utils.helpers import get_scm
+from vcs.exceptions import VCSError
 
 from rhodecode.model import meta
 from rhodecode.model.caching_query import FromCache
 from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog, Group, \
     RhodeCodeSettings
 from rhodecode.model.repo import RepoModel
-from rhodecode.model.user import UserModel
 
54 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
55
53
56
54
57 def recursive_replace(str, replace=' '):
55 def recursive_replace(str, replace=' '):
58 """Recursive replace of given sign to just one instance
56 """Recursive replace of given sign to just one instance
59
57
60 :param str: given string
58 :param str: given string
61 :param replace: char to find and replace multiple instances
59 :param replace: char to find and replace multiple instances
62
60
63 Examples::
61 Examples::
64 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
62 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
65 'Mighty-Mighty-Bo-sstones'
63 'Mighty-Mighty-Bo-sstones'
66 """
64 """
67
65
68 if str.find(replace * 2) == -1:
66 if str.find(replace * 2) == -1:
69 return str
67 return str
70 else:
68 else:
71 str = str.replace(replace * 2, replace)
69 str = str.replace(replace * 2, replace)
72 return recursive_replace(str, replace)
70 return recursive_replace(str, replace)
73
71
74
72
75 def repo_name_slug(value):
73 def repo_name_slug(value):
76 """Return slug of name of repository
74 """Return slug of name of repository
77 This function is called on each creation/modification
75 This function is called on each creation/modification
78 of repository to prevent bad names in repo
76 of repository to prevent bad names in repo
79 """
77 """
80
78
81 slug = remove_formatting(value)
79 slug = remove_formatting(value)
82 slug = strip_tags(slug)
80 slug = strip_tags(slug)
83
81
84 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
82 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
85 slug = slug.replace(c, '-')
83 slug = slug.replace(c, '-')
86 slug = recursive_replace(slug, '-')
84 slug = recursive_replace(slug, '-')
87 slug = collapse(slug, '-')
85 slug = collapse(slug, '-')
88 return slug
86 return slug
89
87
90
88
91 def get_repo_slug(request):
89 def get_repo_slug(request):
92 return request.environ['pylons.routes_dict'].get('repo_name')
90 return request.environ['pylons.routes_dict'].get('repo_name')
93
91
94
92
95 def action_logger(user, action, repo, ipaddr='', sa=None):
93 def action_logger(user, action, repo, ipaddr='', sa=None):
96 """
94 """
97 Action logger for various actions made by users
95 Action logger for various actions made by users
98
96
99 :param user: user that made this action, can be a unique username string or
97 :param user: user that made this action, can be a unique username string or
100 object containing user_id attribute
98 object containing user_id attribute
101 :param action: action to log, should be one of predefined unique actions for
99 :param action: action to log, should be one of predefined unique actions for
102 easy translations
100 easy translations
103 :param repo: string name of repository or object containing repo_id,
101 :param repo: string name of repository or object containing repo_id,
104 that action was made on
102 that action was made on
105 :param ipaddr: optional ip address from which the action was made
103 :param ipaddr: optional ip address from which the action was made
106 :param sa: optional sqlalchemy session
104 :param sa: optional sqlalchemy session
107
105
108 """
106 """
109
107
110 if not sa:
108 if not sa:
111 sa = meta.Session()
109 sa = meta.Session()
112
110
113 try:
111 try:
-um = UserModel()
115 if hasattr(user, 'user_id'):
112 if hasattr(user, 'user_id'):
116 user_obj = user
113 user_obj = user
117 elif isinstance(user, basestring):
114 elif isinstance(user, basestring):
-user_obj = um.get_by_username(user, cache=False)
+user_obj = User.by_username(user, cache=False)
119 else:
116 else:
120 raise Exception('You have to provide user object or username')
117 raise Exception('You have to provide user object or username')
121
118
122 rm = RepoModel()
119 rm = RepoModel()
123 if hasattr(repo, 'repo_id'):
120 if hasattr(repo, 'repo_id'):
124 repo_obj = rm.get(repo.repo_id, cache=False)
121 repo_obj = rm.get(repo.repo_id, cache=False)
125 repo_name = repo_obj.repo_name
122 repo_name = repo_obj.repo_name
126 elif isinstance(repo, basestring):
123 elif isinstance(repo, basestring):
127 repo_name = repo.lstrip('/')
124 repo_name = repo.lstrip('/')
128 repo_obj = rm.get_by_repo_name(repo_name, cache=False)
125 repo_obj = rm.get_by_repo_name(repo_name, cache=False)
129 else:
126 else:
130 raise Exception('You have to provide repository to action logger')
127 raise Exception('You have to provide repository to action logger')
131
128
132 user_log = UserLog()
129 user_log = UserLog()
133 user_log.user_id = user_obj.user_id
130 user_log.user_id = user_obj.user_id
134 user_log.action = action
131 user_log.action = action
135
132
136 user_log.repository_id = repo_obj.repo_id
133 user_log.repository_id = repo_obj.repo_id
137 user_log.repository_name = repo_name
134 user_log.repository_name = repo_name
138
135
139 user_log.action_date = datetime.datetime.now()
136 user_log.action_date = datetime.datetime.now()
140 user_log.user_ip = ipaddr
137 user_log.user_ip = ipaddr
141 sa.add(user_log)
138 sa.add(user_log)
142 sa.commit()
139 sa.commit()
143
140
144 log.info('Adding user %s, action %s on %s', user_obj, action, repo)
141 log.info('Adding user %s, action %s on %s', user_obj, action, repo)
145 except:
142 except:
146 log.error(traceback.format_exc())
143 log.error(traceback.format_exc())
147 sa.rollback()
144 sa.rollback()
148
145
149
146
150 def get_repos(path, recursive=False):
147 def get_repos(path, recursive=False):
151 """
148 """
152 Scans given path for repos and returns (name,(type,path)) tuple
149 Scans given path for repos and returns (name,(type,path)) tuple
153
150
154 :param path: path to scan for repositories
151 :param path: path to scan for repositories
155 :param recursive: recursive search and return names with subdirs in front
152 :param recursive: recursive search and return names with subdirs in front
156 """
153 """
157 from vcs.utils.helpers import get_scm
154 from vcs.utils.helpers import get_scm
158 from vcs.exceptions import VCSError
155 from vcs.exceptions import VCSError
159
156
160 if path.endswith(os.sep):
157 if path.endswith(os.sep):
161 #remove ending slash for better results
158 #remove ending slash for better results
162 path = path[:-1]
159 path = path[:-1]
163
160
164 def _get_repos(p):
161 def _get_repos(p):
165 if not os.access(p, os.W_OK):
162 if not os.access(p, os.W_OK):
166 return
163 return
167 for dirpath in os.listdir(p):
164 for dirpath in os.listdir(p):
168 if os.path.isfile(os.path.join(p, dirpath)):
165 if os.path.isfile(os.path.join(p, dirpath)):
169 continue
166 continue
170 cur_path = os.path.join(p, dirpath)
167 cur_path = os.path.join(p, dirpath)
171 try:
168 try:
172 scm_info = get_scm(cur_path)
169 scm_info = get_scm(cur_path)
173 yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info
170 yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info
174 except VCSError:
171 except VCSError:
175 if not recursive:
172 if not recursive:
176 continue
173 continue
177 #check if this dir contains other repos for recursive scan
174 #check if this dir contains other repos for recursive scan
178 rec_path = os.path.join(p, dirpath)
175 rec_path = os.path.join(p, dirpath)
179 if os.path.isdir(rec_path):
176 if os.path.isdir(rec_path):
180 for inner_scm in _get_repos(rec_path):
177 for inner_scm in _get_repos(rec_path):
181 yield inner_scm
178 yield inner_scm
182
179
183 return _get_repos(path)
180 return _get_repos(path)
184
181
185
182
 def check_repo_fast(repo_name, base_path):
     """
-    Check given path for existence of directory
+    Returns True if given path is a valid repository False otherwise
     :param repo_name:
     :param base_path:
 
-    :return False: if this directory is present
+    :return True: if given path is a valid repository
     """
-    if os.path.isdir(os.path.join(base_path, repo_name)):
-        return False
-    return True
-
-
-def check_repo(repo_name, base_path, verify=True):
-
-    repo_path = os.path.join(base_path, repo_name)
-
-    try:
-        if not check_repo_fast(repo_name, base_path):
-            return False
-        r = hg.repository(ui.ui(), repo_path)
-        if verify:
-            hg.verify(r)
-        #here we hnow that repo exists it was verified
-        log.info('%s repo is already created', repo_name)
-        return False
-    except RepoError:
-        #it means that there is no valid repo there...
-        log.info('%s repo is free for creation', repo_name)
-        return True
+    full_path = os.path.join(base_path, repo_name)
+
+    try:
+        get_scm(full_path)
+        return True
+    except VCSError:
+        return False
+
+def check_repos_group_fast(repos_group_name, base_path):
+    """
+    Returns True if given path is a repos group False otherwise
+
+    :param repo_name:
+    :param base_path:
+    """
+    full_path = os.path.join(base_path, repos_group_name)
+
+    # check if it's not a repo
+    if check_repo_fast(repos_group_name, base_path):
+        return False
+
+    # check if it's a valid path
+    if os.path.isdir(full_path):
+        return True
+
+    return False
 
218 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
218 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
219 while True:
219 while True:
220 ok = raw_input(prompt)
220 ok = raw_input(prompt)
221 if ok in ('y', 'ye', 'yes'):
221 if ok in ('y', 'ye', 'yes'):
222 return True
222 return True
223 if ok in ('n', 'no', 'nop', 'nope'):
223 if ok in ('n', 'no', 'nop', 'nope'):
224 return False
224 return False
225 retries = retries - 1
225 retries = retries - 1
226 if retries < 0:
226 if retries < 0:
227 raise IOError
227 raise IOError
228 print complaint
228 print complaint
229
229
230 #propagated from mercurial documentation
230 #propagated from mercurial documentation
231 ui_sections = ['alias', 'auth',
231 ui_sections = ['alias', 'auth',
232 'decode/encode', 'defaults',
232 'decode/encode', 'defaults',
233 'diff', 'email',
233 'diff', 'email',
234 'extensions', 'format',
234 'extensions', 'format',
235 'merge-patterns', 'merge-tools',
235 'merge-patterns', 'merge-tools',
236 'hooks', 'http_proxy',
236 'hooks', 'http_proxy',
237 'smtp', 'patch',
237 'smtp', 'patch',
238 'paths', 'profiling',
238 'paths', 'profiling',
239 'server', 'trusted',
239 'server', 'trusted',
240 'ui', 'web', ]
240 'ui', 'web', ]
241
241
242
242
243 def make_ui(read_from='file', path=None, checkpaths=True):
243 def make_ui(read_from='file', path=None, checkpaths=True):
244 """A function that will read python rc files or database
244 """A function that will read python rc files or database
245 and make a mercurial ui object from read options
245 and make a mercurial ui object from read options
246
246
247 :param path: path to mercurial config file
247 :param path: path to mercurial config file
248 :param checkpaths: check the path
248 :param checkpaths: check the path
249 :param read_from: read from 'file' or 'db'
249 :param read_from: read from 'file' or 'db'
250 """
250 """
251
251
252 baseui = ui.ui()
252 baseui = ui.ui()
253
253
254 #clean the baseui object
254 #clean the baseui object
255 baseui._ocfg = config.config()
255 baseui._ocfg = config.config()
256 baseui._ucfg = config.config()
256 baseui._ucfg = config.config()
257 baseui._tcfg = config.config()
257 baseui._tcfg = config.config()
258
258
259 if read_from == 'file':
259 if read_from == 'file':
260 if not os.path.isfile(path):
260 if not os.path.isfile(path):
261 log.warning('Unable to read config file %s' % path)
261 log.warning('Unable to read config file %s' % path)
262 return False
262 return False
263 log.debug('reading hgrc from %s', path)
263 log.debug('reading hgrc from %s', path)
264 cfg = config.config()
264 cfg = config.config()
265 cfg.read(path)
265 cfg.read(path)
266 for section in ui_sections:
266 for section in ui_sections:
267 for k, v in cfg.items(section):
267 for k, v in cfg.items(section):
268 log.debug('settings ui from file[%s]%s:%s', section, k, v)
268 log.debug('settings ui from file[%s]%s:%s', section, k, v)
269 baseui.setconfig(section, k, v)
269 baseui.setconfig(section, k, v)
270
270
271 elif read_from == 'db':
271 elif read_from == 'db':
272 sa = meta.Session()
272 sa = meta.Session()
273 ret = sa.query(RhodeCodeUi)\
273 ret = sa.query(RhodeCodeUi)\
274 .options(FromCache("sql_cache_short",
274 .options(FromCache("sql_cache_short",
275 "get_hg_ui_settings")).all()
275 "get_hg_ui_settings")).all()
276
276
277 hg_ui = ret
277 hg_ui = ret
278 for ui_ in hg_ui:
278 for ui_ in hg_ui:
279 if ui_.ui_active:
279 if ui_.ui_active:
280 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
280 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
281 ui_.ui_key, ui_.ui_value)
281 ui_.ui_key, ui_.ui_value)
282 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
282 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
283
283
284 meta.Session.remove()
284 meta.Session.remove()
285 return baseui
285 return baseui
286
286
287
287
288 def set_rhodecode_config(config):
288 def set_rhodecode_config(config):
289 """Updates pylons config with new settings from database
289 """Updates pylons config with new settings from database
290
290
291 :param config:
291 :param config:
292 """
292 """
293 hgsettings = RhodeCodeSettings.get_app_settings()
293 hgsettings = RhodeCodeSettings.get_app_settings()
294
294
295 for k, v in hgsettings.items():
295 for k, v in hgsettings.items():
296 config[k] = v
296 config[k] = v
297
297
298
298
299 def invalidate_cache(cache_key, *args):
299 def invalidate_cache(cache_key, *args):
300 """Puts cache invalidation task into db for
300 """Puts cache invalidation task into db for
301 further global cache invalidation
301 further global cache invalidation
302 """
302 """
303
303
304 from rhodecode.model.scm import ScmModel
304 from rhodecode.model.scm import ScmModel
305
305
306 if cache_key.startswith('get_repo_cached_'):
306 if cache_key.startswith('get_repo_cached_'):
307 name = cache_key.split('get_repo_cached_')[-1]
307 name = cache_key.split('get_repo_cached_')[-1]
308 ScmModel().mark_for_invalidation(name)
308 ScmModel().mark_for_invalidation(name)
309
309
310
310
311 class EmptyChangeset(BaseChangeset):
311 class EmptyChangeset(BaseChangeset):
312 """
312 """
313 A dummy empty changeset. It's possible to pass a hash when creating
313 A dummy empty changeset. It's possible to pass a hash when creating
314 an EmptyChangeset
314 an EmptyChangeset
315 """
315 """
316
316
317 def __init__(self, cs='0' * 40, repo=None, requested_revision=None, alias=None):
317 def __init__(self, cs='0' * 40, repo=None, requested_revision=None, alias=None):
318 self._empty_cs = cs
318 self._empty_cs = cs
319 self.revision = -1
319 self.revision = -1
320 self.message = ''
320 self.message = ''
321 self.author = ''
321 self.author = ''
322 self.date = ''
322 self.date = ''
323 self.repository = repo
323 self.repository = repo
324 self.requested_revision = requested_revision
324 self.requested_revision = requested_revision
325 self.alias = alias
325 self.alias = alias
326
326
327 @LazyProperty
327 @LazyProperty
328 def raw_id(self):
328 def raw_id(self):
329 """Returns raw string identifying this changeset, useful for web
329 """Returns raw string identifying this changeset, useful for web
330 representation.
330 representation.
331 """
331 """
332
332
333 return self._empty_cs
333 return self._empty_cs
334
334
335 @LazyProperty
335 @LazyProperty
336 def branch(self):
336 def branch(self):
337 return get_backend(self.alias).DEFAULT_BRANCH_NAME
337 return get_backend(self.alias).DEFAULT_BRANCH_NAME
338
338
339 @LazyProperty
339 @LazyProperty
340 def short_id(self):
340 def short_id(self):
341 return self.raw_id[:12]
341 return self.raw_id[:12]
342
342
343 def get_file_changeset(self, path):
343 def get_file_changeset(self, path):
344 return self
344 return self
345
345
346 def get_file_content(self, path):
346 def get_file_content(self, path):
347 return u''
347 return u''
348
348
349 def get_file_size(self, path):
349 def get_file_size(self, path):
350 return 0
350 return 0
351
351
352
352
353 def map_groups(groups):
353 def map_groups(groups):
354 """Checks for groups existence, and creates groups structures.
354 """Checks for groups existence, and creates groups structures.
355 It returns last group in structure
355 It returns last group in structure
356
356
357 :param groups: list of groups structure
357 :param groups: list of groups structure
358 """
358 """
359 sa = meta.Session()
359 sa = meta.Session()
360
360
361 parent = None
361 parent = None
362 group = None
362 group = None
363 for lvl, group_name in enumerate(groups[:-1]):
363 for lvl, group_name in enumerate(groups[:-1]):
364 group = sa.query(Group).filter(Group.group_name == group_name).scalar()
364 group = sa.query(Group).filter(Group.group_name == group_name).scalar()
365
365
366 if group is None:
366 if group is None:
367 group = Group(group_name, parent)
367 group = Group(group_name, parent)
368 sa.add(group)
368 sa.add(group)
369 sa.commit()
369 sa.commit()
370
370
371 parent = group
371 parent = group
372
372
373 return group
373 return group
374
374
375
375
376 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
376 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
377 """maps all repos given in initial_repo_list, non existing repositories
377 """maps all repos given in initial_repo_list, non existing repositories
378 are created, if remove_obsolete is True it also check for db entries
378 are created, if remove_obsolete is True it also check for db entries
379 that are not in initial_repo_list and removes them.
379 that are not in initial_repo_list and removes them.
380
380
381 :param initial_repo_list: list of repositories found by scanning methods
381 :param initial_repo_list: list of repositories found by scanning methods
382 :param remove_obsolete: check for obsolete entries in database
382 :param remove_obsolete: check for obsolete entries in database
383 """
383 """
384
384
385 sa = meta.Session()
385 sa = meta.Session()
386 rm = RepoModel()
386 rm = RepoModel()
387 user = sa.query(User).filter(User.admin == True).first()
387 user = sa.query(User).filter(User.admin == True).first()
388 added = []
388 added = []
389 for name, repo in initial_repo_list.items():
389 for name, repo in initial_repo_list.items():
390 group = map_groups(name.split(os.sep))
390 group = map_groups(name.split(os.sep))
391 if not rm.get_by_repo_name(name, cache=False):
391 if not rm.get_by_repo_name(name, cache=False):
392 log.info('repository %s not found creating default', name)
392 log.info('repository %s not found creating default', name)
393 added.append(name)
393 added.append(name)
394 form_data = {
394 form_data = {
395 'repo_name': name,
395 'repo_name': name,
396 'repo_name_full': name,
396 'repo_name_full': name,
397 'repo_type': repo.alias,
397 'repo_type': repo.alias,
398 'description': repo.description \
398 'description': repo.description \
399 if repo.description != 'unknown' else \
399 if repo.description != 'unknown' else \
400 '%s repository' % name,
400 '%s repository' % name,
401 'private': False,
401 'private': False,
402 'group_id': getattr(group, 'group_id', None)
402 'group_id': getattr(group, 'group_id', None)
403 }
403 }
404 rm.create(form_data, user, just_db=True)
404 rm.create(form_data, user, just_db=True)
405
405
406 removed = []
406 removed = []
407 if remove_obsolete:
407 if remove_obsolete:
408 #remove from database those repositories that are not in the filesystem
408 #remove from database those repositories that are not in the filesystem
409 for repo in sa.query(Repository).all():
409 for repo in sa.query(Repository).all():
410 if repo.repo_name not in initial_repo_list.keys():
410 if repo.repo_name not in initial_repo_list.keys():
411 removed.append(repo.repo_name)
411 removed.append(repo.repo_name)
412 sa.delete(repo)
412 sa.delete(repo)
413 sa.commit()
413 sa.commit()
414
414
415 return added, removed
415 return added, removed
416
416
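A minimal, hedged sketch of driving repo2db_mapper; scan_repos is a placeholder for whatever scanning helper produces the {name: repo} mapping, and is not part of this changeset.

from rhodecode.lib.utils import repo2db_mapper  # assumed module path

initial_repo_list = scan_repos('/srv/repos')  # placeholder helper returning {repo_name: vcs repo}
added, removed = repo2db_mapper(initial_repo_list, remove_obsolete=True)
print 'added %s, removed %s' % (added, removed)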
417 #set cache regions for beaker so celery can utilise it
417 #set cache regions for beaker so celery can utilise it
418 def add_cache(settings):
418 def add_cache(settings):
419 cache_settings = {'regions': None}
419 cache_settings = {'regions': None}
420 for key in settings.keys():
420 for key in settings.keys():
421 for prefix in ['beaker.cache.', 'cache.']:
421 for prefix in ['beaker.cache.', 'cache.']:
422 if key.startswith(prefix):
422 if key.startswith(prefix):
423 name = key.split(prefix)[1].strip()
423 name = key.split(prefix)[1].strip()
424 cache_settings[name] = settings[key].strip()
424 cache_settings[name] = settings[key].strip()
425 if cache_settings['regions']:
425 if cache_settings['regions']:
426 for region in cache_settings['regions'].split(','):
426 for region in cache_settings['regions'].split(','):
427 region = region.strip()
427 region = region.strip()
428 region_settings = {}
428 region_settings = {}
429 for key, value in cache_settings.items():
429 for key, value in cache_settings.items():
430 if key.startswith(region):
430 if key.startswith(region):
431 region_settings[key.split('.')[1]] = value
431 region_settings[key.split('.')[1]] = value
432 region_settings['expire'] = int(region_settings.get('expire',
432 region_settings['expire'] = int(region_settings.get('expire',
433 60))
433 60))
434 region_settings.setdefault('lock_dir',
434 region_settings.setdefault('lock_dir',
435 cache_settings.get('lock_dir'))
435 cache_settings.get('lock_dir'))
436 region_settings.setdefault('data_dir',
436 region_settings.setdefault('data_dir',
437 cache_settings.get('data_dir'))
437 cache_settings.get('data_dir'))
438
438
439 if 'type' not in region_settings:
439 if 'type' not in region_settings:
440 region_settings['type'] = cache_settings.get('type',
440 region_settings['type'] = cache_settings.get('type',
441 'memory')
441 'memory')
442 beaker.cache.cache_regions[region] = region_settings
442 beaker.cache.cache_regions[region] = region_settings
443
443
444
444
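An illustrative settings dict for add_cache, shaped like the flat keys a paster .ini yields; the region names and values are examples, not required configuration.

from rhodecode.lib.utils import add_cache  # assumed module path

settings = {
    'beaker.cache.regions': 'super_short_term, sql_cache_short',
    'beaker.cache.super_short_term.type': 'memory',
    'beaker.cache.super_short_term.expire': '10',
    'beaker.cache.sql_cache_short.type': 'memory',
    'beaker.cache.sql_cache_short.expire': '5',
    'beaker.cache.lock_dir': '/tmp/cache/lock',
    'beaker.cache.data_dir': '/tmp/cache/data',
}
add_cache(settings)  # populates beaker.cache.cache_regions so celery tasks share the regions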
445 def get_current_revision():
445 def get_current_revision():
446 """Returns tuple of (number, id) from repository containing this package
446 """Returns tuple of (number, id) from repository containing this package
447 or None if repository could not be found.
447 or None if repository could not be found.
448 """
448 """
449
449
450 try:
450 try:
451 from vcs import get_repo
451 from vcs import get_repo
452 from vcs.utils.helpers import get_scm
452 from vcs.utils.helpers import get_scm
453 from vcs.exceptions import RepositoryError, VCSError
453 from vcs.exceptions import RepositoryError, VCSError
454 repopath = os.path.join(os.path.dirname(__file__), '..', '..')
454 repopath = os.path.join(os.path.dirname(__file__), '..', '..')
455 scm = get_scm(repopath)[0]
455 scm = get_scm(repopath)[0]
456 repo = get_repo(path=repopath, alias=scm)
456 repo = get_repo(path=repopath, alias=scm)
457 tip = repo.get_changeset()
457 tip = repo.get_changeset()
458 return (tip.revision, tip.short_id)
458 return (tip.revision, tip.short_id)
459 except (ImportError, RepositoryError, VCSError), err:
459 except (ImportError, RepositoryError, VCSError), err:
460 logging.debug("Cannot retrieve rhodecode's revision. Original error "
460 logging.debug("Cannot retrieve rhodecode's revision. Original error "
461 "was: %s" % err)
461 "was: %s" % err)
462 return None
462 return None
463
463
464
464
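A small hedged example of consuming get_current_revision; it only returns a value when the package runs from a vcs checkout, so callers handle None.

from rhodecode.lib.utils import get_current_revision  # assumed module path

rev = get_current_revision()
if rev is not None:
    revision_number, short_id = rev
    print 'running from revision %s (%s)' % (revision_number, short_id)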
465 #==============================================================================
465 #==============================================================================
466 # TEST FUNCTIONS AND CREATORS
466 # TEST FUNCTIONS AND CREATORS
467 #==============================================================================
467 #==============================================================================
468 def create_test_index(repo_location, config, full_index):
468 def create_test_index(repo_location, config, full_index):
469 """
469 """
470 Makes default test index
470 Makes default test index
471
471
472 :param config: test config
472 :param config: test config
473 :param full_index:
473 :param full_index:
474 """
474 """
475
475
476 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
476 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
477 from rhodecode.lib.pidlock import DaemonLock, LockHeld
477 from rhodecode.lib.pidlock import DaemonLock, LockHeld
478
478
479 repo_location = repo_location
479 repo_location = repo_location
480
480
481 index_location = os.path.join(config['app_conf']['index_dir'])
481 index_location = os.path.join(config['app_conf']['index_dir'])
482 if not os.path.exists(index_location):
482 if not os.path.exists(index_location):
483 os.makedirs(index_location)
483 os.makedirs(index_location)
484
484
485 try:
485 try:
486 l = DaemonLock(file=jn(dn(index_location), 'make_index.lock'))
486 l = DaemonLock(file=jn(dn(index_location), 'make_index.lock'))
487 WhooshIndexingDaemon(index_location=index_location,
487 WhooshIndexingDaemon(index_location=index_location,
488 repo_location=repo_location)\
488 repo_location=repo_location)\
489 .run(full_index=full_index)
489 .run(full_index=full_index)
490 l.release()
490 l.release()
491 except LockHeld:
491 except LockHeld:
492 pass
492 pass
493
493
494
494
495 def create_test_env(repos_test_path, config):
495 def create_test_env(repos_test_path, config):
496 """Makes a fresh database and
496 """Makes a fresh database and
497 install test repository into tmp dir
497 install test repository into tmp dir
498 """
498 """
499 from rhodecode.lib.db_manage import DbManage
499 from rhodecode.lib.db_manage import DbManage
500 from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
500 from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
501 HG_FORK, GIT_FORK, TESTS_TMP_PATH
501 HG_FORK, GIT_FORK, TESTS_TMP_PATH
502 import tarfile
502 import tarfile
503 import shutil
503 import shutil
504 from os.path import abspath
504 from os.path import abspath
505
505
506 # PART ONE create db
506 # PART ONE create db
507 dbconf = config['sqlalchemy.db1.url']
507 dbconf = config['sqlalchemy.db1.url']
508 log.debug('making test db %s', dbconf)
508 log.debug('making test db %s', dbconf)
509
509
510 # create test dir if it doesn't exist
510 # create test dir if it doesn't exist
511 if not os.path.isdir(repos_test_path):
511 if not os.path.isdir(repos_test_path):
512 log.debug('Creating testdir %s' % repos_test_path)
512 log.debug('Creating testdir %s' % repos_test_path)
513 os.makedirs(repos_test_path)
513 os.makedirs(repos_test_path)
514
514
515 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
515 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
516 tests=True)
516 tests=True)
517 dbmanage.create_tables(override=True)
517 dbmanage.create_tables(override=True)
518 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
518 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
519 dbmanage.create_default_user()
519 dbmanage.create_default_user()
520 dbmanage.admin_prompt()
520 dbmanage.admin_prompt()
521 dbmanage.create_permissions()
521 dbmanage.create_permissions()
522 dbmanage.populate_default_permissions()
522 dbmanage.populate_default_permissions()
523
523
524 # PART TWO make test repo
524 # PART TWO make test repo
525 log.debug('making test vcs repositories')
525 log.debug('making test vcs repositories')
526
526
527 idx_path = config['app_conf']['index_dir']
527 idx_path = config['app_conf']['index_dir']
528 data_path = config['app_conf']['cache_dir']
528 data_path = config['app_conf']['cache_dir']
529
529
530 #clean index and data
530 #clean index and data
531 if idx_path and os.path.exists(idx_path):
531 if idx_path and os.path.exists(idx_path):
532 log.debug('remove %s' % idx_path)
532 log.debug('remove %s' % idx_path)
533 shutil.rmtree(idx_path)
533 shutil.rmtree(idx_path)
534
534
535 if data_path and os.path.exists(data_path):
535 if data_path and os.path.exists(data_path):
536 log.debug('remove %s' % data_path)
536 log.debug('remove %s' % data_path)
537 shutil.rmtree(data_path)
537 shutil.rmtree(data_path)
538
538
539 #CREATE DEFAULT HG REPOSITORY
539 #CREATE DEFAULT HG REPOSITORY
540 cur_dir = dn(dn(abspath(__file__)))
540 cur_dir = dn(dn(abspath(__file__)))
541 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
541 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
542 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
542 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
543 tar.close()
543 tar.close()
544
544
545
545
546 #==============================================================================
546 #==============================================================================
547 # PASTER COMMANDS
547 # PASTER COMMANDS
548 #==============================================================================
548 #==============================================================================
549 class BasePasterCommand(Command):
549 class BasePasterCommand(Command):
550 """
550 """
551 Abstract Base Class for paster commands.
551 Abstract Base Class for paster commands.
552
552
553 The celery commands are somewhat aggressive about loading
553 The celery commands are somewhat aggressive about loading
554 celery.conf, and since our module sets the `CELERY_LOADER`
554 celery.conf, and since our module sets the `CELERY_LOADER`
555 environment variable to our loader, we have to bootstrap a bit and
555 environment variable to our loader, we have to bootstrap a bit and
556 make sure we've had a chance to load the pylons config off of the
556 make sure we've had a chance to load the pylons config off of the
557 command line, otherwise everything fails.
557 command line, otherwise everything fails.
558 """
558 """
559 min_args = 1
559 min_args = 1
560 min_args_error = "Please provide a paster config file as an argument."
560 min_args_error = "Please provide a paster config file as an argument."
561 takes_config_file = 1
561 takes_config_file = 1
562 requires_config_file = True
562 requires_config_file = True
563
563
564 def notify_msg(self, msg, log=False):
564 def notify_msg(self, msg, log=False):
565 """Make a notification to user, additionally if logger is passed
565 """Make a notification to user, additionally if logger is passed
566 it logs this action using given logger
566 it logs this action using given logger
567
567
568 :param msg: message that will be printed to user
568 :param msg: message that will be printed to user
569 :param log: logging instance, to use to additionally log this message
569 :param log: logging instance, to use to additionally log this message
570
570
571 """
571 """
572 if log and isinstance(log, logging.Logger):
572 if log and isinstance(log, logging.Logger):
573 log.info(msg)
573 log.info(msg)
574
574
575 def run(self, args):
575 def run(self, args):
576 """
576 """
577 Overrides Command.run
577 Overrides Command.run
578
578
579 Checks for a config file argument and loads it.
579 Checks for a config file argument and loads it.
580 """
580 """
581 if len(args) < self.min_args:
581 if len(args) < self.min_args:
582 raise BadCommand(
582 raise BadCommand(
583 self.min_args_error % {'min_args': self.min_args,
583 self.min_args_error % {'min_args': self.min_args,
584 'actual_args': len(args)})
584 'actual_args': len(args)})
585
585
586 # Decrement because we're going to lob off the first argument.
586 # Decrement because we're going to lob off the first argument.
587 # @@ This is hacky
587 # @@ This is hacky
588 self.min_args -= 1
588 self.min_args -= 1
589 self.bootstrap_config(args[0])
589 self.bootstrap_config(args[0])
590 self.update_parser()
590 self.update_parser()
591 return super(BasePasterCommand, self).run(args[1:])
591 return super(BasePasterCommand, self).run(args[1:])
592
592
593 def update_parser(self):
593 def update_parser(self):
594 """
594 """
595 Abstract method. Allows for the class's parser to be updated
595 Abstract method. Allows for the class's parser to be updated
596 before the superclass's `run` method is called. Necessary to
596 before the superclass's `run` method is called. Necessary to
597 allow options/arguments to be passed through to the underlying
597 allow options/arguments to be passed through to the underlying
598 celery command.
598 celery command.
599 """
599 """
600 raise NotImplementedError("Abstract Method.")
600 raise NotImplementedError("Abstract Method.")
601
601
602 def bootstrap_config(self, conf):
602 def bootstrap_config(self, conf):
603 """
603 """
604 Loads the pylons configuration.
604 Loads the pylons configuration.
605 """
605 """
606 from pylons import config as pylonsconfig
606 from pylons import config as pylonsconfig
607
607
608 path_to_ini_file = os.path.realpath(conf)
608 path_to_ini_file = os.path.realpath(conf)
609 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
609 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
610 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
610 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
611
611
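A hedged sketch of a concrete subclass, to show where update_parser and the bootstrapped pylons config fit in; the command name, option and body are hypothetical and not part of this changeset.

class MakeIndexCommand(BasePasterCommand):
    """Hypothetical: paster make-index production.ini --full"""
    summary = 'Rebuild the whoosh full-text index'

    def update_parser(self):
        # called by run() after bootstrap_config(), before paste's Command machinery
        self.parser.add_option('--full', action='store_true', dest='full_index',
                               default=False, help='build a fresh index from scratch')

    def command(self):
        from pylons import config
        repo_location = config.get('base_path', '/srv/repos')  # assumed config key
        create_test_index(repo_location, config, self.options.full_index)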
@@ -1,358 +1,358 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.model.repo
3 rhodecode.model.repo
4 ~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~
5
5
6 Repository model for rhodecode
6 Repository model for rhodecode
7
7
8 :created_on: Jun 5, 2010
8 :created_on: Jun 5, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 import os
25 import os
26 import shutil
26 import shutil
27 import logging
27 import logging
28 import traceback
28 import traceback
29 from datetime import datetime
29 from datetime import datetime
30
30
31 from sqlalchemy.orm import joinedload, make_transient
31 from sqlalchemy.orm import joinedload, make_transient
32
32
33 from vcs.utils.lazy import LazyProperty
33 from vcs.utils.lazy import LazyProperty
34 from vcs.backends import get_backend
34 from vcs.backends import get_backend
35
35
36 from rhodecode.lib import safe_str
36 from rhodecode.lib import safe_str
37
37
38 from rhodecode.model import BaseModel
38 from rhodecode.model import BaseModel
39 from rhodecode.model.caching_query import FromCache
39 from rhodecode.model.caching_query import FromCache
40 from rhodecode.model.db import Repository, RepoToPerm, User, Permission, \
40 from rhodecode.model.db import Repository, RepoToPerm, User, Permission, \
41 Statistics, UsersGroup, UsersGroupRepoToPerm, RhodeCodeUi, Group
41 Statistics, UsersGroup, UsersGroupRepoToPerm, RhodeCodeUi, Group
42 from rhodecode.model.user import UserModel
42 from rhodecode.model.user import UserModel
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class RepoModel(BaseModel):
47 class RepoModel(BaseModel):
48
48
49 @LazyProperty
49 @LazyProperty
50 def repos_path(self):
50 def repos_path(self):
51 """Get's the repositories root path from database
51 """Get's the repositories root path from database
52 """
52 """
53
53
54 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
54 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
55 return q.ui_value
55 return q.ui_value
56
56
57 def get(self, repo_id, cache=False):
57 def get(self, repo_id, cache=False):
58 repo = self.sa.query(Repository)\
58 repo = self.sa.query(Repository)\
59 .filter(Repository.repo_id == repo_id)
59 .filter(Repository.repo_id == repo_id)
60
60
61 if cache:
61 if cache:
62 repo = repo.options(FromCache("sql_cache_short",
62 repo = repo.options(FromCache("sql_cache_short",
63 "get_repo_%s" % repo_id))
63 "get_repo_%s" % repo_id))
64 return repo.scalar()
64 return repo.scalar()
65
65
66 def get_by_repo_name(self, repo_name, cache=False):
66 def get_by_repo_name(self, repo_name, cache=False):
67 repo = self.sa.query(Repository)\
67 repo = self.sa.query(Repository)\
68 .filter(Repository.repo_name == repo_name)
68 .filter(Repository.repo_name == repo_name)
69
69
70 if cache:
70 if cache:
71 repo = repo.options(FromCache("sql_cache_short",
71 repo = repo.options(FromCache("sql_cache_short",
72 "get_repo_%s" % repo_name))
72 "get_repo_%s" % repo_name))
73 return repo.scalar()
73 return repo.scalar()
74
74
75
75
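A hedged usage sketch of the two lookup paths above; the repository name is made up and a configured SQLAlchemy session behind RepoModel is assumed.

from rhodecode.model.repo import RepoModel

model = RepoModel()
repo = model.get_by_repo_name('projects/myrepo', cache=True)    # may be served from sql_cache_short
fresh = model.get_by_repo_name('projects/myrepo', cache=False)  # always queries the database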
76 def get_users_js(self):
76 def get_users_js(self):
77
77
78 users = self.sa.query(User).filter(User.active == True).all()
78 users = self.sa.query(User).filter(User.active == True).all()
79 u_tmpl = '''{id:%s, fname:"%s", lname:"%s", nname:"%s"},'''
79 u_tmpl = '''{id:%s, fname:"%s", lname:"%s", nname:"%s"},'''
80 users_array = '[%s]' % '\n'.join([u_tmpl % (u.user_id, u.name,
80 users_array = '[%s]' % '\n'.join([u_tmpl % (u.user_id, u.name,
81 u.lastname, u.username)
81 u.lastname, u.username)
82 for u in users])
82 for u in users])
83 return users_array
83 return users_array
84
84
85 def get_users_groups_js(self):
85 def get_users_groups_js(self):
86 users_groups = self.sa.query(UsersGroup)\
86 users_groups = self.sa.query(UsersGroup)\
87 .filter(UsersGroup.users_group_active == True).all()
87 .filter(UsersGroup.users_group_active == True).all()
88
88
89 g_tmpl = '''{id:%s, grname:"%s",grmembers:"%s"},'''
89 g_tmpl = '''{id:%s, grname:"%s",grmembers:"%s"},'''
90
90
91 users_groups_array = '[%s]' % '\n'.join([g_tmpl % \
91 users_groups_array = '[%s]' % '\n'.join([g_tmpl % \
92 (gr.users_group_id, gr.users_group_name,
92 (gr.users_group_id, gr.users_group_name,
93 len(gr.members))
93 len(gr.members))
94 for gr in users_groups])
94 for gr in users_groups])
95 return users_groups_array
95 return users_groups_array
96
96
97 def update(self, repo_name, form_data):
97 def update(self, repo_name, form_data):
98 try:
98 try:
99 cur_repo = self.get_by_repo_name(repo_name, cache=False)
99 cur_repo = self.get_by_repo_name(repo_name, cache=False)
100
100
101 #update permissions
101 #update permissions
102 for member, perm, member_type in form_data['perms_updates']:
102 for member, perm, member_type in form_data['perms_updates']:
103 if member_type == 'user':
103 if member_type == 'user':
104 r2p = self.sa.query(RepoToPerm)\
104 r2p = self.sa.query(RepoToPerm)\
105 .filter(RepoToPerm.user == User.by_username(member))\
105 .filter(RepoToPerm.user == User.by_username(member))\
106 .filter(RepoToPerm.repository == cur_repo)\
106 .filter(RepoToPerm.repository == cur_repo)\
107 .one()
107 .one()
108
108
109 r2p.permission = self.sa.query(Permission)\
109 r2p.permission = self.sa.query(Permission)\
110 .filter(Permission.permission_name ==
110 .filter(Permission.permission_name ==
111 perm).scalar()
111 perm).scalar()
112 self.sa.add(r2p)
112 self.sa.add(r2p)
113 else:
113 else:
114 g2p = self.sa.query(UsersGroupRepoToPerm)\
114 g2p = self.sa.query(UsersGroupRepoToPerm)\
115 .filter(UsersGroupRepoToPerm.users_group ==
115 .filter(UsersGroupRepoToPerm.users_group ==
116 UsersGroup.get_by_group_name(member))\
116 UsersGroup.get_by_group_name(member))\
117 .filter(UsersGroupRepoToPerm.repository ==
117 .filter(UsersGroupRepoToPerm.repository ==
118 cur_repo).one()
118 cur_repo).one()
119
119
120 g2p.permission = self.sa.query(Permission)\
120 g2p.permission = self.sa.query(Permission)\
121 .filter(Permission.permission_name ==
121 .filter(Permission.permission_name ==
122 perm).scalar()
122 perm).scalar()
123 self.sa.add(g2p)
123 self.sa.add(g2p)
124
124
125 #set new permissions
125 #set new permissions
126 for member, perm, member_type in form_data['perms_new']:
126 for member, perm, member_type in form_data['perms_new']:
127 if member_type == 'user':
127 if member_type == 'user':
128 r2p = RepoToPerm()
128 r2p = RepoToPerm()
129 r2p.repository = cur_repo
129 r2p.repository = cur_repo
130 r2p.user = User.by_username(member)
130 r2p.user = User.by_username(member)
131
131
132 r2p.permission = self.sa.query(Permission)\
132 r2p.permission = self.sa.query(Permission)\
133 .filter(Permission.
133 .filter(Permission.
134 permission_name == perm)\
134 permission_name == perm)\
135 .scalar()
135 .scalar()
136 self.sa.add(r2p)
136 self.sa.add(r2p)
137 else:
137 else:
138 g2p = UsersGroupRepoToPerm()
138 g2p = UsersGroupRepoToPerm()
139 g2p.repository = cur_repo
139 g2p.repository = cur_repo
140 g2p.users_group = UsersGroup.get_by_group_name(member)
140 g2p.users_group = UsersGroup.get_by_group_name(member)
141 g2p.permission = self.sa.query(Permission)\
141 g2p.permission = self.sa.query(Permission)\
142 .filter(Permission.
142 .filter(Permission.
143 permission_name == perm)\
143 permission_name == perm)\
144 .scalar()
144 .scalar()
145 self.sa.add(g2p)
145 self.sa.add(g2p)
146
146
147 #update current repo
147 #update current repo
148 for k, v in form_data.items():
148 for k, v in form_data.items():
149 if k == 'user':
149 if k == 'user':
150 cur_repo.user = User.by_username(v)
150 cur_repo.user = User.by_username(v)
151 elif k == 'repo_name':
151 elif k == 'repo_name':
152 cur_repo.repo_name = form_data['repo_name_full']
152 cur_repo.repo_name = form_data['repo_name_full']
153 elif k == 'repo_group':
153 elif k == 'repo_group':
154 cur_repo.group_id = v
154 cur_repo.group_id = v
155
155
156 else:
156 else:
157 setattr(cur_repo, k, v)
157 setattr(cur_repo, k, v)
158
158
159 self.sa.add(cur_repo)
159 self.sa.add(cur_repo)
160
160
161 if repo_name != form_data['repo_name_full']:
161 if repo_name != form_data['repo_name_full']:
162 # rename repository
162 # rename repository
163 self.__rename_repo(old=repo_name,
163 self.__rename_repo(old=repo_name,
164 new=form_data['repo_name_full'])
164 new=form_data['repo_name_full'])
165
165
166 self.sa.commit()
166 self.sa.commit()
167 except:
167 except:
168 log.error(traceback.format_exc())
168 log.error(traceback.format_exc())
169 self.sa.rollback()
169 self.sa.rollback()
170 raise
170 raise
171
171
172 def create(self, form_data, cur_user, just_db=False, fork=False):
172 def create(self, form_data, cur_user, just_db=False, fork=False):
173
173
174 try:
174 try:
175 if fork:
175 if fork:
176 repo_name = form_data['fork_name']
176 repo_name = form_data['fork_name']
177 org_name = form_data['repo_name']
177 org_name = form_data['repo_name']
178 org_full_name = org_name
178 org_full_name = org_name
179
179
180 else:
180 else:
181 org_name = repo_name = form_data['repo_name']
181 org_name = repo_name = form_data['repo_name']
182 repo_name_full = form_data['repo_name_full']
182 repo_name_full = form_data['repo_name_full']
183
183
184 new_repo = Repository()
184 new_repo = Repository()
185 new_repo.enable_statistics = False
185 new_repo.enable_statistics = False
186 for k, v in form_data.items():
186 for k, v in form_data.items():
187 if k == 'repo_name':
187 if k == 'repo_name':
188 if fork:
188 if fork:
189 v = repo_name
189 v = repo_name
190 else:
190 else:
191 v = repo_name_full
191 v = repo_name_full
192 if k == 'repo_group':
192 if k == 'repo_group':
193 k = 'group_id'
193 k = 'group_id'
194
194
195 setattr(new_repo, k, v)
195 setattr(new_repo, k, v)
196
196
197 if fork:
197 if fork:
198 parent_repo = self.sa.query(Repository)\
198 parent_repo = self.sa.query(Repository)\
199 .filter(Repository.repo_name == org_full_name).one()
199 .filter(Repository.repo_name == org_full_name).one()
200 new_repo.fork = parent_repo
200 new_repo.fork = parent_repo
201
201
202 new_repo.user_id = cur_user.user_id
202 new_repo.user_id = cur_user.user_id
203 self.sa.add(new_repo)
203 self.sa.add(new_repo)
204
204
205 #create default permission
205 #create default permission
206 repo_to_perm = RepoToPerm()
206 repo_to_perm = RepoToPerm()
207 default = 'repository.read'
207 default = 'repository.read'
208 for p in UserModel(self.sa).get_by_username('default',
208 for p in UserModel(self.sa).get_by_username('default',
209 cache=False).user_perms:
209 cache=False).user_perms:
210 if p.permission.permission_name.startswith('repository.'):
210 if p.permission.permission_name.startswith('repository.'):
211 default = p.permission.permission_name
211 default = p.permission.permission_name
212 break
212 break
213
213
214 default_perm = 'repository.none' if form_data['private'] else default
214 default_perm = 'repository.none' if form_data['private'] else default
215
215
216 repo_to_perm.permission_id = self.sa.query(Permission)\
216 repo_to_perm.permission_id = self.sa.query(Permission)\
217 .filter(Permission.permission_name == default_perm)\
217 .filter(Permission.permission_name == default_perm)\
218 .one().permission_id
218 .one().permission_id
219
219
220 repo_to_perm.repository = new_repo
220 repo_to_perm.repository = new_repo
221 repo_to_perm.user_id = UserModel(self.sa)\
221 repo_to_perm.user_id = UserModel(self.sa)\
222 .get_by_username('default', cache=False).user_id
222 .get_by_username('default', cache=False).user_id
223
223
224 self.sa.add(repo_to_perm)
224 self.sa.add(repo_to_perm)
225
225
226 if not just_db:
226 if not just_db:
227 self.__create_repo(repo_name, form_data['repo_type'],
227 self.__create_repo(repo_name, form_data['repo_type'],
228 form_data['repo_group'],
228 form_data['repo_group'],
229 form_data['clone_uri'])
229 form_data['clone_uri'])
230
230
231 self.sa.commit()
231 self.sa.commit()
232
232
233 #now automatically start following this repository as owner
233 #now automatically start following this repository as owner
234 from rhodecode.model.scm import ScmModel
234 from rhodecode.model.scm import ScmModel
235 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
235 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
236 cur_user.user_id)
236 cur_user.user_id)
237
237
238 except:
238 except:
239 log.error(traceback.format_exc())
239 log.error(traceback.format_exc())
240 self.sa.rollback()
240 self.sa.rollback()
241 raise
241 raise
242
242
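An illustrative form_data dict for create(), with keys inferred from the loop above and from repo2db_mapper earlier in this changeset; the values and the admin_user object are assumptions.

from rhodecode.model.repo import RepoModel

form_data = {
    'repo_name': 'myrepo',
    'repo_name_full': 'projects/myrepo',
    'repo_type': 'hg',
    'description': 'myrepo repository',
    'private': False,
    'clone_uri': '',
    'repo_group': None,  # rewritten to group_id by the k == 'repo_group' branch above
}
RepoModel().create(form_data, cur_user=admin_user, just_db=True)  # admin_user assumed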
243 def create_fork(self, form_data, cur_user):
243 def create_fork(self, form_data, cur_user):
244 from rhodecode.lib.celerylib import tasks, run_task
244 from rhodecode.lib.celerylib import tasks, run_task
245 run_task(tasks.create_repo_fork, form_data, cur_user)
245 run_task(tasks.create_repo_fork, form_data, cur_user)
246
246
247 def delete(self, repo):
247 def delete(self, repo):
248 try:
248 try:
249 self.sa.delete(repo)
249 self.sa.delete(repo)
250 self.__delete_repo(repo)
250 self.__delete_repo(repo)
251 self.sa.commit()
251 self.sa.commit()
252 except:
252 except:
253 log.error(traceback.format_exc())
253 log.error(traceback.format_exc())
254 self.sa.rollback()
254 self.sa.rollback()
255 raise
255 raise
256
256
257 def delete_perm_user(self, form_data, repo_name):
257 def delete_perm_user(self, form_data, repo_name):
258 try:
258 try:
259 self.sa.query(RepoToPerm)\
259 self.sa.query(RepoToPerm)\
260 .filter(RepoToPerm.repository \
260 .filter(RepoToPerm.repository \
261 == self.get_by_repo_name(repo_name))\
261 == self.get_by_repo_name(repo_name))\
262 .filter(RepoToPerm.user_id == form_data['user_id']).delete()
262 .filter(RepoToPerm.user_id == form_data['user_id']).delete()
263 self.sa.commit()
263 self.sa.commit()
264 except:
264 except:
265 log.error(traceback.format_exc())
265 log.error(traceback.format_exc())
266 self.sa.rollback()
266 self.sa.rollback()
267 raise
267 raise
268
268
269 def delete_perm_users_group(self, form_data, repo_name):
269 def delete_perm_users_group(self, form_data, repo_name):
270 try:
270 try:
271 self.sa.query(UsersGroupRepoToPerm)\
271 self.sa.query(UsersGroupRepoToPerm)\
272 .filter(UsersGroupRepoToPerm.repository \
272 .filter(UsersGroupRepoToPerm.repository \
273 == self.get_by_repo_name(repo_name))\
273 == self.get_by_repo_name(repo_name))\
274 .filter(UsersGroupRepoToPerm.users_group_id \
274 .filter(UsersGroupRepoToPerm.users_group_id \
275 == form_data['users_group_id']).delete()
275 == form_data['users_group_id']).delete()
276 self.sa.commit()
276 self.sa.commit()
277 except:
277 except:
278 log.error(traceback.format_exc())
278 log.error(traceback.format_exc())
279 self.sa.rollback()
279 self.sa.rollback()
280 raise
280 raise
281
281
282 def delete_stats(self, repo_name):
282 def delete_stats(self, repo_name):
283 try:
283 try:
284 self.sa.query(Statistics)\
284 self.sa.query(Statistics)\
285 .filter(Statistics.repository == \
285 .filter(Statistics.repository == \
286 self.get_by_repo_name(repo_name)).delete()
286 self.get_by_repo_name(repo_name)).delete()
287 self.sa.commit()
287 self.sa.commit()
288 except:
288 except:
289 log.error(traceback.format_exc())
289 log.error(traceback.format_exc())
290 self.sa.rollback()
290 self.sa.rollback()
291 raise
291 raise
292
292
293 def __create_repo(self, repo_name, alias, new_parent_id, clone_uri=False):
293 def __create_repo(self, repo_name, alias, new_parent_id, clone_uri=False):
294 """
294 """
295 makes a repository on the filesystem. It is group aware, meaning it will create
295 makes a repository on the filesystem. It is group aware, meaning it will create
296 the repository within a group and alter the paths according to the
296 the repository within a group and alter the paths according to the
297 group location
297 group location
298
298
299 :param repo_name:
299 :param repo_name:
300 :param alias:
300 :param alias:
301 :param new_parent_id:
301 :param new_parent_id:
302 :param clone_uri:
302 :param clone_uri:
303 """
303 """
304 from rhodecode.lib.utils import check_repo
304 from rhodecode.lib.utils import check_repo_fast
305
305
306 if new_parent_id:
306 if new_parent_id:
307 paths = Group.get(new_parent_id).full_path.split(Group.url_sep())
307 paths = Group.get(new_parent_id).full_path.split(Group.url_sep())
308 new_parent_path = os.sep.join(paths)
308 new_parent_path = os.sep.join(paths)
309 else:
309 else:
310 new_parent_path = ''
310 new_parent_path = ''
311
311
312 repo_path = os.path.join(*map(lambda x:safe_str(x),
312 repo_path = os.path.join(*map(lambda x:safe_str(x),
313 [self.repos_path, new_parent_path, repo_name]))
313 [self.repos_path, new_parent_path, repo_name]))
314
314
315 if check_repo(repo_path, self.repos_path):
315 if check_repo_fast(repo_path, self.repos_path) is False:
316 log.info('creating repo %s in %s @ %s', repo_name, repo_path,
316 log.info('creating repo %s in %s @ %s', repo_name, repo_path,
317 clone_uri)
317 clone_uri)
318 backend = get_backend(alias)
318 backend = get_backend(alias)
319
319
320 backend(repo_path, create=True, src_url=clone_uri)
320 backend(repo_path, create=True, src_url=clone_uri)
321
321
322
322
323 def __rename_repo(self, old, new):
323 def __rename_repo(self, old, new):
324 """
324 """
325 renames repository on filesystem
325 renames repository on filesystem
326
326
327 :param old: old name
327 :param old: old name
328 :param new: new name
328 :param new: new name
329 """
329 """
330 log.info('renaming repo from %s to %s', old, new)
330 log.info('renaming repo from %s to %s', old, new)
331
331
332 old_path = os.path.join(self.repos_path, old)
332 old_path = os.path.join(self.repos_path, old)
333 new_path = os.path.join(self.repos_path, new)
333 new_path = os.path.join(self.repos_path, new)
334 if os.path.isdir(new_path):
334 if os.path.isdir(new_path):
335 raise Exception('Was trying to rename to already existing dir %s' \
335 raise Exception('Was trying to rename to already existing dir %s' \
336 % new_path)
336 % new_path)
337 shutil.move(old_path, new_path)
337 shutil.move(old_path, new_path)
338
338
339 def __delete_repo(self, repo):
339 def __delete_repo(self, repo):
340 """
340 """
341 removes repo from filesystem, the removal is actually made by
341 removes repo from filesystem, the removal is actually made by
342 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
342 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
343 repository is no longer valid for rhodecode; it can be undeleted later on
343 repository is no longer valid for rhodecode; it can be undeleted later on
344 by reverting the renames on this repository
344 by reverting the renames on this repository
345
345
346 :param repo: repo object
346 :param repo: repo object
347 """
347 """
348 rm_path = os.path.join(self.repos_path, repo.repo_name)
348 rm_path = os.path.join(self.repos_path, repo.repo_name)
349 log.info("Removing %s", rm_path)
349 log.info("Removing %s", rm_path)
350 #disable hg/git
350 #disable hg/git
351 alias = repo.repo_type
351 alias = repo.repo_type
352 shutil.move(os.path.join(rm_path, '.%s' % alias),
352 shutil.move(os.path.join(rm_path, '.%s' % alias),
353 os.path.join(rm_path, 'rm__.%s' % alias))
353 os.path.join(rm_path, 'rm__.%s' % alias))
354 #disable repo
354 #disable repo
355 shutil.move(rm_path, os.path.join(self.repos_path, 'rm__%s__%s' \
355 shutil.move(rm_path, os.path.join(self.repos_path, 'rm__%s__%s' \
356 % (datetime.today()\
356 % (datetime.today()\
357 .strftime('%Y%m%d_%H%M%S_%f'),
357 .strftime('%Y%m%d_%H%M%S_%f'),
358 repo.repo_name)))
358 repo.repo_name)))