##// END OF EJS Templates
integrations: add integration support...
dan -
r411:df8dc98d default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,52 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22 from rhodecode.integrations.registry import IntegrationTypeRegistry
23 from rhodecode.integrations.types import slack
24
25 log = logging.getLogger(__name__)
26
27
28 # TODO: dan: This is currently global until we figure out what to do about
29 # VCS's not having a pyramid context - move it to pyramid app configuration
30 # includeme level later to allow per instance integration setup
31 integration_type_registry = IntegrationTypeRegistry()
32 integration_type_registry.register_integration_type(slack.SlackIntegrationType)
33
def integrations_event_handler(event):
    """
    Takes an event and passes it to all enabled integrations.

    A failure in one integration is logged and does not stop delivery
    of the event to the remaining integrations.

    :param event: a rhodecode event instance (see ``rhodecode.events``)
    """
    # NOTE: local import — presumably avoids a circular import at module
    # load time; confirm before moving to the top of the file
    from rhodecode.model.integration import IntegrationModel

    integration_model = IntegrationModel()
    integrations = integration_model.get_for_event(event)
    for integration in integrations:
        try:
            integration_model.send_event(integration, event)
        except Exception:
            # fixed typo ('occured') and use lazy logging arguments so
            # formatting only happens when the record is emitted
            log.exception(
                'failure occurred when sending event %s to integration %s',
                event, integration)
49
50
def includeme(config):
    """Pyramid ``includeme`` hook: pull in the integrations routes."""
    routes_module = 'rhodecode.integrations.routes'
    config.include(routes_module)
@@ -0,0 +1,37 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2012-2016 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 import logging
21
# use the module logger, not the root logger, so records are attributed
# to this module (consistent with the other integrations modules)
log = logging.getLogger(__name__)
23
24
class IntegrationTypeRegistry(dict):
    """
    Registry Class to hold IntegrationTypes

    Maps an integration type's ``key`` attribute to the type itself;
    re-registering an existing key replaces the entry with a warning.
    """
    def register_integration_type(self, IntegrationType):
        """
        Register *IntegrationType* under its ``key`` class attribute.

        :param IntegrationType: a class exposing a ``key`` attribute
        """
        key = IntegrationType.key
        if key in self:
            # lazy logging args instead of eager %-formatting
            log.warning(
                'Overriding existing integration type %s (%s) with %s',
                self[key], key, IntegrationType)

        self[key] = IntegrationType
37
@@ -0,0 +1,133 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22
23 from rhodecode.model.db import Repository, Integration
24 from rhodecode.config.routing import (
25 ADMIN_PREFIX, add_route_requirements, URL_NAME_REQUIREMENTS)
26 from rhodecode.integrations import integration_type_registry
27
28 log = logging.getLogger(__name__)
29
30
def includeme(config):
    """
    Register routes + views for the integration settings screens, both
    at the global (admin) level and per repository.

    NOTE(review): pyramid route registration order matters for URL
    matching — the ``/new`` create routes must stay registered before
    the ``/{integration_id}`` edit routes.
    """
    # -- global (admin) scope: landing page and per-type listing --------
    config.add_route('global_integrations_home',
                     ADMIN_PREFIX + '/integrations')
    config.add_route('global_integrations_list',
                     ADMIN_PREFIX + '/integrations/{integration}')
    for route_name in ['global_integrations_home', 'global_integrations_list']:
        config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                        attr='index',
                        renderer='rhodecode:templates/admin/integrations/list.html',
                        request_method='GET',
                        route_name=route_name)

    # create/edit share the same view; the predicate rejects unknown
    # integration types before the view is ever reached
    config.add_route('global_integrations_create',
                     ADMIN_PREFIX + '/integrations/{integration}/new',
                     custom_predicates=(valid_integration,))
    config.add_route('global_integrations_edit',
                     ADMIN_PREFIX + '/integrations/{integration}/{integration_id}',
                     custom_predicates=(valid_integration,))
    for route_name in ['global_integrations_create', 'global_integrations_edit']:
        # GET shows the settings form, POST validates and stores it
        config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                        attr='settings_get',
                        renderer='rhodecode:templates/admin/integrations/edit.html',
                        request_method='GET',
                        route_name=route_name)
        config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                        attr='settings_post',
                        renderer='rhodecode:templates/admin/integrations/edit.html',
                        request_method='POST',
                        route_name=route_name)

    # -- repository scope: same structure, nested under repo settings ---
    config.add_route('repo_integrations_home',
                     add_route_requirements(
                         '{repo_name}/settings/integrations',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo,))
    config.add_route('repo_integrations_list',
                     add_route_requirements(
                         '{repo_name}/settings/integrations/{integration}',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo, valid_integration))
    for route_name in ['repo_integrations_home', 'repo_integrations_list']:
        config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
                        attr='index',
                        request_method='GET',
                        route_name=route_name)

    config.add_route('repo_integrations_create',
                     add_route_requirements(
                         '{repo_name}/settings/integrations/{integration}/new',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo, valid_integration))
    config.add_route('repo_integrations_edit',
                     add_route_requirements(
                         '{repo_name}/settings/integrations/{integration}/{integration_id}',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo, valid_integration))
    for route_name in ['repo_integrations_edit', 'repo_integrations_create']:
        config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
                        attr='settings_get',
                        renderer='rhodecode:templates/admin/integrations/edit.html',
                        request_method='GET',
                        route_name=route_name)
        config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
                        attr='settings_post',
                        renderer='rhodecode:templates/admin/integrations/edit.html',
                        request_method='POST',
                        route_name=route_name)
102
103
def valid_repo(info, request):
    """
    Route predicate: match only when ``repo_name`` refers to an
    existing repository.

    :returns: True if the repository exists, False otherwise
    """
    repo = Repository.get_by_repo_name(info['match']['repo_name'])
    # explicit boolean instead of the implicit ``None`` fall-through;
    # still falsy/truthy-compatible for pyramid's predicate machinery
    return repo is not None
108
109
def valid_integration(info, request):
    """
    Route predicate: validate the integration bits of a matched URL.

    Checks that the integration type is registered, that ``repo_name``
    (if present) names an existing repository, and that
    ``integration_id`` (if present) refers to an integration of the
    matched type belonging to that repository.

    :returns: True when the match is consistent, False otherwise
    """
    integration_type = info['match']['integration']
    integration_id = info['match'].get('integration_id')
    repo_name = info['match'].get('repo_name')

    if integration_type not in integration_type_registry:
        return False

    repo = None
    if repo_name:
        # reuse the already-extracted repo_name instead of re-reading
        # info['match']['repo_name'] a second time
        repo = Repository.get_by_repo_name(repo_name)
        if not repo:
            return False

    if integration_id:
        integration = Integration.get(integration_id)
        if not integration:
            return False
        if integration.integration_type != integration_type:
            return False
        # a repo-scoped URL must not expose another repo's integration
        if repo and repo.repo_id != integration.repo_id:
            return False

    return True
@@ -0,0 +1,48 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22
23 from rhodecode.translation import lazy_ugettext
24
25
class IntegrationSettingsSchemaBase(colander.MappingSchema):
    """
    This base schema is intended for use in integrations.
    It adds a few default settings (e.g., "enabled"), so that integration
    authors don't have to maintain a bunch of boilerplate.
    """
    # on/off toggle; ``missing=False`` means an unchecked checkbox
    # deserializes to disabled instead of raising a validation error
    enabled = colander.SchemaNode(
        colander.Bool(),
        default=True,
        description=lazy_ugettext('Enable or disable this integration.'),
        missing=False,
        title=lazy_ugettext('Enabled'),
        widget='bool',
    )

    # human readable label shown in the integrations list;
    # ``missing=colander.required`` makes the field mandatory on submit
    name = colander.SchemaNode(
        colander.String(),
        description=lazy_ugettext('Short name for this integration.'),
        missing=colander.required,
        title=lazy_ugettext('Integration name'),
        widget='string',
    )
48
@@ -0,0 +1,19 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,43 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 from rhodecode.integrations.schema import IntegrationSettingsSchemaBase
22
23
class IntegrationTypeBase(object):
    """ Base class for IntegrationType plugins """

    def __init__(self, settings):
        """
        :param settings: dict of settings to be used for the integration
        """
        self.settings = settings


    @classmethod
    def settings_schema(cls):
        """
        A colander schema of settings for the integration type

        Subclasses can return their own schema but should always
        inherit from IntegrationSettingsSchemaBase
        """
        return IntegrationSettingsSchemaBase()
43
@@ -0,0 +1,199 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 from __future__ import unicode_literals
22
23 import re
24 import logging
25 import requests
26 import colander
27 from celery.task import task
28 from mako.template import Template
29
30 from rhodecode import events
31 from rhodecode.translation import lazy_ugettext
32 from rhodecode.lib import helpers as h
33 from rhodecode.lib.celerylib import run_task
34 from rhodecode.lib.colander_utils import strip_whitespace
35 from rhodecode.integrations.types.base import IntegrationTypeBase
36 from rhodecode.integrations.schema import IntegrationSettingsSchemaBase
37
# use the module logger, not the root logger, so slack integration
# records are attributed to this module like the sibling modules
log = logging.getLogger(__name__)
39
40
class SlackSettingsSchema(IntegrationSettingsSchemaBase):
    """Settings form schema for the slack integration."""

    # incoming-webhook endpoint; whitespace-stripped and URL-validated
    service = colander.SchemaNode(
        colander.String(),
        title=lazy_ugettext('Slack service URL'),
        description=h.literal(lazy_ugettext(
            'This can be setup at the '
            '<a href="https://my.slack.com/services/new/incoming-webhook/">'
            'slack app manager</a>')),
        default='',
        placeholder='https://hooks.slack.com/services/...',
        preparer=strip_whitespace,
        validator=colander.url,
        widget='string'
    )
    # sender name shown in slack; defaults to 'Rhodecode' when omitted
    username = colander.SchemaNode(
        colander.String(),
        title=lazy_ugettext('Username'),
        description=lazy_ugettext('Username to show notifications coming from.'),
        missing='Rhodecode',
        preparer=strip_whitespace,
        widget='string',
        placeholder='Rhodecode'
    )
    # empty channel means the webhook's default channel is used
    channel = colander.SchemaNode(
        colander.String(),
        title=lazy_ugettext('Channel'),
        description=lazy_ugettext('Channel to send notifications to.'),
        missing='',
        preparer=strip_whitespace,
        widget='string',
        placeholder='#general'
    )
    # avatar emoji for the bot's messages
    icon_emoji = colander.SchemaNode(
        colander.String(),
        title=lazy_ugettext('Emoji'),
        description=lazy_ugettext('Emoji to use eg. :studio_microphone:'),
        missing='',
        preparer=strip_whitespace,
        widget='string',
        placeholder=':studio_microphone:'
    )
82
83
# mako template for the slack text of a push event; rendered by
# ``SlackIntegrationType.format_repo_push_event`` with the event's
# ``data`` dict and the ``html_to_slack_links`` filter in its context.
# Trailing backslashes are mako line-continuations (keep the message on
# one slack line); ``>>>`` is slack's block-quote markup.
repo_push_template = Template(r'''
*${data['actor']['username']}* pushed to \
%if data['push']['branches']:
${len(data['push']['branches']) > 1 and 'branches' or 'branch'} \
${', '.join('<%s|%s>' % (branch['url'], branch['name']) for branch in data['push']['branches'])} \
%else:
unknown branch \
%endif
in <${data['repo']['url']}|${data['repo']['repo_name']}>
>>>
%for commit in data['push']['commits']:
<${commit['url']}|${commit['short_id']}> - ${commit['message_html']|html_to_slack_links}
%endfor
''')
98
99
class SlackIntegrationType(IntegrationTypeBase):
    """
    Integration that posts notifications for repository / pull request
    events to a slack incoming-webhook.
    """
    key = 'slack'
    display_name = lazy_ugettext('Slack')
    SettingsSchema = SlackSettingsSchema
    # events this integration knows how to format and deliver
    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
    ]

    def send_event(self, event):
        """
        Format *event* into slack text and dispatch it asynchronously.

        Events not in ``valid_events``, or deselected in this
        integration's settings, are silently skipped.
        """
        if event.__class__ not in self.valid_events:
            log.debug('event not valid: %r', event)
            return

        if event.name not in self.settings['events']:
            log.debug('event ignored: %r', event)
            return

        data = event.as_dict()

        # generic fallback text; overridden by the specific formatters
        text = '*%s* caused a *%s* event' % (
            data['actor']['username'], event.name)

        if isinstance(event, events.PullRequestEvent):
            text = self.format_pull_request_event(event, data)
        elif isinstance(event, events.RepoPushEvent):
            text = self.format_repo_push_event(data)
        elif isinstance(event, events.RepoCreateEvent):
            text = self.format_repo_create_event(data)
        else:
            log.error('unhandled event type: %r', event)

        run_task(post_text_to_slack, self.settings, text)

    @classmethod
    def settings_schema(cls):
        """
        Return the slack settings schema extended with an ``events``
        multi-select listing every event this integration supports
        (all enabled by default).
        """
        schema = SlackSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget='checkbox_list',
            choices=sorted([e.name for e in cls.valid_events]),
            description="Events activated for this integration",
            default=[e.name for e in cls.valid_events],
            name='events'
        ))
        return schema

    def format_pull_request_event(self, event, data):
        """Return slack text describing a pull request event."""
        action = {
            events.PullRequestCloseEvent: 'closed',
            events.PullRequestMergeEvent: 'merged',
            events.PullRequestUpdateEvent: 'updated',
            events.PullRequestReviewEvent: 'reviewed',
            events.PullRequestCreateEvent: 'created',
        }.get(event.__class__, '<unknown action>')

        return ('Pull request <{url}|#{number}> ({title}) '
                '{action} by {user}').format(
            user=data['actor']['username'],
            number=data['pullrequest']['pull_request_id'],
            url=data['pullrequest']['url'],
            title=data['pullrequest']['title'],
            action=action
        )

    def format_repo_push_event(self, data):
        """Render the push-event mako template into slack text."""
        result = repo_push_template.render(
            data=data,
            html_to_slack_links=html_to_slack_links,
        )
        return result

    def format_repo_create_event(self, data):
        """
        Return slack text describing a repository-creation event.

        BUG FIX: this was named ``format_repo_create_msg`` while
        ``send_event`` called ``format_repo_create_event``, which raised
        AttributeError on every RepoCreateEvent.
        """
        return '<{}|{}> ({}) repository created by *{}*'.format(
            data['repo']['url'],
            data['repo']['repo_name'],
            data['repo']['repo_type'],
            data['actor']['username'],
        )

    # backwards-compatible alias for the old (misspelled) method name
    format_repo_create_msg = format_repo_create_event
184
185
def html_to_slack_links(message):
    """
    Rewrite html anchors in *message* into slack link markup
    (``<url|text>``).

    The href value may be wrapped in single or double quotes; the
    backreference ``\\1`` requires the closing quote to match the
    opening one. (The previous pattern accepted either opening quote
    but demanded a literal ``"`` close, so single-quoted hrefs were
    never rewritten.)
    """
    return re.sub(
        r'<a .*?href=(["\'])(.+?)\1.*?>(.+?)</a>',
        r'<\2|\3>', message)
189
190
@task(ignore_result=True)
def post_text_to_slack(settings, text):
    """
    Celery task: POST *text* to the slack incoming-webhook configured
    in *settings* (see ``SlackSettingsSchema`` for the keys).

    :raises requests.HTTPError: when slack answers with an error status
    """
    resp = requests.post(settings['service'], json={
        "channel": settings.get('channel', ''),
        "username": settings.get('username', 'Rhodecode'),
        "text": text,
        "icon_emoji": settings.get('icon_emoji', ':studio_microphone:')
    }, timeout=30)  # never let an unresponsive endpoint hang the worker
    resp.raise_for_status()  # raise exception on a failed request
@@ -0,0 +1,257 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22 import logging
23 import pylons
24
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 from pyramid.renderers import render
27 from pyramid.response import Response
28
29 from rhodecode.lib import auth
30 from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
31 from rhodecode.model.db import Repository, Session, Integration
32 from rhodecode.model.scm import ScmModel
33 from rhodecode.model.integration import IntegrationModel
34 from rhodecode.admin.navigation import navigation_list
35 from rhodecode.translation import _
36 from rhodecode.integrations import integration_type_registry
37
38 log = logging.getLogger(__name__)
39
40
41 class IntegrationSettingsViewBase(object):
42 """ Base Integration settings view used by both repo / global settings """
43
44 def __init__(self, context, request):
45 self.context = context
46 self.request = request
47 self._load_general_context()
48
49 if not self.perm_check(request.user):
50 raise HTTPForbidden()
51
52 def _load_general_context(self):
53 """
54 This avoids boilerplate for repo/global+list/edit+views/templates
55 by doing all possible contexts at the same time however it should
56 be split up into separate functions once more "contexts" exist
57 """
58
59 self.IntegrationType = None
60 self.repo = None
61 self.integration = None
62 self.integrations = {}
63
64 request = self.request
65
66 if 'repo_name' in request.matchdict: # we're in a repo context
67 repo_name = request.matchdict['repo_name']
68 self.repo = Repository.get_by_repo_name(repo_name)
69
70 if 'integration' in request.matchdict: # we're in integration context
71 integration_type = request.matchdict['integration']
72 self.IntegrationType = integration_type_registry[integration_type]
73
74 if 'integration_id' in request.matchdict: # single integration context
75 integration_id = request.matchdict['integration_id']
76 self.integration = Integration.get(integration_id)
77 else: # list integrations context
78 for integration in IntegrationModel().get_integrations(self.repo):
79 self.integrations.setdefault(integration.integration_type, []
80 ).append(integration)
81
82 self.settings = self.integration and self.integration.settings or {}
83
84 def _template_c_context(self):
85 # TODO: dan: this is a stopgap in order to inherit from current pylons
86 # based admin/repo settings templates - this should be removed entirely
87 # after port to pyramid
88
89 c = pylons.tmpl_context
90 c.active = 'integrations'
91 c.rhodecode_user = self.request.user
92 c.repo = self.repo
93 c.repo_name = self.repo and self.repo.repo_name or None
94 if self.repo:
95 c.repo_info = self.repo
96 c.rhodecode_db_repo = self.repo
97 c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)
98 else:
99 c.navlist = navigation_list(self.request)
100
101 return c
102
103 def _form_schema(self):
104 return self.IntegrationType.settings_schema()
105
106 def settings_get(self, defaults=None, errors=None):
107 """
108 View that displays the plugin settings as a form.
109 """
110 defaults = defaults or {}
111 errors = errors or {}
112
113 schema = self._form_schema()
114
115 if not defaults:
116 if self.integration:
117 defaults['enabled'] = self.integration.enabled
118 defaults['name'] = self.integration.name
119 else:
120 if self.repo:
121 scope = self.repo.repo_name
122 else:
123 scope = _('Global')
124
125 defaults['name'] = '{} {} integration'.format(scope,
126 self.IntegrationType.display_name)
127 defaults['enabled'] = True
128
129 for node in schema:
130 setting = self.settings.get(node.name)
131 if setting is not None:
132 defaults.setdefault(node.name, setting)
133 else:
134 if node.default:
135 defaults.setdefault(node.name, node.default)
136
137 template_context = {
138 'defaults': defaults,
139 'errors': errors,
140 'schema': schema,
141 'current_IntegrationType': self.IntegrationType,
142 'integration': self.integration,
143 'settings': self.settings,
144 'resource': self.context,
145 'c': self._template_c_context(),
146 }
147
148 return template_context
149
150 @auth.CSRFRequired()
151 def settings_post(self):
152 """
153 View that validates and stores the plugin settings.
154 """
155 if self.request.params.get('delete'):
156 Session().delete(self.integration)
157 Session().commit()
158 self.request.session.flash(
159 _('Integration {integration_name} deleted successfully.').format(
160 integration_name=self.integration.name),
161 queue='success')
162 if self.repo:
163 redirect_to = self.request.route_url(
164 'repo_integrations_home', repo_name=self.repo.repo_name)
165 else:
166 redirect_to = self.request.route_url('global_integrations_home')
167 raise HTTPFound(redirect_to)
168
169 schema = self._form_schema()
170
171 params = {}
172 for node in schema.children:
173 if type(node.typ) in (colander.Set, colander.List):
174 val = self.request.params.getall(node.name)
175 else:
176 val = self.request.params.get(node.name)
177 if val:
178 params[node.name] = val
179
180 try:
181 valid_data = schema.deserialize(params)
182 except colander.Invalid, e:
183 # Display error message and display form again.
184 self.request.session.flash(
185 _('Errors exist when saving plugin settings. '
186 'Please check the form inputs.'),
187 queue='error')
188 return self.settings_get(errors=e.asdict(), defaults=params)
189
190 if not self.integration:
191 self.integration = Integration(
192 integration_type=self.IntegrationType.key)
193 if self.repo:
194 self.integration.repo = self.repo
195 Session.add(self.integration)
196
197 self.integration.enabled = valid_data.pop('enabled', False)
198 self.integration.name = valid_data.pop('name')
199 self.integration.settings = valid_data
200
201 Session.commit()
202
203 # Display success message and redirect.
204 self.request.session.flash(
205 _('Integration {integration_name} updated successfully.').format(
206 integration_name=self.IntegrationType.display_name,
207 queue='success'))
208 if self.repo:
209 redirect_to = self.request.route_url(
210 'repo_integrations_edit', repo_name=self.repo.repo_name,
211 integration=self.integration.integration_type,
212 integration_id=self.integration.integration_id)
213 else:
214 redirect_to = self.request.route_url(
215 'global_integrations_edit',
216 integration=self.integration.integration_type,
217 integration_id=self.integration.integration_id)
218
219 return HTTPFound(redirect_to)
220
221 def index(self):
222 current_integrations = self.integrations
223 if self.IntegrationType:
224 current_integrations = {
225 self.IntegrationType.key: self.integrations.get(
226 self.IntegrationType.key, [])
227 }
228
229 template_context = {
230 'current_IntegrationType': self.IntegrationType,
231 'current_integrations': current_integrations,
232 'current_integration': 'none',
233 'available_integrations': integration_type_registry,
234 'c': self._template_c_context()
235 }
236
237 if self.repo:
238 html = render('rhodecode:templates/admin/integrations/list.html',
239 template_context,
240 request=self.request)
241 else:
242 html = render('rhodecode:templates/admin/integrations/list.html',
243 template_context,
244 request=self.request)
245
246 return Response(html)
247
248
class GlobalIntegrationsView(IntegrationSettingsViewBase):
    """Integration settings at instance (admin) scope."""

    def perm_check(self, user):
        # only instance-wide admins may manage global integrations
        checker = auth.HasPermissionAll('hg.admin')
        return checker.check_permissions(user=user)
252
253
class RepoIntegrationsView(IntegrationSettingsViewBase):
    """Integration settings scoped to a single repository."""

    def perm_check(self, user):
        # repository admins may manage their own repo's integrations
        checker = auth.HasRepoPermissionAll('repository.admin')
        return checker(repo_name=self.repo.repo_name, user=user)
This diff has been collapsed as it changes many lines, (3516 lines changed) Show them Hide them
@@ -0,0 +1,3516 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 Database Models for RhodeCode Enterprise
23 """
24
25 import os
26 import sys
27 import time
28 import hashlib
29 import logging
30 import datetime
31 import warnings
32 import ipaddress
33 import functools
34 import traceback
35 import collections
36
37
38 from sqlalchemy import *
39 from sqlalchemy.exc import IntegrityError
40 from sqlalchemy.ext.declarative import declared_attr
41 from sqlalchemy.ext.hybrid import hybrid_property
42 from sqlalchemy.orm import (
43 relationship, joinedload, class_mapper, validates, aliased)
44 from sqlalchemy.sql.expression import true
45 from beaker.cache import cache_region, region_invalidate
46 from webob.exc import HTTPNotFound
47 from zope.cachedescriptors.property import Lazy as LazyProperty
48
49 from pylons import url
50 from pylons.i18n.translation import lazy_ugettext as _
51
52 from rhodecode.lib.vcs import get_backend
53 from rhodecode.lib.vcs.utils.helpers import get_scm
54 from rhodecode.lib.vcs.exceptions import VCSError
55 from rhodecode.lib.vcs.backends.base import (
56 EmptyCommit, Reference, MergeFailureReason)
57 from rhodecode.lib.utils2 import (
58 str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe,
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict)
60 from rhodecode.lib.ext_json import json
61 from rhodecode.lib.caching_query import FromCache
62 from rhodecode.lib.encrypt import AESCipher
63
64 from rhodecode.model.meta import Base, Session
65
66 URL_SEP = '/'
67 log = logging.getLogger(__name__)
68
69 # =============================================================================
70 # BASE CLASSES
71 # =============================================================================
72
73 # this is propagated from .ini file rhodecode.encrypted_values.secret or
74 # beaker.session.secret if first is not set.
75 # and initialized at environment.py
76 ENCRYPTION_KEY = None
77
78 # used to sort permissions by types, '#' used here is not allowed to be in
79 # usernames, and it's very early in sorted string.printable table.
80 PERMISSION_TYPE_SORT = {
81 'admin': '####',
82 'write': '###',
83 'read': '##',
84 'none': '#',
85 }
86
87
def display_sort(obj):
    """
    Key function used to sort permission holders in the ``permissions()``
    methods of Repository, RepoGroup and UserGroup.

    The default user always sorts first ('#####'); everybody else is ordered
    by permission level (admin/write/read/none via PERMISSION_TYPE_SORT) and
    then alphabetically by username.
    """
    if obj.username == User.DEFAULT_USER:
        return '#####'

    perm_level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_level, '') + obj.username
99
100
def _hash_key(k):
    """Return a stable md5 digest of *k*; used to build safe cache keys."""
    return md5_safe(k)
103
104
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        # Called on the way INTO the database: always store the
        # hmac-protected 'enc$aes_hmac$<payload>' form.
        if not value:
            return value
        if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
            # protect against double encrypting if someone manually starts
            # doing
            raise ValueError('value needs to be in unencrypted format, ie. '
                             'not starting with enc$aes')
        return 'enc$aes_hmac$%s' % AESCipher(
            ENCRYPTION_KEY, hmac=True).encrypt(value)

    def process_result_value(self, value, dialect):
        # Called on the way OUT of the database: values without our
        # 'enc$<mode>$' header are passed through unchanged (legacy /
        # unencrypted rows).
        import rhodecode

        if not value:
            return value

        # NOTE(review): split('$', 3) can yield up to 4 parts; a payload
        # containing '$' would make len(parts) != 3 and the value would be
        # returned as-is ("not encrypted") - confirm the cipher's output
        # alphabet excludes '$'.
        parts = value.split('$', 3)
        if not len(parts) == 3:
            # probably not encrypted values
            return value
        else:
            if parts[0] != 'enc':
                # parts ok but without our header ?
                return value
            # strict mode defaults to True unless explicitly configured
            enc_strict_mode = str2bool(rhodecode.CONFIG.get(
                'rhodecode.encrypted_values.strict') or True)
            # at that stage we know it's our encryption
            if parts[1] == 'aes':
                decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
            elif parts[1] == 'aes_hmac':
                decrypted_data = AESCipher(
                    ENCRYPTION_KEY, hmac=True,
                    strict_verification=enc_strict_mode).decrypt(parts[2])
            else:
                raise ValueError(
                    'Encryption type part is wrong, must be `aes` '
                    'or `aes_hmac`, got `%s` instead' % (parts[1]))
            return decrypted_data
155
156
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        l = []
        for k in self._get_keys():
            l.append((k, getattr(self, k),))
        return l

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only keys that map to actual columns are copied over
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # shortcut for a query over this model bound to the current Session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # returns None for falsy ids instead of querying with None
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise HTTPNotFound (also on bad id)."""
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            # non-numeric id is treated the same as a missing object
            raise HTTPNotFound

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # NOTE(review): no commit here - callers are expected to commit
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Return an already-loaded instance of this class from the session
        identity map whose ``attr_name`` equals ``value``, if exactly one
        such instance exists; logs and returns None on ambiguity.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
259
260
class RhodeCodeSetting(Base, BaseModel):
    """
    Instance-wide application setting stored as a typed key/value pair.

    The stored value is always unicode text; ``app_settings_type`` names the
    converter (see SETTINGS_TYPES) applied when reading the value back.
    A type suffix containing 'encrypted' makes the value transparently
    encrypted/decrypted via EncryptedTextValue.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # mapping of type name -> converter applied on read
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # the setter below always stores unicode; guard against raw writes
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip a possible '.encrypted'-style suffix to find the base type
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # the base type (before any '.' suffix) must be a known converter
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
340
341
class RhodeCodeUi(Base, BaseModel):
    """
    Instance-wide VCS ui/hook configuration entries (section/key/value),
    mirroring Mercurial-style ui settings.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        # NOTE(review): format string has unbalanced brackets; debug-only
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
379
380
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository override of a RhodeCode application setting; same typed
    key/value scheme as RhodeCodeSetting but scoped to one repository and
    without the encrypted-value handling.
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # the setter below always stores unicode; guard against raw writes
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        # reuse the converters declared on the global settings model
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # NOTE(review): unlike RhodeCodeSetting, no '.suffix' is allowed here
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
455
456
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a VCS ui/hook configuration entry
    (see RhodeCodeUi for the instance-wide variant).
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        # NOTE(review): format string has unbalanced brackets; debug-only
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
488
489
class User(Base, BaseModel):
    """
    User account model: credentials, profile data, auth tokens and the
    relationships to everything a user owns or is permitted on.
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    # NOTE(review): attribute name has a typo ('extenal') but is kept for
    # backward compatibility with existing callers
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are always stored lower-cased; empty values become None
        self._email = val.lower() if val else None

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        """Main email plus all extra emails from UserEmailMap."""
        other = UserEmailMap.query().filter(UserEmailMap.user == self).all()
        return [self.email] + [x.email for x in other]

    @property
    def auth_tokens(self):
        return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]

    @property
    def extra_auth_tokens(self):
        return UserApiKeys.query().filter(UserApiKeys.user == self).all()

    @property
    def feed_token(self):
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
            .all()
        if feed_tokens:
            return feed_tokens[0].api_key
        else:
            # use the main token so we don't end up with nothing...
            return self.api_key

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """Return user's extra tokens that have not expired, optionally
        restricted to a role (ROLE_ALL tokens always match)."""
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)

    @property
    def username_or_name_or_email(self):
        # fix: used identity comparison `is not ' '` against a string
        # literal, which is implementation-dependent; compare by value
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def full_name_or_username(self):
        return ('%s %s' % (self.firstname, self.lastname)
                if (self.firstname and self.lastname) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    @property
    def AuthUser(self):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, api_key=self.api_key,
                        username=self.username)

    @hybrid_property
    def user_data(self):
        # JSON-decoded view over the raw _user_data blob; empty dict when
        # unset or undecodable
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username, optionally case-insensitively and with
        either session identity-map caching or query-result caching.
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                q = q.options(
                    FromCache("sql_cache_short",
                              "get_user_by_name_%s" % _hash_key(username)))

        return q.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
        """Fetch a user by main api_key; with ``fallback`` also try the
        extra, non-expired UserApiKeys tokens."""
        q = cls.query().filter(cls.api_key == auth_token)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_auth_token_%s" % auth_token))
        res = q.scalar()

        if fallback and not res:
            # fallback to additional keys
            _res = UserApiKeys.query()\
                .filter(UserApiKeys.api_key == auth_token)\
                .filter(or_(UserApiKeys.expires == -1,
                            UserApiKeys.expires >= time.time()))\
                .first()
            if _res:
                res = _res.user
        return res

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """Fetch a user by main email, falling back to UserEmailMap."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_email_key_%s" % email))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(FromCache("sql_cache_short",
                                        "get_email_map_key_%s" % email))
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

    def update_userdata(self, **kwargs):
        """Merge ``kwargs`` into the JSON user_data blob."""
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        # fix: message had no %s placeholder, so kwargs was never logged
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_lastactivity(self):
        """Update user lastactivity"""
        usr = self
        old = usr.user_data
        old.update({'last_activity': time.time()})
        usr.user_data = old
        Session().add(usr)
        log.debug('updated user %s lastactivity', usr.username)

    def update_password(self, new_password, change_api_key=False):
        from rhodecode.lib.auth import get_crypt_password, generate_auth_token

        self.password = get_crypt_password(new_password)
        if change_api_key:
            self.api_key = generate_auth_token(self.username)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        user = User.query().filter(User.admin == true()).first()
        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls):
        """
        Returns all admin accounts sorted by username
        """
        return User.query().filter(User.admin == true())\
            .order_by(User.username.asc()).all()

    @classmethod
    def get_default_user(cls, cache=False):
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        return user

    def _get_default_perms(self, user, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
            by a placeholder value to prevent exposing this data by accident. In case
            this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
            the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        api_key_length = 40
        api_key_replacement = '*' * api_key_length

        extras = {
            'api_key': api_key_replacement,
            'api_keys': [api_key_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['api_key'] = user.api_key
            data['api_keys'] = user.auth_tokens
        return data

    def __json__(self):
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data
875
876
class UserApiKeys(Base, BaseModel):
    """
    Extra auth tokens for a user, each with an optional expiry timestamp
    (``expires == -1`` means never) and a role restricting what the token
    may be used for.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        UniqueConstraint('api_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user = relationship('User', lazy='joined')

    @classmethod
    def _get_role_name(cls, role):
        """Translate a role constant into a human-readable label."""
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def expired(self):
        # -1 is the "never expires" sentinel
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)
925
926
class UserEmailMap(Base, BaseModel):
    """
    Extra (alternative) email addresses attached to a user account.

    Each extra email is globally unique and must not collide with any
    user's main email address.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fix: message previously read "is present is user table"
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are always stored lower-cased; empty values become None
        self._email = val.lower() if val else None
957
958
class UserIpMap(Base, BaseModel):
    """
    IP address (or CIDR range) whitelist entries attached to a user.
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [network_address, broadcast_address] for *ip_addr*.

        NOTE(review): py2 ``ipaddress`` expects unicode input - presumably
        ip_addr is stored as unicode; verify against writers of this column.
        """
        net = ipaddress.ip_network(ip_addr, strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
989
class UserLog(Base, BaseModel):
    """Audit-log entry: records an action performed by a user, optionally
    tied to a repository."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.repository_name,
                                      self.action)

    @property
    def action_as_day(self):
        # truncate the action timestamp down to a calendar date
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1016
1017
class UserGroup(Base, BaseModel):
    """A named group of users, carrying its own permissions and optional
    JSON metadata (``group_data``)."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user = relationship('User')

    @hybrid_property
    def group_data(self):
        # JSON-decoded view of the `_group_data` blob; empty dict when
        # unset or not decodable
        if not self._group_data:
            return {}

        try:
            return json.loads(self._group_data)
        except TypeError:
            return {}

    @group_data.setter
    def group_data(self, val):
        # best-effort JSON encode; failures are logged, not raised
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a group by name, optionally case-insensitive and cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a group by primary key, optionally via the SQL cache."""
        user_group = cls.query()
        if cache:
            user_group = user_group.options(FromCache("sql_cache_short",
                                            "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True):
        """Return permission rows (as AttributeDicts) for this user group.

        :param with_admins: include super-admin rows
        :param with_owner: include the owner row
        """
        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_sort)

        _admin_perm = 'usergroup.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                # BUGFIX: owner_row is empty when with_owner=False; indexing
                # it unconditionally raised IndexError here
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return rows for user groups granted permission on this group."""
        q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.user_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # delegated to PermissionModel; imported locally to avoid a cycle
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self

        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
        }
        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1176
1177
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings although both columns are
        # integers -- presumably rows are always created with both ids; verify
        self.users_group_id = gr_id
        self.user_id = u_id
1195
1196
class RepositoryField(Base, BaseModel):
    """Custom extra field (key/label/value/description/type) attached to a
    repository; rendered in forms under the ``PREFIX``-ed key."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        # CONSISTENCY FIX: use the PREFIX constant instead of a duplicated
        # 'ex_' literal, so this stays in sync with un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form ``PREFIX`` from ``key`` if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for ``key`` on ``repo``, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1233
1234
class Repository(Base, BaseModel):
    """A source code repository record (git/hg), its settings, lock state
    and cached last-commit metadata."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # clone URL templates; the `_{repoid}` variant addresses a repo by id
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'

    # lifecycle states stored in `repo_state`
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # markers recording how a repo lock was acquired
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of repo_name, kept in sync by the repo_name setter
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # stored as '<rev_type>:<rev>', see the landing_rev hybrid property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # stored as '<user_id>:<lock_time>:<reason>', see the locked property
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
1333
1334 def __unicode__(self):
1335 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1336 safe_unicode(self.repo_name))
1337
1338 @hybrid_property
1339 def landing_rev(self):
1340 # always should return [rev_type, rev]
1341 if self._landing_revision:
1342 _rev_info = self._landing_revision.split(':')
1343 if len(_rev_info) < 2:
1344 _rev_info.insert(0, 'rev')
1345 return [_rev_info[0], _rev_info[1]]
1346 return [None, None]
1347
    @landing_rev.setter
    def landing_rev(self, val):
        # stored as '<rev_type>:<rev>'; reject values without the delimiter
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
1354
1355 @hybrid_property
1356 def locked(self):
1357 if self._locked:
1358 user_id, timelocked, reason = self._locked.split(':')
1359 lock_values = int(user_id), timelocked, reason
1360 else:
1361 lock_values = [None, None, None]
1362 return lock_values
1363
1364 @locked.setter
1365 def locked(self, val):
1366 if val and isinstance(val, (list, tuple)):
1367 self._locked = ':'.join(map(str, val))
1368 else:
1369 self._locked = None
1370
    @hybrid_property
    def changeset_cache(self):
        # JSON-decoded metadata of the last commit; falls back to an
        # EmptyCommit representation when unset or undecodable
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            return dummy
        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            # unexpected decode failure: log it, still return the dummy
            log.error(traceback.format_exc())
            return dummy
1384
    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort JSON encode of the commit metadata; failures are
        # logged rather than raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
1391
    @hybrid_property
    def repo_name(self):
        # public accessor for the `_repo_name` column
        return self._repo_name
1395
    @repo_name.setter
    def repo_name(self, value):
        self._repo_name = value
        # keep the sha1 of the name in sync; backs the unique name-hash index
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1400
1401 @classmethod
1402 def normalize_repo_name(cls, repo_name):
1403 """
1404 Normalizes os specific repo_name to the format internally stored inside
1405 database using URL_SEP
1406
1407 :param cls:
1408 :param repo_name:
1409 """
1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
1411
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """Fetch a repository by name.

        :param cache: use a cache layer for the lookup
        :param identity_cache: prefer the session identity-map based cache
            over the SQL statement cache
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                q = q.options(
                    FromCache("sql_cache_short",
                              "get_repo_by_name_%s" % _hash_key(repo_name)))

        return q.scalar()
1428
    @classmethod
    def get_by_full_path(cls, repo_full_path):
        # strip the storage base path, then normalize to the internal format
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1434
    @classmethod
    def get_repo_forks(cls, repo_id):
        # returns a query (not a list) of repositories forked from repo_id
        return cls.query().filter(Repository.fork_id == repo_id)
1438
    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        # the storage root is kept in the rhodecode_ui table under the
        # URL_SEP key; cached via the short SQL cache
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1450
    @classmethod
    def is_valid(cls, repo_name):
        """
        returns True if given repo name is a valid filesystem repository

        :param cls:
        :param repo_name:
        """
        # local import to avoid a circular dependency with rhodecode.lib.utils
        from rhodecode.lib.utils import is_valid_repo

        return is_valid_repo(repo_name, cls.base_path())
1462
    @classmethod
    def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
                      case_insensitive=True):
        """List all repositories, optionally filtered by owner and/or group.

        ``Optional`` sentinel defaults mean "no filter"; an explicit value
        (including None) activates the corresponding filter.
        """
        q = Repository.query()

        if not isinstance(user_id, Optional):
            q = q.filter(Repository.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(Repository.group_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(Repository.repo_name))
        else:
            q = q.order_by(Repository.repo_name)
        return q.all()
1479
    @property
    def forks(self):
        """
        Return forks of this repo

        :return: query of Repository rows whose fork_id points at this repo
        """
        return Repository.get_repo_forks(self.repo_id)
1486
    @property
    def parent(self):
        """
        Returns fork parent

        :return: the Repository this one was forked from, or None
        """
        return self.fork
1493
1494 @property
1495 def just_name(self):
1496 return self.repo_name.split(self.NAME_SEP)[-1]
1497
1498 @property
1499 def groups_with_parents(self):
1500 groups = []
1501 if self.group is None:
1502 return groups
1503
1504 cur_gr = self.group
1505 groups.insert(0, cur_gr)
1506 while 1:
1507 gr = getattr(cur_gr, 'parent_group', None)
1508 cur_gr = cur_gr.parent_group
1509 if gr is None:
1510 break
1511 groups.insert(0, gr)
1512
1513 return groups
1514
    @property
    def groups_and_repo(self):
        # convenience pair: (parent group chain, this repository)
        return self.groups_with_parents, self
1518
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        # same lookup as base_path(), computed lazily per instance
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1529
    @property
    def repo_full_path(self):
        """Absolute filesystem path of this repository."""
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(self.NAME_SEP)
        return os.path.join(*map(safe_unicode, p))
1538
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        # CacheKey rows are matched by repo name stored in cache_args
        return CacheKey.query()\
            .filter(CacheKey.cache_args == self.repo_name)\
            .order_by(CacheKey.cache_key)\
            .all()
1548
1549 def get_new_name(self, repo_name):
1550 """
1551 returns new full repository name based on assigned group and new new
1552
1553 :param group_name:
1554 """
1555 path_prefix = self.group.full_path_splitted if self.group else []
1556 return self.NAME_SEP.join(path_prefix + [repo_name])
1557
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        # local import to avoid a circular dependency with rhodecode.lib.utils
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
1565
1566 def permissions(self, with_admins=True, with_owner=True):
1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1568 q = q.options(joinedload(UserRepoToPerm.repository),
1569 joinedload(UserRepoToPerm.user),
1570 joinedload(UserRepoToPerm.permission),)
1571
1572 # get owners and admins and permissions. We do a trick of re-writing
1573 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1574 # has a global reference and changing one object propagates to all
1575 # others. This means if admin is also an owner admin_row that change
1576 # would propagate to both objects
1577 perm_rows = []
1578 for _usr in q.all():
1579 usr = AttributeDict(_usr.user.get_dict())
1580 usr.permission = _usr.permission.permission_name
1581 perm_rows.append(usr)
1582
1583 # filter the perm rows by 'default' first and then sort them by
1584 # admin,write,read,none permissions sorted again alphabetically in
1585 # each group
1586 perm_rows = sorted(perm_rows, key=display_sort)
1587
1588 _admin_perm = 'repository.admin'
1589 owner_row = []
1590 if with_owner:
1591 usr = AttributeDict(self.user.get_dict())
1592 usr.owner_row = True
1593 usr.permission = _admin_perm
1594 owner_row.append(usr)
1595
1596 super_admin_rows = []
1597 if with_admins:
1598 for usr in User.get_all_super_admins():
1599 # if this admin is also owner, don't double the record
1600 if usr.user_id == owner_row[0].user_id:
1601 owner_row[0].admin_row = True
1602 else:
1603 usr = AttributeDict(usr.get_dict())
1604 usr.admin_row = True
1605 usr.permission = _admin_perm
1606 super_admin_rows.append(usr)
1607
1608 return super_admin_rows + owner_row + perm_rows
1609
1610 def permission_user_groups(self):
1611 q = UserGroupRepoToPerm.query().filter(
1612 UserGroupRepoToPerm.repository == self)
1613 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1614 joinedload(UserGroupRepoToPerm.users_group),
1615 joinedload(UserGroupRepoToPerm.permission),)
1616
1617 perm_rows = []
1618 for _user_group in q.all():
1619 usr = AttributeDict(_user_group.users_group.get_dict())
1620 usr.permission = _user_group.permission.permission_name
1621 perm_rows.append(usr)
1622
1623 return perm_rows
1624
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel

        repo = self
        # lock tuple decoded from the `locked` property
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'url': url('summary_home', repo_name=self.repo_name, qualified=True),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        # attach custom extra fields only when the feature is enabled
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
1670
1671 @classmethod
1672 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1673 if not lock_time:
1674 lock_time = time.time()
1675 if not lock_reason:
1676 lock_reason = cls.LOCK_AUTOMATIC
1677 repo.locked = [user_id, lock_time, lock_reason]
1678 Session().add(repo)
1679 Session().commit()
1680
    @classmethod
    def unlock(cls, repo):
        """Clear and persist removal of any lock on ``repo``."""
        repo.locked = None
        Session().add(repo)
        Session().commit()
1686
    @classmethod
    def getlock(cls, repo):
        # returns the decoded (user_id, lock_time, reason) lock info
        return repo.locked
1690
1691 def is_user_lock(self, user_id):
1692 if self.lock[0]:
1693 lock_user_id = safe_int(self.lock[0])
1694 user_id = safe_int(user_id)
1695 # both are ints, and they are equal
1696 return all([lock_user_id, user_id]) and lock_user_id == user_id
1697
1698 return False
1699
1700 def get_locking_state(self, action, user_id, only_when_enabled=True):
1701 """
1702 Checks locking on this repository, if locking is enabled and lock is
1703 present returns a tuple of make_lock, locked, locked_by.
1704 make_lock can have 3 states None (do nothing) True, make lock
1705 False release lock, This value is later propagated to hooks, which
1706 do the locking. Think about this as signals passed to hooks what to do.
1707
1708 """
1709 # TODO: johbo: This is part of the business logic and should be moved
1710 # into the RepositoryModel.
1711
1712 if action not in ('push', 'pull'):
1713 raise ValueError("Invalid action value: %s" % repr(action))
1714
1715 # defines if locked error should be thrown to user
1716 currently_locked = False
1717 # defines if new lock should be made, tri-state
1718 make_lock = None
1719 repo = self
1720 user = User.get(user_id)
1721
1722 lock_info = repo.locked
1723
1724 if repo and (repo.enable_locking or not only_when_enabled):
1725 if action == 'push':
1726 # check if it's already locked !, if it is compare users
1727 locked_by_user_id = lock_info[0]
1728 if user.user_id == locked_by_user_id:
1729 log.debug(
1730 'Got `push` action from user %s, now unlocking', user)
1731 # unlock if we have push from user who locked
1732 make_lock = False
1733 else:
1734 # we're not the same user who locked, ban with
1735 # code defined in settings (default is 423 HTTP Locked) !
1736 log.debug('Repo %s is currently locked by %s', repo, user)
1737 currently_locked = True
1738 elif action == 'pull':
1739 # [0] user [1] date
1740 if lock_info[0] and lock_info[1]:
1741 log.debug('Repo %s is currently locked by %s', repo, user)
1742 currently_locked = True
1743 else:
1744 log.debug('Setting lock on repo %s by %s', repo, user)
1745 make_lock = True
1746
1747 else:
1748 log.debug('Repository %s do not have locking enabled', repo)
1749
1750 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1751 make_lock, currently_locked, lock_info)
1752
1753 from rhodecode.lib.auth import HasRepoPermissionAny
1754 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1755 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1756 # if we don't have at least write permission we cannot make a lock
1757 log.debug('lock state reset back to FALSE due to lack '
1758 'of at least read permission')
1759 make_lock = False
1760
1761 return make_lock, currently_locked, lock_info
1762
    @property
    def last_db_change(self):
        # timestamp of the last database-side update of this record
        return self.updated_on
1766
    @property
    def clone_uri_hidden(self):
        """Clone URI with any embedded password masked out for display."""
        clone_uri = self.clone_uri
        if clone_uri:
            import urlobject
            url_obj = urlobject.URLObject(clone_uri)
            if url_obj.password:
                clone_uri = url_obj.with_password('*****')
        return clone_uri
1776
    def clone_url(self, **override):
        """Render the clone URL for this repository from a URI template.

        Recognized overrides: ``with_id`` switches to the id-based template,
        ``uri_tmpl`` supplies an explicit template; all remaining keys are
        forwarded to ``get_clone_url``.
        """
        qualified_home_url = url('home', qualified=True)

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        # we didn't override our tmpl from **overrides
        if not uri_tmpl:
            uri_tmpl = self.DEFAULT_CLONE_URI
            try:
                from pylons import tmpl_context as c
                uri_tmpl = c.clone_uri_tmpl
            except Exception:
                # in any case if we call this outside of request context,
                # ie, not having tmpl_context set up
                pass

        # NOTE: `qualifed_home_url` (sic) is the callee's keyword name
        return get_clone_url(uri_tmpl=uri_tmpl,
                             qualifed_home_url=qualified_home_url,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id, **override)
1804
    def set_state(self, state):
        """Set the repo lifecycle state (one of the STATE_* constants) and
        stage the change in the current session (no commit here)."""
        self.repo_state = state
        Session().add(self)
    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================
1811
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        # safe wrapper: delegates to the scm backend instance; see
        # get_commit_safe for fallback behavior on missing commits
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1815
1816 def get_changeset(self, rev=None, pre_load=None):
1817 warnings.warn("Use get_commit", DeprecationWarning)
1818 commit_id = None
1819 commit_idx = None
1820 if isinstance(rev, basestring):
1821 commit_id = rev
1822 else:
1823 commit_idx = rev
1824 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1825 pre_load=pre_load)
1826
    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        # EmptyCommit means the configured landing rev does not exist
        if isinstance(commit, EmptyCommit):
            return self.get_commit()
        return commit
1836
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache: pre-computed commit cache dict (or BaseChangeset);
            when None it is read from the scm backend, bypassing caches
        :param config: optional config passed to ``scm_instance``
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)
            if scm_repo:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                # no scm repo on disk -- fall back to an empty commit
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # outdated when either the commit hash or the revision number
            # differs from what is currently cached
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            # updated_on mirrors the commit date of the cached tip
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
1883
    @property
    def tip(self):
        """Latest ('tip') commit of this repository."""
        return self.get_commit('tip')
1887
    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author
1891
    @property
    def last_change(self):
        """Date of the last change as reported by the scm backend."""
        return self.scm_instance().last_change
1895
1896 def get_comments(self, revisions=None):
1897 """
1898 Returns comments for this repository grouped by revisions
1899
1900 :param revisions: filter query by revisions only
1901 """
1902 cmts = ChangesetComment.query()\
1903 .filter(ChangesetComment.repo == self)
1904 if revisions:
1905 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1906 grouped = collections.defaultdict(list)
1907 for cmt in cmts.all():
1908 grouped[cmt.revision].append(cmt)
1909 return grouped
1910
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict mapping revision -> [status, status label,
            pull request id, pull request target repo name]; the last two
            are ``None`` for statuses not tied to a pull request.
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            # (DB backends cap the number of IN-clause parameters)
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses override the implicit under-review defaults above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
1952
1953 # ==========================================================================
1954 # SCM CACHE INSTANCE
1955 # ==========================================================================
1956
1957 def scm_instance(self, **kwargs):
1958 import rhodecode
1959
1960 # Passing a config will not hit the cache currently only used
1961 # for repo2dbmapper
1962 config = kwargs.pop('config', None)
1963 cache = kwargs.pop('cache', None)
1964 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1965 # if cache is NOT defined use default global, else we have a full
1966 # control over cache behaviour
1967 if cache is None and full_cache and not config:
1968 return self._get_instance_cached()
1969 return self._get_instance(cache=bool(cache), config=config)
1970
    def _get_instance_cached(self):
        """Return a vcs instance through the beaker long-term cache region."""
        # the cache key is the first positional arg of the decorated function
        @cache_region('long_term')
        def _get_repo(cache_key):
            return self._get_instance()

        # wraps the cached call with the DB-backed invalidation context
        invalidator_context = CacheKey.repo_context_cache(
            _get_repo, self.repo_name, None)

        with invalidator_context as context:
            context.invalidate()
            repo = context.compute()

        return repo
1984
1985 def _get_instance(self, cache=True, config=None):
1986 repo_full_path = self.repo_full_path
1987 try:
1988 vcs_alias = get_scm(repo_full_path)[0]
1989 log.debug(
1990 'Creating instance of %s repository from %s',
1991 vcs_alias, repo_full_path)
1992 backend = get_backend(vcs_alias)
1993 except VCSError:
1994 log.exception(
1995 'Perhaps this repository is in db and not in '
1996 'filesystem run rescan repositories with '
1997 '"destroy old data" option from admin panel')
1998 return
1999
2000 config = config or self._config
2001 custom_wire = {
2002 'cache': cache # controls the vcs.remote cache
2003 }
2004 repo = backend(
2005 safe_str(repo_full_path), config=config, create=False,
2006 with_wire=custom_wire)
2007
2008 return repo
2009
2010 def __json__(self):
2011 return {'landing_rev': self.landing_rev}
2012
2013 def get_dict(self):
2014
2015 # Since we transformed `repo_name` to a hybrid property, we need to
2016 # keep compatibility with the code which uses `repo_name` field.
2017
2018 result = super(Repository, self).get_dict()
2019 result['repo_name'] = result.pop('_repo_name', None)
2020 return result
2021
2022
class RepoGroup(Base, BaseModel):
    """
    Repository group ("folder") model. Groups nest via ``group_parent_id``
    and the full path is stored denormalized in ``group_name``.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')  # owner of this group

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
                                      self.group_name)

    @classmethod
    def _generate_choice(cls, repo_group):
        """Return an ``(id, escaped full path)`` tuple for select2 widgets."""
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Build sorted select2 choices for ``groups`` (all groups when
        ``None``), optionally prepending a '-- No parent --' entry.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [('-1', u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        # sort by the top-level segment of the full path
        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        """Separator used in group paths and URLs."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a single group by its full path name, optionally cached."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)))
        return gr.scalar()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        List repository groups, optionally filtered by owner ``user_id``
        and/or parent ``group_id``; ``Optional`` sentinels mean no filter.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self):
        """List of ancestor groups, outermost first (capped at 10 levels)."""
        parents_recursion_limit = 10
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error(('more than %s parents found for group %s, stopping '
                           'recursive parent fetching' % (parents_recursion_limit, self)))
                break

            groups.insert(0, gr)
        return groups

    @property
    def children(self):
        """Query of direct child groups."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last segment of this group's path."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full path of this group (same as ``group_name``)."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full path split into its individual segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query of repositories directly inside this group."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Repository count for this group and all of its descendants."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        # depth-first collection of self plus all descendant groups
        # (and optionally the repositories within each)
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name: new leaf name for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def permissions(self, with_admins=True, with_owner=True):
        """
        Return user permission rows for this group: super-admins first,
        then the owner row, then explicit per-user grants.

        NOTE(review): with ``with_admins=True`` and ``with_owner=False``
        the ``owner_row[0]`` access below raises IndexError on an empty
        list - confirm callers never use that combination.
        """
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return user-group permission rows for this group."""
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.group_description,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data
2281
2282
class Permission(Base, BaseModel):
    """
    Permission definition model. ``PERMS`` seeds the permission table,
    ``DEFAULT_USER_PERMISSIONS`` lists the defaults granted to the special
    DEFAULT user, and ``PERM_WEIGHTS`` ranks permissions by strength.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important.
    # The higher the number, the more important the permission.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Fetch a permission row by its ``permission_name``."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """Direct per-user repository permissions for ``user_id``."""
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """Repository permissions ``user_id`` inherits via active user groups."""
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """Direct per-user repository-group permissions for ``user_id``."""
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """Repo-group permissions ``user_id`` inherits via active user groups."""
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """Direct per-user user-group permissions for ``user_id``."""
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """User-group permissions ``user_id`` inherits via active user groups."""
        # alias needed: UserGroup appears twice (target group + member group)
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
2501
2502
class UserRepoToPerm(Base, BaseModel):
    """Per-user permission grant on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new grant and add it to the session; caller commits."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
2530
2531
class UserUserGroupToPerm(Base, BaseModel):
    """Per-user permission grant on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new grant and add it to the session; caller commits."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
2559
2560
class UserToPerm(Base, BaseModel):
    """Global (system-wide) permission grant for a single user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded since global perms are read on almost every request
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
2577
2578
class UserGroupRepoToPerm(Base, BaseModel):
    """Per-user-group permission grant on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new grant and add it to the session; caller commits."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2606
2607
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Permission of one user group over another (the target) user group."""
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot hold a permission over itself
        CheckConstraint('target_user_group_id != user_group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # explicit primaryjoins required: both FKs point at users_groups
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new grant and add it to the session; caller commits."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2636
2637
class UserGroupToPerm(Base, BaseModel):
    """Global (system-wide) permission grant for a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
2651
2652
class UserRepoGroupToPerm(Base, BaseModel):
    """Per-user permission grant on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new grant and add it to the session; caller commits."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
2678
2679
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Per-user-group permission grant on a repository group.

    NOTE(review): unlike the sibling *ToPerm tables, the unique constraint
    here omits ``permission_id`` (one grant per group pair) - presumably
    intentional; confirm before changing.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new grant and add it to the session; caller commits."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2708
2709
class Statistics(Base, BaseModel):
    """Pre-computed commit/language statistics for a single repository."""
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
2725
2726
class UserFollowing(Base, BaseModel):
    """
    Records a user following either a repository or another user.

    NOTE(review): both follow-target columns are nullable; presumably
    exactly one of ``follows_repo_id`` / ``follows_user_id`` is set per
    row - confirm against the model layer that creates these.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # explicit primaryjoin needed since two FKs point at users.user_id
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query over the followings of the given repository id."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
2750
2751
class CacheKey(Base, BaseModel):
    """
    Cache invalidation bookkeeping. Each row represents one cache key
    (built from an optional instance prefix, a repo name and a cache
    type suffix); ``cache_active`` flags whether the cached value is
    still valid.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # known cache-type suffixes used with get_cache_key()
    CACHE_TYPE_ATOM = 'ATOM'
    CACHE_TYPE_RSS = 'RSS'
    CACHE_TYPE_README = 'README'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    # cache_args holds the repo name embedded in cache_key (see
    # _cache_key_partition and set_invalidate below)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        # new keys start out invalid until the cache is populated
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around the repo name stored in cache_args
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.html.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def get_cache_key(cls, repo_name, cache_type):
        """
        Generate a cache key for this process of RhodeCode instance.
        Prefix most likely will be process id or maybe explicitly set
        instance_id from .ini file.

        :param repo_name: repository name the key is scoped to
        :param cache_type: optional suffix (one of the CACHE_TYPE_*
            constants); omitted from the key when falsy
        """
        import rhodecode
        prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')

        repo_as_unicode = safe_unicode(repo_name)
        key = u'{}_{}'.format(repo_as_unicode, cache_type) \
            if cache_type else repo_as_unicode

        return u'{}{}'.format(prefix, key)

    @classmethod
    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param delete: when True remove the cache rows entirely instead
            of just flagging them inactive
        """
        try:
            qry = Session().query(cls).filter(cls.cache_args == repo_name)
            if delete:
                log.debug('cache objects deleted for repo %s',
                          safe_str(repo_name))
                qry.delete()
            else:
                log.debug('cache objects marked as invalid for repo %s',
                          safe_str(repo_name))
                qry.update({"cache_active": False})

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate, so a
            # failed invalidation does not break the calling request
            log.exception(
                'Cache key invalidation failed for repository %s',
                safe_str(repo_name))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for ``cache_key`` or None if missing."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def repo_context_cache(cls, compute_func, repo_name, cache_type):
        """
        Build an invalidation context manager around ``compute_func``.

        Usage::

            @cache_region('long_term')
            def _heavy_calculation(cache_key):
                return 'result'

            cache_context = CacheKey.repo_context_cache(
                _heavy_calculation, repo_name, cache_type)

            with cache_context as context:
                context.invalidate()
                computed = context.compute()

            assert computed == 'result'
        """
        from rhodecode.lib import caches
        return caches.InvalidationContext(compute_func, repo_name, cache_type)
2876
2877
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a commit (``revision``) or to a pull
    request (``pull_request_id``), optionally anchored to a file/line
    for inline comments.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # display_state value for comments on lines that changed in a newer
    # pull request version
    COMMENT_OUTDATED = u'comment_outdated'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    # line/file anchors are only set for inline comments
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # name of the markup renderer used for ``text`` (see render())
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision: filter by commit hash
        :param pull_request_id: filter by pull request (used only when
            ``revision`` is not given)
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    def render(self, mentions=False):
        """Render ``self.text`` with the stored markup renderer."""
        from rhodecode.lib import helpers as h
        return h.render(self.text, renderer=self.renderer, mentions=mentions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:ChangesetComment #%s>' % self.comment_id
        else:
            return '<DB:ChangesetComment at %#x>' % id(self)
2935
2936
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) of a commit or pull request,
    versioned so the history of status changes is retained.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order matters for UI listings
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    # version increments each time the status is superseded
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the human-readable label for a status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)
2986
2987
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared mixin for :class:`PullRequest` and
    :class:`PullRequestVersion`; uses ``declared_attr`` so each subclass
    gets its own copy of the FK columns and relationships.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max; stored as a ':'-joined string, see the
    # ``revisions`` hybrid property below
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    @hybrid_property
    def revisions(self):
        """Revisions as a list (decoded from the ':'-joined column)."""
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin is built per-subclass since both share the column
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        # source_ref is stored as 'type:name:commit_id'
        refs = self.source_ref.split(':')
        return Reference(refs[0], refs[1], refs[2])

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        # target_ref is stored as 'type:name:commit_id'
        refs = self.target_ref.split(':')
        return Reference(refs[0], refs[1], refs[2])
3081
3082
class PullRequest(Base, _PullRequestBase):
    """
    Current (head) state of a pull request; historical snapshots live in
    :class:`PullRequestVersion`.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan")

    def is_closed(self):
        """True when the pull request has been closed."""
        return self.status == self.STATUS_CLOSED

    def get_api_data(self):
        """
        Build the dict representation of this pull request used by the
        API layer (includes live merge status, so this hits the model).
        """
        from rhodecode.model.pull_request import PullRequestModel
        pull_request = self
        merge_status = PullRequestModel().merge_status(pull_request)
        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': url('pullrequest_show', repo_name=self.target_repo.repo_name,
                       pull_request_id=self.pull_request_id,
                       qualified=True),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': {
                'status': merge_status[0],
                'message': unicode(merge_status[1]),
            },
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for reviewer, st in pull_request.reviewers_statuses()
            ]
        }

        return data

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
        # because it's tricky on how to use ChangesetStatusModel from there
        warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)
3178
3179
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken when it is updated;
    shares all content columns with PullRequest via _PullRequestBase.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)
3199
3200
class PullRequestReviewers(Base, BaseModel):
    """Link table assigning a user as reviewer on a pull request."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    def __init__(self, user=None, pull_request=None):
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)

    user = relationship('User')
    pull_request = relationship('PullRequest')
3223
3224
class Notification(Base, BaseModel):
    """
    A notification sent to one or more users; the per-recipient
    read/sent state lives in :class:`UserNotification`.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        """Users this notification was delivered to, ordered by user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and fan it out to ``recipients``.

        :param created_by: User instance authoring the notification
        :param type_: one of the TYPE_* constants; defaults to
            TYPE_MESSAGE

        The notification is added to the session but not committed.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True

            u.notifications.append(assoc)
        Session().add(notification)

        return notification

    @property
    def description(self):
        from rhodecode.model.notification import NotificationModel
        return NotificationModel().make_description(self)
3287
3288
class UserNotification(Base, BaseModel):
    """
    Per-recipient state of a Notification (read flag, sent timestamp).
    Composite primary key of (user_id, notification_id).
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this notification as read; added to session, not committed."""
        self.read = True
        Session().add(self)
3308
3309
class Gist(Base, BaseModel):
    """
    A gist: a small snippet repository stored on disk under
    :meth:`base_path`, with visibility (public/private) and ACL level.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    # public identifier used in gist URLs
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiration as a float timestamp
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by access id or raise HTTPNotFound."""
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Public URL of this gist; honours the gist_alias_url setting."""
        import rhodecode
        alias_url = rhodecode.CONFIG.get('gist_alias_url')
        if alias_url:
            return alias_url.replace('{gistid}', self.gist_access_id)

        return url('gist', gist_id=self.gist_access_id, qualified=True)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """Return a VCS repository object for this gist's on-disk store."""
        from rhodecode.lib.vcs import get_repo
        base_path = self.base_path()
        return get_repo(os.path.join(*map(safe_str,
                                          [base_path, self.gist_access_id])))
3403
3404
class DbMigrateVersion(Base, BaseModel):
    """Bookkeeping table used by sqlalchemy-migrate to track schema version."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
3414
3415
class ExternalIdentity(Base, BaseModel):
    """
    Maps a local user to an identity at an external auth provider
    (composite PK: external_id + local_user_id + provider_name) and
    stores the provider tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'})

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional extra filter on the local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity query (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query
3478
3479
class Integration(Base, BaseModel):
    """
    A configured integration instance (e.g. Slack) with its settings
    stored as a JSON blob in ``settings_json``.

    When ``repo_id`` is set the integration is scoped to that single
    repository; when it is NULL the integration is global.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    # key of the integration type in the integration type registry
    integration_type = Column('integration_type', String(255))
    enabled = Column("enabled", Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # raw JSON settings; prefer the ``settings`` property for access
    settings_json = Column('settings_json',
                           UnicodeText().with_variant(UnicodeText(16384), 'mysql'))
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    @hybrid_property
    def settings(self):
        """Deserialized settings dict; {} when nothing has been stored yet."""
        data = json.loads(self.settings_json or '{}')
        return data

    @settings.setter
    def settings(self, dct):
        self.settings_json = json.dumps(dct, indent=2)

    def __repr__(self):
        if self.repo:
            scope = 'repo=%r' % self.repo
        else:
            scope = 'global'

        return '<Integration(%r, %r)>' % (self.integration_type, scope)

    def settings_as_dict(self):
        """
        Return the settings as a plain dict.

        Fix: guard against a NULL ``settings_json`` (freshly created row)
        the same way the ``settings`` property does, instead of raising
        ``TypeError`` from ``json.loads(None)``.
        """
        return json.loads(self.settings_json or '{}')
@@ -0,0 +1,27 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 import sqlalchemy as sa
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8
9 from rhodecode.lib.dbmigrate.versions import _reset_base
10
11 log = logging.getLogger(__name__)
12
13
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata

    Creates the ``integrations`` table using the 4.3.0.0 schema snapshot,
    so the migration stays stable even as the live models evolve.
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_3_0_0

    integrations_table = db_4_3_0_0.Integration.__table__
    integrations_table.create()
24
25
def downgrade(migrate_engine):
    # no downgrade path is provided for this migration
    pass
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1654 +1,1641 b''
1 {
1 {
2 Babel = super.buildPythonPackage {
2 Babel = super.buildPythonPackage {
3 name = "Babel-1.3";
3 name = "Babel-1.3";
4 buildInputs = with self; [];
4 buildInputs = with self; [];
5 doCheck = false;
5 doCheck = false;
6 propagatedBuildInputs = with self; [pytz];
6 propagatedBuildInputs = with self; [pytz];
7 src = fetchurl {
7 src = fetchurl {
8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 };
10 };
11 meta = {
11 meta = {
12 license = [ pkgs.lib.licenses.bsdOriginal ];
12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 };
13 };
14 };
14 };
15 Beaker = super.buildPythonPackage {
15 Beaker = super.buildPythonPackage {
16 name = "Beaker-1.7.0";
16 name = "Beaker-1.7.0";
17 buildInputs = with self; [];
17 buildInputs = with self; [];
18 doCheck = false;
18 doCheck = false;
19 propagatedBuildInputs = with self; [];
19 propagatedBuildInputs = with self; [];
20 src = fetchurl {
20 src = fetchurl {
21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
22 md5 = "386be3f7fe427358881eee4622b428b3";
22 md5 = "386be3f7fe427358881eee4622b428b3";
23 };
23 };
24 meta = {
24 meta = {
25 license = [ pkgs.lib.licenses.bsdOriginal ];
25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 };
26 };
27 };
27 };
28 CProfileV = super.buildPythonPackage {
28 CProfileV = super.buildPythonPackage {
29 name = "CProfileV-1.0.6";
29 name = "CProfileV-1.0.6";
30 buildInputs = with self; [];
30 buildInputs = with self; [];
31 doCheck = false;
31 doCheck = false;
32 propagatedBuildInputs = with self; [bottle];
32 propagatedBuildInputs = with self; [bottle];
33 src = fetchurl {
33 src = fetchurl {
34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
36 };
36 };
37 meta = {
37 meta = {
38 license = [ pkgs.lib.licenses.mit ];
38 license = [ pkgs.lib.licenses.mit ];
39 };
39 };
40 };
40 };
41 Fabric = super.buildPythonPackage {
41 Fabric = super.buildPythonPackage {
42 name = "Fabric-1.10.0";
42 name = "Fabric-1.10.0";
43 buildInputs = with self; [];
43 buildInputs = with self; [];
44 doCheck = false;
44 doCheck = false;
45 propagatedBuildInputs = with self; [paramiko];
45 propagatedBuildInputs = with self; [paramiko];
46 src = fetchurl {
46 src = fetchurl {
47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
48 md5 = "2cb96473387f0e7aa035210892352f4a";
48 md5 = "2cb96473387f0e7aa035210892352f4a";
49 };
49 };
50 meta = {
50 meta = {
51 license = [ pkgs.lib.licenses.bsdOriginal ];
51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 };
52 };
53 };
53 };
54 FormEncode = super.buildPythonPackage {
54 FormEncode = super.buildPythonPackage {
55 name = "FormEncode-1.2.4";
55 name = "FormEncode-1.2.4";
56 buildInputs = with self; [];
56 buildInputs = with self; [];
57 doCheck = false;
57 doCheck = false;
58 propagatedBuildInputs = with self; [];
58 propagatedBuildInputs = with self; [];
59 src = fetchurl {
59 src = fetchurl {
60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
62 };
62 };
63 meta = {
63 meta = {
64 license = [ pkgs.lib.licenses.psfl ];
64 license = [ pkgs.lib.licenses.psfl ];
65 };
65 };
66 };
66 };
67 Jinja2 = super.buildPythonPackage {
67 Jinja2 = super.buildPythonPackage {
68 name = "Jinja2-2.7.3";
68 name = "Jinja2-2.7.3";
69 buildInputs = with self; [];
69 buildInputs = with self; [];
70 doCheck = false;
70 doCheck = false;
71 propagatedBuildInputs = with self; [MarkupSafe];
71 propagatedBuildInputs = with self; [MarkupSafe];
72 src = fetchurl {
72 src = fetchurl {
73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
75 };
75 };
76 meta = {
76 meta = {
77 license = [ pkgs.lib.licenses.bsdOriginal ];
77 license = [ pkgs.lib.licenses.bsdOriginal ];
78 };
78 };
79 };
79 };
80 Mako = super.buildPythonPackage {
80 Mako = super.buildPythonPackage {
81 name = "Mako-1.0.1";
81 name = "Mako-1.0.1";
82 buildInputs = with self; [];
82 buildInputs = with self; [];
83 doCheck = false;
83 doCheck = false;
84 propagatedBuildInputs = with self; [MarkupSafe];
84 propagatedBuildInputs = with self; [MarkupSafe];
85 src = fetchurl {
85 src = fetchurl {
86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
87 md5 = "9f0aafd177b039ef67b90ea350497a54";
87 md5 = "9f0aafd177b039ef67b90ea350497a54";
88 };
88 };
89 meta = {
89 meta = {
90 license = [ pkgs.lib.licenses.mit ];
90 license = [ pkgs.lib.licenses.mit ];
91 };
91 };
92 };
92 };
93 Markdown = super.buildPythonPackage {
93 Markdown = super.buildPythonPackage {
94 name = "Markdown-2.6.2";
94 name = "Markdown-2.6.2";
95 buildInputs = with self; [];
95 buildInputs = with self; [];
96 doCheck = false;
96 doCheck = false;
97 propagatedBuildInputs = with self; [];
97 propagatedBuildInputs = with self; [];
98 src = fetchurl {
98 src = fetchurl {
99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
101 };
101 };
102 meta = {
102 meta = {
103 license = [ pkgs.lib.licenses.bsdOriginal ];
103 license = [ pkgs.lib.licenses.bsdOriginal ];
104 };
104 };
105 };
105 };
106 MarkupSafe = super.buildPythonPackage {
106 MarkupSafe = super.buildPythonPackage {
107 name = "MarkupSafe-0.23";
107 name = "MarkupSafe-0.23";
108 buildInputs = with self; [];
108 buildInputs = with self; [];
109 doCheck = false;
109 doCheck = false;
110 propagatedBuildInputs = with self; [];
110 propagatedBuildInputs = with self; [];
111 src = fetchurl {
111 src = fetchurl {
112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
114 };
114 };
115 meta = {
115 meta = {
116 license = [ pkgs.lib.licenses.bsdOriginal ];
116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 };
117 };
118 };
118 };
119 MySQL-python = super.buildPythonPackage {
119 MySQL-python = super.buildPythonPackage {
120 name = "MySQL-python-1.2.5";
120 name = "MySQL-python-1.2.5";
121 buildInputs = with self; [];
121 buildInputs = with self; [];
122 doCheck = false;
122 doCheck = false;
123 propagatedBuildInputs = with self; [];
123 propagatedBuildInputs = with self; [];
124 src = fetchurl {
124 src = fetchurl {
125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
126 md5 = "654f75b302db6ed8dc5a898c625e030c";
126 md5 = "654f75b302db6ed8dc5a898c625e030c";
127 };
127 };
128 meta = {
128 meta = {
129 license = [ pkgs.lib.licenses.gpl1 ];
129 license = [ pkgs.lib.licenses.gpl1 ];
130 };
130 };
131 };
131 };
132 Paste = super.buildPythonPackage {
132 Paste = super.buildPythonPackage {
133 name = "Paste-2.0.2";
133 name = "Paste-2.0.2";
134 buildInputs = with self; [];
134 buildInputs = with self; [];
135 doCheck = false;
135 doCheck = false;
136 propagatedBuildInputs = with self; [six];
136 propagatedBuildInputs = with self; [six];
137 src = fetchurl {
137 src = fetchurl {
138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
140 };
140 };
141 meta = {
141 meta = {
142 license = [ pkgs.lib.licenses.mit ];
142 license = [ pkgs.lib.licenses.mit ];
143 };
143 };
144 };
144 };
145 PasteDeploy = super.buildPythonPackage {
145 PasteDeploy = super.buildPythonPackage {
146 name = "PasteDeploy-1.5.2";
146 name = "PasteDeploy-1.5.2";
147 buildInputs = with self; [];
147 buildInputs = with self; [];
148 doCheck = false;
148 doCheck = false;
149 propagatedBuildInputs = with self; [];
149 propagatedBuildInputs = with self; [];
150 src = fetchurl {
150 src = fetchurl {
151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
152 md5 = "352b7205c78c8de4987578d19431af3b";
152 md5 = "352b7205c78c8de4987578d19431af3b";
153 };
153 };
154 meta = {
154 meta = {
155 license = [ pkgs.lib.licenses.mit ];
155 license = [ pkgs.lib.licenses.mit ];
156 };
156 };
157 };
157 };
158 PasteScript = super.buildPythonPackage {
158 PasteScript = super.buildPythonPackage {
159 name = "PasteScript-1.7.5";
159 name = "PasteScript-1.7.5";
160 buildInputs = with self; [];
160 buildInputs = with self; [];
161 doCheck = false;
161 doCheck = false;
162 propagatedBuildInputs = with self; [Paste PasteDeploy];
162 propagatedBuildInputs = with self; [Paste PasteDeploy];
163 src = fetchurl {
163 src = fetchurl {
164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
166 };
166 };
167 meta = {
167 meta = {
168 license = [ pkgs.lib.licenses.mit ];
168 license = [ pkgs.lib.licenses.mit ];
169 };
169 };
170 };
170 };
171 Pygments = super.buildPythonPackage {
171 Pygments = super.buildPythonPackage {
172 name = "Pygments-2.1.3";
172 name = "Pygments-2.1.3";
173 buildInputs = with self; [];
173 buildInputs = with self; [];
174 doCheck = false;
174 doCheck = false;
175 propagatedBuildInputs = with self; [];
175 propagatedBuildInputs = with self; [];
176 src = fetchurl {
176 src = fetchurl {
177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
179 };
179 };
180 meta = {
180 meta = {
181 license = [ pkgs.lib.licenses.bsdOriginal ];
181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 };
182 };
183 };
183 };
184 Pylons = super.buildPythonPackage {
184 Pylons = super.buildPythonPackage {
185 name = "Pylons-1.0.1";
185 name = "Pylons-1.0.1";
186 buildInputs = with self; [];
186 buildInputs = with self; [];
187 doCheck = false;
187 doCheck = false;
188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
189 src = fetchurl {
189 src = fetchurl {
190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
191 md5 = "6cb880d75fa81213192142b07a6e4915";
191 md5 = "6cb880d75fa81213192142b07a6e4915";
192 };
192 };
193 meta = {
193 meta = {
194 license = [ pkgs.lib.licenses.bsdOriginal ];
194 license = [ pkgs.lib.licenses.bsdOriginal ];
195 };
195 };
196 };
196 };
197 Pyro4 = super.buildPythonPackage {
197 Pyro4 = super.buildPythonPackage {
198 name = "Pyro4-4.41";
198 name = "Pyro4-4.41";
199 buildInputs = with self; [];
199 buildInputs = with self; [];
200 doCheck = false;
200 doCheck = false;
201 propagatedBuildInputs = with self; [serpent];
201 propagatedBuildInputs = with self; [serpent];
202 src = fetchurl {
202 src = fetchurl {
203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
205 };
205 };
206 meta = {
206 meta = {
207 license = [ pkgs.lib.licenses.mit ];
207 license = [ pkgs.lib.licenses.mit ];
208 };
208 };
209 };
209 };
210 Routes = super.buildPythonPackage {
210 Routes = super.buildPythonPackage {
211 name = "Routes-1.13";
211 name = "Routes-1.13";
212 buildInputs = with self; [];
212 buildInputs = with self; [];
213 doCheck = false;
213 doCheck = false;
214 propagatedBuildInputs = with self; [repoze.lru];
214 propagatedBuildInputs = with self; [repoze.lru];
215 src = fetchurl {
215 src = fetchurl {
216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
217 md5 = "d527b0ab7dd9172b1275a41f97448783";
217 md5 = "d527b0ab7dd9172b1275a41f97448783";
218 };
218 };
219 meta = {
219 meta = {
220 license = [ pkgs.lib.licenses.bsdOriginal ];
220 license = [ pkgs.lib.licenses.bsdOriginal ];
221 };
221 };
222 };
222 };
223 SQLAlchemy = super.buildPythonPackage {
223 SQLAlchemy = super.buildPythonPackage {
224 name = "SQLAlchemy-0.9.9";
224 name = "SQLAlchemy-0.9.9";
225 buildInputs = with self; [];
225 buildInputs = with self; [];
226 doCheck = false;
226 doCheck = false;
227 propagatedBuildInputs = with self; [];
227 propagatedBuildInputs = with self; [];
228 src = fetchurl {
228 src = fetchurl {
229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
231 };
231 };
232 meta = {
232 meta = {
233 license = [ pkgs.lib.licenses.mit ];
233 license = [ pkgs.lib.licenses.mit ];
234 };
234 };
235 };
235 };
236 Sphinx = super.buildPythonPackage {
236 Sphinx = super.buildPythonPackage {
237 name = "Sphinx-1.2.2";
237 name = "Sphinx-1.2.2";
238 buildInputs = with self; [];
238 buildInputs = with self; [];
239 doCheck = false;
239 doCheck = false;
240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
241 src = fetchurl {
241 src = fetchurl {
242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
244 };
244 };
245 meta = {
245 meta = {
246 license = [ pkgs.lib.licenses.bsdOriginal ];
246 license = [ pkgs.lib.licenses.bsdOriginal ];
247 };
247 };
248 };
248 };
249 Tempita = super.buildPythonPackage {
249 Tempita = super.buildPythonPackage {
250 name = "Tempita-0.5.2";
250 name = "Tempita-0.5.2";
251 buildInputs = with self; [];
251 buildInputs = with self; [];
252 doCheck = false;
252 doCheck = false;
253 propagatedBuildInputs = with self; [];
253 propagatedBuildInputs = with self; [];
254 src = fetchurl {
254 src = fetchurl {
255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
257 };
257 };
258 meta = {
258 meta = {
259 license = [ pkgs.lib.licenses.mit ];
259 license = [ pkgs.lib.licenses.mit ];
260 };
260 };
261 };
261 };
262 URLObject = super.buildPythonPackage {
262 URLObject = super.buildPythonPackage {
263 name = "URLObject-2.4.0";
263 name = "URLObject-2.4.0";
264 buildInputs = with self; [];
264 buildInputs = with self; [];
265 doCheck = false;
265 doCheck = false;
266 propagatedBuildInputs = with self; [];
266 propagatedBuildInputs = with self; [];
267 src = fetchurl {
267 src = fetchurl {
268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
270 };
270 };
271 meta = {
271 meta = {
272 license = [ ];
272 license = [ ];
273 };
273 };
274 };
274 };
275 WebError = super.buildPythonPackage {
275 WebError = super.buildPythonPackage {
276 name = "WebError-0.10.3";
276 name = "WebError-0.10.3";
277 buildInputs = with self; [];
277 buildInputs = with self; [];
278 doCheck = false;
278 doCheck = false;
279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
280 src = fetchurl {
280 src = fetchurl {
281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
283 };
283 };
284 meta = {
284 meta = {
285 license = [ pkgs.lib.licenses.mit ];
285 license = [ pkgs.lib.licenses.mit ];
286 };
286 };
287 };
287 };
288 WebHelpers = super.buildPythonPackage {
288 WebHelpers = super.buildPythonPackage {
289 name = "WebHelpers-1.3";
289 name = "WebHelpers-1.3";
290 buildInputs = with self; [];
290 buildInputs = with self; [];
291 doCheck = false;
291 doCheck = false;
292 propagatedBuildInputs = with self; [MarkupSafe];
292 propagatedBuildInputs = with self; [MarkupSafe];
293 src = fetchurl {
293 src = fetchurl {
294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
295 md5 = "32749ffadfc40fea51075a7def32588b";
295 md5 = "32749ffadfc40fea51075a7def32588b";
296 };
296 };
297 meta = {
297 meta = {
298 license = [ pkgs.lib.licenses.bsdOriginal ];
298 license = [ pkgs.lib.licenses.bsdOriginal ];
299 };
299 };
300 };
300 };
301 WebHelpers2 = super.buildPythonPackage {
301 WebHelpers2 = super.buildPythonPackage {
302 name = "WebHelpers2-2.0";
302 name = "WebHelpers2-2.0";
303 buildInputs = with self; [];
303 buildInputs = with self; [];
304 doCheck = false;
304 doCheck = false;
305 propagatedBuildInputs = with self; [MarkupSafe six];
305 propagatedBuildInputs = with self; [MarkupSafe six];
306 src = fetchurl {
306 src = fetchurl {
307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
309 };
309 };
310 meta = {
310 meta = {
311 license = [ pkgs.lib.licenses.mit ];
311 license = [ pkgs.lib.licenses.mit ];
312 };
312 };
313 };
313 };
314 WebOb = super.buildPythonPackage {
314 WebOb = super.buildPythonPackage {
315 name = "WebOb-1.3.1";
315 name = "WebOb-1.3.1";
316 buildInputs = with self; [];
316 buildInputs = with self; [];
317 doCheck = false;
317 doCheck = false;
318 propagatedBuildInputs = with self; [];
318 propagatedBuildInputs = with self; [];
319 src = fetchurl {
319 src = fetchurl {
320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
321 md5 = "20918251c5726956ba8fef22d1556177";
321 md5 = "20918251c5726956ba8fef22d1556177";
322 };
322 };
323 meta = {
323 meta = {
324 license = [ pkgs.lib.licenses.mit ];
324 license = [ pkgs.lib.licenses.mit ];
325 };
325 };
326 };
326 };
327 WebTest = super.buildPythonPackage {
327 WebTest = super.buildPythonPackage {
328 name = "WebTest-1.4.3";
328 name = "WebTest-1.4.3";
329 buildInputs = with self; [];
329 buildInputs = with self; [];
330 doCheck = false;
330 doCheck = false;
331 propagatedBuildInputs = with self; [WebOb];
331 propagatedBuildInputs = with self; [WebOb];
332 src = fetchurl {
332 src = fetchurl {
333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
334 md5 = "631ce728bed92c681a4020a36adbc353";
334 md5 = "631ce728bed92c681a4020a36adbc353";
335 };
335 };
336 meta = {
336 meta = {
337 license = [ pkgs.lib.licenses.mit ];
337 license = [ pkgs.lib.licenses.mit ];
338 };
338 };
339 };
339 };
340 Whoosh = super.buildPythonPackage {
340 Whoosh = super.buildPythonPackage {
341 name = "Whoosh-2.7.0";
341 name = "Whoosh-2.7.0";
342 buildInputs = with self; [];
342 buildInputs = with self; [];
343 doCheck = false;
343 doCheck = false;
344 propagatedBuildInputs = with self; [];
344 propagatedBuildInputs = with self; [];
345 src = fetchurl {
345 src = fetchurl {
346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
348 };
348 };
349 meta = {
349 meta = {
350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
351 };
351 };
352 };
352 };
353 alembic = super.buildPythonPackage {
353 alembic = super.buildPythonPackage {
354 name = "alembic-0.8.4";
354 name = "alembic-0.8.4";
355 buildInputs = with self; [];
355 buildInputs = with self; [];
356 doCheck = false;
356 doCheck = false;
357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
358 src = fetchurl {
358 src = fetchurl {
359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
361 };
361 };
362 meta = {
362 meta = {
363 license = [ pkgs.lib.licenses.mit ];
363 license = [ pkgs.lib.licenses.mit ];
364 };
364 };
365 };
365 };
366 amqplib = super.buildPythonPackage {
366 amqplib = super.buildPythonPackage {
367 name = "amqplib-1.0.2";
367 name = "amqplib-1.0.2";
368 buildInputs = with self; [];
368 buildInputs = with self; [];
369 doCheck = false;
369 doCheck = false;
370 propagatedBuildInputs = with self; [];
370 propagatedBuildInputs = with self; [];
371 src = fetchurl {
371 src = fetchurl {
372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
374 };
374 };
375 meta = {
375 meta = {
376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
377 };
377 };
378 };
378 };
379 anyjson = super.buildPythonPackage {
379 anyjson = super.buildPythonPackage {
380 name = "anyjson-0.3.3";
380 name = "anyjson-0.3.3";
381 buildInputs = with self; [];
381 buildInputs = with self; [];
382 doCheck = false;
382 doCheck = false;
383 propagatedBuildInputs = with self; [];
383 propagatedBuildInputs = with self; [];
384 src = fetchurl {
384 src = fetchurl {
385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
387 };
387 };
388 meta = {
388 meta = {
389 license = [ pkgs.lib.licenses.bsdOriginal ];
389 license = [ pkgs.lib.licenses.bsdOriginal ];
390 };
390 };
391 };
391 };
392 appenlight-client = super.buildPythonPackage {
392 appenlight-client = super.buildPythonPackage {
393 name = "appenlight-client-0.6.14";
393 name = "appenlight-client-0.6.14";
394 buildInputs = with self; [];
394 buildInputs = with self; [];
395 doCheck = false;
395 doCheck = false;
396 propagatedBuildInputs = with self; [WebOb requests];
396 propagatedBuildInputs = with self; [WebOb requests];
397 src = fetchurl {
397 src = fetchurl {
398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
399 md5 = "578c69b09f4356d898fff1199b98a95c";
399 md5 = "578c69b09f4356d898fff1199b98a95c";
400 };
400 };
401 meta = {
401 meta = {
402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
403 };
403 };
404 };
404 };
405 authomatic = super.buildPythonPackage {
405 authomatic = super.buildPythonPackage {
406 name = "authomatic-0.1.0.post1";
406 name = "authomatic-0.1.0.post1";
407 buildInputs = with self; [];
407 buildInputs = with self; [];
408 doCheck = false;
408 doCheck = false;
409 propagatedBuildInputs = with self; [];
409 propagatedBuildInputs = with self; [];
410 src = fetchurl {
410 src = fetchurl {
411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
413 };
413 };
414 meta = {
414 meta = {
415 license = [ pkgs.lib.licenses.mit ];
415 license = [ pkgs.lib.licenses.mit ];
416 };
416 };
417 };
417 };
418 backport-ipaddress = super.buildPythonPackage {
418 backport-ipaddress = super.buildPythonPackage {
419 name = "backport-ipaddress-0.1";
419 name = "backport-ipaddress-0.1";
420 buildInputs = with self; [];
420 buildInputs = with self; [];
421 doCheck = false;
421 doCheck = false;
422 propagatedBuildInputs = with self; [];
422 propagatedBuildInputs = with self; [];
423 src = fetchurl {
423 src = fetchurl {
424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
425 md5 = "9c1f45f4361f71b124d7293a60006c05";
425 md5 = "9c1f45f4361f71b124d7293a60006c05";
426 };
426 };
427 meta = {
427 meta = {
428 license = [ pkgs.lib.licenses.psfl ];
428 license = [ pkgs.lib.licenses.psfl ];
429 };
429 };
430 };
430 };
431 bottle = super.buildPythonPackage {
431 bottle = super.buildPythonPackage {
432 name = "bottle-0.12.8";
432 name = "bottle-0.12.8";
433 buildInputs = with self; [];
433 buildInputs = with self; [];
434 doCheck = false;
434 doCheck = false;
435 propagatedBuildInputs = with self; [];
435 propagatedBuildInputs = with self; [];
436 src = fetchurl {
436 src = fetchurl {
437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
439 };
439 };
440 meta = {
440 meta = {
441 license = [ pkgs.lib.licenses.mit ];
441 license = [ pkgs.lib.licenses.mit ];
442 };
442 };
443 };
443 };
444 bumpversion = super.buildPythonPackage {
444 bumpversion = super.buildPythonPackage {
445 name = "bumpversion-0.5.3";
445 name = "bumpversion-0.5.3";
446 buildInputs = with self; [];
446 buildInputs = with self; [];
447 doCheck = false;
447 doCheck = false;
448 propagatedBuildInputs = with self; [];
448 propagatedBuildInputs = with self; [];
449 src = fetchurl {
449 src = fetchurl {
450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
452 };
452 };
453 meta = {
453 meta = {
454 license = [ pkgs.lib.licenses.mit ];
454 license = [ pkgs.lib.licenses.mit ];
455 };
455 };
456 };
456 };
457 celery = super.buildPythonPackage {
457 celery = super.buildPythonPackage {
458 name = "celery-2.2.10";
458 name = "celery-2.2.10";
459 buildInputs = with self; [];
459 buildInputs = with self; [];
460 doCheck = false;
460 doCheck = false;
461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
462 src = fetchurl {
462 src = fetchurl {
463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
464 md5 = "898bc87e54f278055b561316ba73e222";
464 md5 = "898bc87e54f278055b561316ba73e222";
465 };
465 };
466 meta = {
466 meta = {
467 license = [ pkgs.lib.licenses.bsdOriginal ];
467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 };
468 };
469 };
469 };
470 click = super.buildPythonPackage {
470 click = super.buildPythonPackage {
471 name = "click-5.1";
471 name = "click-5.1";
472 buildInputs = with self; [];
472 buildInputs = with self; [];
473 doCheck = false;
473 doCheck = false;
474 propagatedBuildInputs = with self; [];
474 propagatedBuildInputs = with self; [];
475 src = fetchurl {
475 src = fetchurl {
476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
477 md5 = "9c5323008cccfe232a8b161fc8196d41";
477 md5 = "9c5323008cccfe232a8b161fc8196d41";
478 };
478 };
479 meta = {
479 meta = {
480 license = [ pkgs.lib.licenses.bsdOriginal ];
480 license = [ pkgs.lib.licenses.bsdOriginal ];
481 };
481 };
482 };
482 };
483 colander = super.buildPythonPackage {
483 colander = super.buildPythonPackage {
484 name = "colander-1.2";
484 name = "colander-1.2";
485 buildInputs = with self; [];
485 buildInputs = with self; [];
486 doCheck = false;
486 doCheck = false;
487 propagatedBuildInputs = with self; [translationstring iso8601];
487 propagatedBuildInputs = with self; [translationstring iso8601];
488 src = fetchurl {
488 src = fetchurl {
489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
490 md5 = "83db21b07936a0726e588dae1914b9ed";
490 md5 = "83db21b07936a0726e588dae1914b9ed";
491 };
491 };
492 meta = {
492 meta = {
493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
494 };
494 };
495 };
495 };
496 configobj = super.buildPythonPackage {
496 configobj = super.buildPythonPackage {
497 name = "configobj-5.0.6";
497 name = "configobj-5.0.6";
498 buildInputs = with self; [];
498 buildInputs = with self; [];
499 doCheck = false;
499 doCheck = false;
500 propagatedBuildInputs = with self; [six];
500 propagatedBuildInputs = with self; [six];
501 src = fetchurl {
501 src = fetchurl {
502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
504 };
504 };
505 meta = {
505 meta = {
506 license = [ pkgs.lib.licenses.bsdOriginal ];
506 license = [ pkgs.lib.licenses.bsdOriginal ];
507 };
507 };
508 };
508 };
509 cov-core = super.buildPythonPackage {
509 cov-core = super.buildPythonPackage {
510 name = "cov-core-1.15.0";
510 name = "cov-core-1.15.0";
511 buildInputs = with self; [];
511 buildInputs = with self; [];
512 doCheck = false;
512 doCheck = false;
513 propagatedBuildInputs = with self; [coverage];
513 propagatedBuildInputs = with self; [coverage];
514 src = fetchurl {
514 src = fetchurl {
515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
517 };
517 };
518 meta = {
518 meta = {
519 license = [ pkgs.lib.licenses.mit ];
519 license = [ pkgs.lib.licenses.mit ];
520 };
520 };
521 };
521 };
522 coverage = super.buildPythonPackage {
522 coverage = super.buildPythonPackage {
523 name = "coverage-3.7.1";
523 name = "coverage-3.7.1";
524 buildInputs = with self; [];
524 buildInputs = with self; [];
525 doCheck = false;
525 doCheck = false;
526 propagatedBuildInputs = with self; [];
526 propagatedBuildInputs = with self; [];
527 src = fetchurl {
527 src = fetchurl {
528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
530 };
530 };
531 meta = {
531 meta = {
532 license = [ pkgs.lib.licenses.bsdOriginal ];
532 license = [ pkgs.lib.licenses.bsdOriginal ];
533 };
533 };
534 };
534 };
535 cssselect = super.buildPythonPackage {
535 cssselect = super.buildPythonPackage {
536 name = "cssselect-0.9.1";
536 name = "cssselect-0.9.1";
537 buildInputs = with self; [];
537 buildInputs = with self; [];
538 doCheck = false;
538 doCheck = false;
539 propagatedBuildInputs = with self; [];
539 propagatedBuildInputs = with self; [];
540 src = fetchurl {
540 src = fetchurl {
541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
543 };
543 };
544 meta = {
544 meta = {
545 license = [ pkgs.lib.licenses.bsdOriginal ];
545 license = [ pkgs.lib.licenses.bsdOriginal ];
546 };
546 };
547 };
547 };
548 decorator = super.buildPythonPackage {
548 decorator = super.buildPythonPackage {
549 name = "decorator-3.4.2";
549 name = "decorator-3.4.2";
550 buildInputs = with self; [];
550 buildInputs = with self; [];
551 doCheck = false;
551 doCheck = false;
552 propagatedBuildInputs = with self; [];
552 propagatedBuildInputs = with self; [];
553 src = fetchurl {
553 src = fetchurl {
554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
556 };
556 };
557 meta = {
557 meta = {
558 license = [ pkgs.lib.licenses.bsdOriginal ];
558 license = [ pkgs.lib.licenses.bsdOriginal ];
559 };
559 };
560 };
560 };
561 docutils = super.buildPythonPackage {
561 docutils = super.buildPythonPackage {
562 name = "docutils-0.12";
562 name = "docutils-0.12";
563 buildInputs = with self; [];
563 buildInputs = with self; [];
564 doCheck = false;
564 doCheck = false;
565 propagatedBuildInputs = with self; [];
565 propagatedBuildInputs = with self; [];
566 src = fetchurl {
566 src = fetchurl {
567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
568 md5 = "4622263b62c5c771c03502afa3157768";
568 md5 = "4622263b62c5c771c03502afa3157768";
569 };
569 };
570 meta = {
570 meta = {
571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
572 };
572 };
573 };
573 };
574 dogpile.cache = super.buildPythonPackage {
574 dogpile.cache = super.buildPythonPackage {
575 name = "dogpile.cache-0.6.1";
575 name = "dogpile.cache-0.6.1";
576 buildInputs = with self; [];
576 buildInputs = with self; [];
577 doCheck = false;
577 doCheck = false;
578 propagatedBuildInputs = with self; [dogpile.core];
578 propagatedBuildInputs = with self; [dogpile.core];
579 src = fetchurl {
579 src = fetchurl {
580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
582 };
582 };
583 meta = {
583 meta = {
584 license = [ pkgs.lib.licenses.bsdOriginal ];
584 license = [ pkgs.lib.licenses.bsdOriginal ];
585 };
585 };
586 };
586 };
587 dogpile.core = super.buildPythonPackage {
587 dogpile.core = super.buildPythonPackage {
588 name = "dogpile.core-0.4.1";
588 name = "dogpile.core-0.4.1";
589 buildInputs = with self; [];
589 buildInputs = with self; [];
590 doCheck = false;
590 doCheck = false;
591 propagatedBuildInputs = with self; [];
591 propagatedBuildInputs = with self; [];
592 src = fetchurl {
592 src = fetchurl {
593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
594 md5 = "01cb19f52bba3e95c9b560f39341f045";
594 md5 = "01cb19f52bba3e95c9b560f39341f045";
595 };
595 };
596 meta = {
596 meta = {
597 license = [ pkgs.lib.licenses.bsdOriginal ];
597 license = [ pkgs.lib.licenses.bsdOriginal ];
598 };
598 };
599 };
599 };
600 dulwich = super.buildPythonPackage {
600 dulwich = super.buildPythonPackage {
601 name = "dulwich-0.12.0";
601 name = "dulwich-0.12.0";
602 buildInputs = with self; [];
602 buildInputs = with self; [];
603 doCheck = false;
603 doCheck = false;
604 propagatedBuildInputs = with self; [];
604 propagatedBuildInputs = with self; [];
605 src = fetchurl {
605 src = fetchurl {
606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
608 };
608 };
609 meta = {
609 meta = {
610 license = [ pkgs.lib.licenses.gpl2Plus ];
610 license = [ pkgs.lib.licenses.gpl2Plus ];
611 };
611 };
612 };
612 };
613 ecdsa = super.buildPythonPackage {
613 ecdsa = super.buildPythonPackage {
614 name = "ecdsa-0.11";
614 name = "ecdsa-0.11";
615 buildInputs = with self; [];
615 buildInputs = with self; [];
616 doCheck = false;
616 doCheck = false;
617 propagatedBuildInputs = with self; [];
617 propagatedBuildInputs = with self; [];
618 src = fetchurl {
618 src = fetchurl {
619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
621 };
621 };
622 meta = {
622 meta = {
623 license = [ pkgs.lib.licenses.mit ];
623 license = [ pkgs.lib.licenses.mit ];
624 };
624 };
625 };
625 };
626 elasticsearch = super.buildPythonPackage {
626 elasticsearch = super.buildPythonPackage {
627 name = "elasticsearch-2.3.0";
627 name = "elasticsearch-2.3.0";
628 buildInputs = with self; [];
628 buildInputs = with self; [];
629 doCheck = false;
629 doCheck = false;
630 propagatedBuildInputs = with self; [urllib3];
630 propagatedBuildInputs = with self; [urllib3];
631 src = fetchurl {
631 src = fetchurl {
632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
633 md5 = "2550f3b51629cf1ef9636608af92c340";
633 md5 = "2550f3b51629cf1ef9636608af92c340";
634 };
634 };
635 meta = {
635 meta = {
636 license = [ pkgs.lib.licenses.asl20 ];
636 license = [ pkgs.lib.licenses.asl20 ];
637 };
637 };
638 };
638 };
639 elasticsearch-dsl = super.buildPythonPackage {
639 elasticsearch-dsl = super.buildPythonPackage {
640 name = "elasticsearch-dsl-2.0.0";
640 name = "elasticsearch-dsl-2.0.0";
641 buildInputs = with self; [];
641 buildInputs = with self; [];
642 doCheck = false;
642 doCheck = false;
643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
644 src = fetchurl {
644 src = fetchurl {
645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
647 };
647 };
648 meta = {
648 meta = {
649 license = [ pkgs.lib.licenses.asl20 ];
649 license = [ pkgs.lib.licenses.asl20 ];
650 };
650 };
651 };
651 };
652 flake8 = super.buildPythonPackage {
652 flake8 = super.buildPythonPackage {
653 name = "flake8-2.4.1";
653 name = "flake8-2.4.1";
654 buildInputs = with self; [];
654 buildInputs = with self; [];
655 doCheck = false;
655 doCheck = false;
656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
657 src = fetchurl {
657 src = fetchurl {
658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
660 };
660 };
661 meta = {
661 meta = {
662 license = [ pkgs.lib.licenses.mit ];
662 license = [ pkgs.lib.licenses.mit ];
663 };
663 };
664 };
664 };
665 future = super.buildPythonPackage {
665 future = super.buildPythonPackage {
666 name = "future-0.14.3";
666 name = "future-0.14.3";
667 buildInputs = with self; [];
667 buildInputs = with self; [];
668 doCheck = false;
668 doCheck = false;
669 propagatedBuildInputs = with self; [];
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
670 src = fetchurl {
671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
673 };
673 };
674 meta = {
674 meta = {
675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
676 };
676 };
677 };
677 };
678 futures = super.buildPythonPackage {
678 futures = super.buildPythonPackage {
679 name = "futures-3.0.2";
679 name = "futures-3.0.2";
680 buildInputs = with self; [];
680 buildInputs = with self; [];
681 doCheck = false;
681 doCheck = false;
682 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
683 src = fetchurl {
683 src = fetchurl {
684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
686 };
686 };
687 meta = {
687 meta = {
688 license = [ pkgs.lib.licenses.bsdOriginal ];
688 license = [ pkgs.lib.licenses.bsdOriginal ];
689 };
689 };
690 };
690 };
691 gnureadline = super.buildPythonPackage {
691 gnureadline = super.buildPythonPackage {
692 name = "gnureadline-6.3.3";
692 name = "gnureadline-6.3.3";
693 buildInputs = with self; [];
693 buildInputs = with self; [];
694 doCheck = false;
694 doCheck = false;
695 propagatedBuildInputs = with self; [];
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
696 src = fetchurl {
697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
699 };
699 };
700 meta = {
700 meta = {
701 license = [ pkgs.lib.licenses.gpl1 ];
701 license = [ pkgs.lib.licenses.gpl1 ];
702 };
702 };
703 };
703 };
704 gprof2dot = super.buildPythonPackage {
704 gprof2dot = super.buildPythonPackage {
705 name = "gprof2dot-2015.12.1";
705 name = "gprof2dot-2015.12.1";
706 buildInputs = with self; [];
706 buildInputs = with self; [];
707 doCheck = false;
707 doCheck = false;
708 propagatedBuildInputs = with self; [];
708 propagatedBuildInputs = with self; [];
709 src = fetchurl {
709 src = fetchurl {
710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
711 md5 = "e23bf4e2f94db032750c193384b4165b";
711 md5 = "e23bf4e2f94db032750c193384b4165b";
712 };
712 };
713 meta = {
713 meta = {
714 license = [ { fullName = "LGPL"; } ];
714 license = [ { fullName = "LGPL"; } ];
715 };
715 };
716 };
716 };
717 gunicorn = super.buildPythonPackage {
717 gunicorn = super.buildPythonPackage {
718 name = "gunicorn-19.6.0";
718 name = "gunicorn-19.6.0";
719 buildInputs = with self; [];
719 buildInputs = with self; [];
720 doCheck = false;
720 doCheck = false;
721 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
722 src = fetchurl {
722 src = fetchurl {
723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
724 md5 = "338e5e8a83ea0f0625f768dba4597530";
724 md5 = "338e5e8a83ea0f0625f768dba4597530";
725 };
725 };
726 meta = {
726 meta = {
727 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
728 };
728 };
729 };
729 };
730 infrae.cache = super.buildPythonPackage {
730 infrae.cache = super.buildPythonPackage {
731 name = "infrae.cache-1.0.1";
731 name = "infrae.cache-1.0.1";
732 buildInputs = with self; [];
732 buildInputs = with self; [];
733 doCheck = false;
733 doCheck = false;
734 propagatedBuildInputs = with self; [Beaker repoze.lru];
734 propagatedBuildInputs = with self; [Beaker repoze.lru];
735 src = fetchurl {
735 src = fetchurl {
736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
737 md5 = "b09076a766747e6ed2a755cc62088e32";
737 md5 = "b09076a766747e6ed2a755cc62088e32";
738 };
738 };
739 meta = {
739 meta = {
740 license = [ pkgs.lib.licenses.zpt21 ];
740 license = [ pkgs.lib.licenses.zpt21 ];
741 };
741 };
742 };
742 };
743 invoke = super.buildPythonPackage {
743 invoke = super.buildPythonPackage {
744 name = "invoke-0.13.0";
744 name = "invoke-0.13.0";
745 buildInputs = with self; [];
745 buildInputs = with self; [];
746 doCheck = false;
746 doCheck = false;
747 propagatedBuildInputs = with self; [];
747 propagatedBuildInputs = with self; [];
748 src = fetchurl {
748 src = fetchurl {
749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
751 };
751 };
752 meta = {
752 meta = {
753 license = [ pkgs.lib.licenses.bsdOriginal ];
753 license = [ pkgs.lib.licenses.bsdOriginal ];
754 };
754 };
755 };
755 };
756 ipdb = super.buildPythonPackage {
756 ipdb = super.buildPythonPackage {
757 name = "ipdb-0.8";
757 name = "ipdb-0.8";
758 buildInputs = with self; [];
758 buildInputs = with self; [];
759 doCheck = false;
759 doCheck = false;
760 propagatedBuildInputs = with self; [ipython];
760 propagatedBuildInputs = with self; [ipython];
761 src = fetchurl {
761 src = fetchurl {
762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
764 };
764 };
765 meta = {
765 meta = {
766 license = [ pkgs.lib.licenses.gpl1 ];
766 license = [ pkgs.lib.licenses.gpl1 ];
767 };
767 };
768 };
768 };
769 ipython = super.buildPythonPackage {
769 ipython = super.buildPythonPackage {
770 name = "ipython-3.1.0";
770 name = "ipython-3.1.0";
771 buildInputs = with self; [];
771 buildInputs = with self; [];
772 doCheck = false;
772 doCheck = false;
773 propagatedBuildInputs = with self; [];
773 propagatedBuildInputs = with self; [];
774 src = fetchurl {
774 src = fetchurl {
775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
777 };
777 };
778 meta = {
778 meta = {
779 license = [ pkgs.lib.licenses.bsdOriginal ];
779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 };
780 };
781 };
781 };
782 iso8601 = super.buildPythonPackage {
782 iso8601 = super.buildPythonPackage {
783 name = "iso8601-0.1.11";
783 name = "iso8601-0.1.11";
784 buildInputs = with self; [];
784 buildInputs = with self; [];
785 doCheck = false;
785 doCheck = false;
786 propagatedBuildInputs = with self; [];
786 propagatedBuildInputs = with self; [];
787 src = fetchurl {
787 src = fetchurl {
788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
789 md5 = "b06d11cd14a64096f907086044f0fe38";
789 md5 = "b06d11cd14a64096f907086044f0fe38";
790 };
790 };
791 meta = {
791 meta = {
792 license = [ pkgs.lib.licenses.mit ];
792 license = [ pkgs.lib.licenses.mit ];
793 };
793 };
794 };
794 };
795 itsdangerous = super.buildPythonPackage {
795 itsdangerous = super.buildPythonPackage {
796 name = "itsdangerous-0.24";
796 name = "itsdangerous-0.24";
797 buildInputs = with self; [];
797 buildInputs = with self; [];
798 doCheck = false;
798 doCheck = false;
799 propagatedBuildInputs = with self; [];
799 propagatedBuildInputs = with self; [];
800 src = fetchurl {
800 src = fetchurl {
801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
802 md5 = "a3d55aa79369aef5345c036a8a26307f";
802 md5 = "a3d55aa79369aef5345c036a8a26307f";
803 };
803 };
804 meta = {
804 meta = {
805 license = [ pkgs.lib.licenses.bsdOriginal ];
805 license = [ pkgs.lib.licenses.bsdOriginal ];
806 };
806 };
807 };
807 };
808 kombu = super.buildPythonPackage {
808 kombu = super.buildPythonPackage {
809 name = "kombu-1.5.1";
809 name = "kombu-1.5.1";
810 buildInputs = with self; [];
810 buildInputs = with self; [];
811 doCheck = false;
811 doCheck = false;
812 propagatedBuildInputs = with self; [anyjson amqplib];
812 propagatedBuildInputs = with self; [anyjson amqplib];
813 src = fetchurl {
813 src = fetchurl {
814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
816 };
816 };
817 meta = {
817 meta = {
818 license = [ pkgs.lib.licenses.bsdOriginal ];
818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 };
819 };
820 };
820 };
821 lxml = super.buildPythonPackage {
821 lxml = super.buildPythonPackage {
822 name = "lxml-3.4.4";
822 name = "lxml-3.4.4";
823 buildInputs = with self; [];
823 buildInputs = with self; [];
824 doCheck = false;
824 doCheck = false;
825 propagatedBuildInputs = with self; [];
825 propagatedBuildInputs = with self; [];
826 src = fetchurl {
826 src = fetchurl {
827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 };
829 };
830 meta = {
830 meta = {
831 license = [ pkgs.lib.licenses.bsdOriginal ];
831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 };
832 };
833 };
833 };
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
847 mccabe = super.buildPythonPackage {
834 mccabe = super.buildPythonPackage {
848 name = "mccabe-0.3";
835 name = "mccabe-0.3";
849 buildInputs = with self; [];
836 buildInputs = with self; [];
850 doCheck = false;
837 doCheck = false;
851 propagatedBuildInputs = with self; [];
838 propagatedBuildInputs = with self; [];
852 src = fetchurl {
839 src = fetchurl {
853 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
840 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
854 md5 = "81640948ff226f8c12b3277059489157";
841 md5 = "81640948ff226f8c12b3277059489157";
855 };
842 };
856 meta = {
843 meta = {
857 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
844 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
858 };
845 };
859 };
846 };
860 meld3 = super.buildPythonPackage {
847 meld3 = super.buildPythonPackage {
861 name = "meld3-1.0.2";
848 name = "meld3-1.0.2";
862 buildInputs = with self; [];
849 buildInputs = with self; [];
863 doCheck = false;
850 doCheck = false;
864 propagatedBuildInputs = with self; [];
851 propagatedBuildInputs = with self; [];
865 src = fetchurl {
852 src = fetchurl {
866 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
853 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
867 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
854 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
868 };
855 };
869 meta = {
856 meta = {
870 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
857 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
871 };
858 };
872 };
859 };
873 mock = super.buildPythonPackage {
860 mock = super.buildPythonPackage {
874 name = "mock-1.0.1";
861 name = "mock-1.0.1";
875 buildInputs = with self; [];
862 buildInputs = with self; [];
876 doCheck = false;
863 doCheck = false;
877 propagatedBuildInputs = with self; [];
864 propagatedBuildInputs = with self; [];
878 src = fetchurl {
865 src = fetchurl {
879 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
866 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
880 md5 = "869f08d003c289a97c1a6610faf5e913";
867 md5 = "869f08d003c289a97c1a6610faf5e913";
881 };
868 };
882 meta = {
869 meta = {
883 license = [ pkgs.lib.licenses.bsdOriginal ];
870 license = [ pkgs.lib.licenses.bsdOriginal ];
884 };
871 };
885 };
872 };
886 msgpack-python = super.buildPythonPackage {
873 msgpack-python = super.buildPythonPackage {
887 name = "msgpack-python-0.4.6";
874 name = "msgpack-python-0.4.6";
888 buildInputs = with self; [];
875 buildInputs = with self; [];
889 doCheck = false;
876 doCheck = false;
890 propagatedBuildInputs = with self; [];
877 propagatedBuildInputs = with self; [];
891 src = fetchurl {
878 src = fetchurl {
892 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
879 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
893 md5 = "8b317669314cf1bc881716cccdaccb30";
880 md5 = "8b317669314cf1bc881716cccdaccb30";
894 };
881 };
895 meta = {
882 meta = {
896 license = [ pkgs.lib.licenses.asl20 ];
883 license = [ pkgs.lib.licenses.asl20 ];
897 };
884 };
898 };
885 };
899 nose = super.buildPythonPackage {
886 nose = super.buildPythonPackage {
900 name = "nose-1.3.6";
887 name = "nose-1.3.6";
901 buildInputs = with self; [];
888 buildInputs = with self; [];
902 doCheck = false;
889 doCheck = false;
903 propagatedBuildInputs = with self; [];
890 propagatedBuildInputs = with self; [];
904 src = fetchurl {
891 src = fetchurl {
905 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
892 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
906 md5 = "0ca546d81ca8309080fc80cb389e7a16";
893 md5 = "0ca546d81ca8309080fc80cb389e7a16";
907 };
894 };
908 meta = {
895 meta = {
909 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
896 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
910 };
897 };
911 };
898 };
912 objgraph = super.buildPythonPackage {
899 objgraph = super.buildPythonPackage {
913 name = "objgraph-2.0.0";
900 name = "objgraph-2.0.0";
914 buildInputs = with self; [];
901 buildInputs = with self; [];
915 doCheck = false;
902 doCheck = false;
916 propagatedBuildInputs = with self; [];
903 propagatedBuildInputs = with self; [];
917 src = fetchurl {
904 src = fetchurl {
918 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
905 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
919 md5 = "25b0d5e5adc74aa63ead15699614159c";
906 md5 = "25b0d5e5adc74aa63ead15699614159c";
920 };
907 };
921 meta = {
908 meta = {
922 license = [ pkgs.lib.licenses.mit ];
909 license = [ pkgs.lib.licenses.mit ];
923 };
910 };
924 };
911 };
925 packaging = super.buildPythonPackage {
912 packaging = super.buildPythonPackage {
926 name = "packaging-15.2";
913 name = "packaging-15.2";
927 buildInputs = with self; [];
914 buildInputs = with self; [];
928 doCheck = false;
915 doCheck = false;
929 propagatedBuildInputs = with self; [];
916 propagatedBuildInputs = with self; [];
930 src = fetchurl {
917 src = fetchurl {
931 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
918 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
932 md5 = "c16093476f6ced42128bf610e5db3784";
919 md5 = "c16093476f6ced42128bf610e5db3784";
933 };
920 };
934 meta = {
921 meta = {
935 license = [ pkgs.lib.licenses.asl20 ];
922 license = [ pkgs.lib.licenses.asl20 ];
936 };
923 };
937 };
924 };
938 paramiko = super.buildPythonPackage {
925 paramiko = super.buildPythonPackage {
939 name = "paramiko-1.15.1";
926 name = "paramiko-1.15.1";
940 buildInputs = with self; [];
927 buildInputs = with self; [];
941 doCheck = false;
928 doCheck = false;
942 propagatedBuildInputs = with self; [pycrypto ecdsa];
929 propagatedBuildInputs = with self; [pycrypto ecdsa];
943 src = fetchurl {
930 src = fetchurl {
944 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
931 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
945 md5 = "48c274c3f9b1282932567b21f6acf3b5";
932 md5 = "48c274c3f9b1282932567b21f6acf3b5";
946 };
933 };
947 meta = {
934 meta = {
948 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
935 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
949 };
936 };
950 };
937 };
951 pep8 = super.buildPythonPackage {
938 pep8 = super.buildPythonPackage {
952 name = "pep8-1.5.7";
939 name = "pep8-1.5.7";
953 buildInputs = with self; [];
940 buildInputs = with self; [];
954 doCheck = false;
941 doCheck = false;
955 propagatedBuildInputs = with self; [];
942 propagatedBuildInputs = with self; [];
956 src = fetchurl {
943 src = fetchurl {
957 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
944 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
958 md5 = "f6adbdd69365ecca20513c709f9b7c93";
945 md5 = "f6adbdd69365ecca20513c709f9b7c93";
959 };
946 };
960 meta = {
947 meta = {
961 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
948 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
962 };
949 };
963 };
950 };
964 psutil = super.buildPythonPackage {
951 psutil = super.buildPythonPackage {
965 name = "psutil-2.2.1";
952 name = "psutil-2.2.1";
966 buildInputs = with self; [];
953 buildInputs = with self; [];
967 doCheck = false;
954 doCheck = false;
968 propagatedBuildInputs = with self; [];
955 propagatedBuildInputs = with self; [];
969 src = fetchurl {
956 src = fetchurl {
970 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
957 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
971 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
958 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
972 };
959 };
973 meta = {
960 meta = {
974 license = [ pkgs.lib.licenses.bsdOriginal ];
961 license = [ pkgs.lib.licenses.bsdOriginal ];
975 };
962 };
976 };
963 };
977 psycopg2 = super.buildPythonPackage {
964 psycopg2 = super.buildPythonPackage {
978 name = "psycopg2-2.6.1";
965 name = "psycopg2-2.6.1";
979 buildInputs = with self; [];
966 buildInputs = with self; [];
980 doCheck = false;
967 doCheck = false;
981 propagatedBuildInputs = with self; [];
968 propagatedBuildInputs = with self; [];
982 src = fetchurl {
969 src = fetchurl {
983 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
970 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
984 md5 = "842b44f8c95517ed5b792081a2370da1";
971 md5 = "842b44f8c95517ed5b792081a2370da1";
985 };
972 };
986 meta = {
973 meta = {
987 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
974 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
988 };
975 };
989 };
976 };
990 py = super.buildPythonPackage {
977 py = super.buildPythonPackage {
991 name = "py-1.4.29";
978 name = "py-1.4.29";
992 buildInputs = with self; [];
979 buildInputs = with self; [];
993 doCheck = false;
980 doCheck = false;
994 propagatedBuildInputs = with self; [];
981 propagatedBuildInputs = with self; [];
995 src = fetchurl {
982 src = fetchurl {
996 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
983 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
997 md5 = "c28e0accba523a29b35a48bb703fb96c";
984 md5 = "c28e0accba523a29b35a48bb703fb96c";
998 };
985 };
999 meta = {
986 meta = {
1000 license = [ pkgs.lib.licenses.mit ];
987 license = [ pkgs.lib.licenses.mit ];
1001 };
988 };
1002 };
989 };
1003 py-bcrypt = super.buildPythonPackage {
990 py-bcrypt = super.buildPythonPackage {
1004 name = "py-bcrypt-0.4";
991 name = "py-bcrypt-0.4";
1005 buildInputs = with self; [];
992 buildInputs = with self; [];
1006 doCheck = false;
993 doCheck = false;
1007 propagatedBuildInputs = with self; [];
994 propagatedBuildInputs = with self; [];
1008 src = fetchurl {
995 src = fetchurl {
1009 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
996 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1010 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
997 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1011 };
998 };
1012 meta = {
999 meta = {
1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1000 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 };
1001 };
1015 };
1002 };
1016 py-gfm = super.buildPythonPackage {
1003 py-gfm = super.buildPythonPackage {
1017 name = "py-gfm-0.1.3";
1004 name = "py-gfm-0.1.3";
1018 buildInputs = with self; [];
1005 buildInputs = with self; [];
1019 doCheck = false;
1006 doCheck = false;
1020 propagatedBuildInputs = with self; [setuptools Markdown];
1007 propagatedBuildInputs = with self; [setuptools Markdown];
1021 src = fetchurl {
1008 src = fetchurl {
1022 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1009 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1023 md5 = "e588d9e69640a241b97e2c59c22527a6";
1010 md5 = "e588d9e69640a241b97e2c59c22527a6";
1024 };
1011 };
1025 meta = {
1012 meta = {
1026 license = [ pkgs.lib.licenses.bsdOriginal ];
1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1027 };
1014 };
1028 };
1015 };
1029 pycrypto = super.buildPythonPackage {
1016 pycrypto = super.buildPythonPackage {
1030 name = "pycrypto-2.6.1";
1017 name = "pycrypto-2.6.1";
1031 buildInputs = with self; [];
1018 buildInputs = with self; [];
1032 doCheck = false;
1019 doCheck = false;
1033 propagatedBuildInputs = with self; [];
1020 propagatedBuildInputs = with self; [];
1034 src = fetchurl {
1021 src = fetchurl {
1035 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1022 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1036 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1023 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1037 };
1024 };
1038 meta = {
1025 meta = {
1039 license = [ pkgs.lib.licenses.publicDomain ];
1026 license = [ pkgs.lib.licenses.publicDomain ];
1040 };
1027 };
1041 };
1028 };
1042 pycurl = super.buildPythonPackage {
1029 pycurl = super.buildPythonPackage {
1043 name = "pycurl-7.19.5";
1030 name = "pycurl-7.19.5";
1044 buildInputs = with self; [];
1031 buildInputs = with self; [];
1045 doCheck = false;
1032 doCheck = false;
1046 propagatedBuildInputs = with self; [];
1033 propagatedBuildInputs = with self; [];
1047 src = fetchurl {
1034 src = fetchurl {
1048 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1035 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1049 md5 = "47b4eac84118e2606658122104e62072";
1036 md5 = "47b4eac84118e2606658122104e62072";
1050 };
1037 };
1051 meta = {
1038 meta = {
1052 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1039 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1053 };
1040 };
1054 };
1041 };
1055 pyflakes = super.buildPythonPackage {
1042 pyflakes = super.buildPythonPackage {
1056 name = "pyflakes-0.8.1";
1043 name = "pyflakes-0.8.1";
1057 buildInputs = with self; [];
1044 buildInputs = with self; [];
1058 doCheck = false;
1045 doCheck = false;
1059 propagatedBuildInputs = with self; [];
1046 propagatedBuildInputs = with self; [];
1060 src = fetchurl {
1047 src = fetchurl {
1061 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1048 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1062 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1049 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1063 };
1050 };
1064 meta = {
1051 meta = {
1065 license = [ pkgs.lib.licenses.mit ];
1052 license = [ pkgs.lib.licenses.mit ];
1066 };
1053 };
1067 };
1054 };
1068 pyparsing = super.buildPythonPackage {
1055 pyparsing = super.buildPythonPackage {
1069 name = "pyparsing-1.5.7";
1056 name = "pyparsing-1.5.7";
1070 buildInputs = with self; [];
1057 buildInputs = with self; [];
1071 doCheck = false;
1058 doCheck = false;
1072 propagatedBuildInputs = with self; [];
1059 propagatedBuildInputs = with self; [];
1073 src = fetchurl {
1060 src = fetchurl {
1074 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1061 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1075 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1062 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1076 };
1063 };
1077 meta = {
1064 meta = {
1078 license = [ pkgs.lib.licenses.mit ];
1065 license = [ pkgs.lib.licenses.mit ];
1079 };
1066 };
1080 };
1067 };
1081 pyramid = super.buildPythonPackage {
1068 pyramid = super.buildPythonPackage {
1082 name = "pyramid-1.6.1";
1069 name = "pyramid-1.6.1";
1083 buildInputs = with self; [];
1070 buildInputs = with self; [];
1084 doCheck = false;
1071 doCheck = false;
1085 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1072 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1086 src = fetchurl {
1073 src = fetchurl {
1087 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1074 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1088 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1075 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1089 };
1076 };
1090 meta = {
1077 meta = {
1091 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1078 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 };
1079 };
1093 };
1080 };
1094 pyramid-beaker = super.buildPythonPackage {
1081 pyramid-beaker = super.buildPythonPackage {
1095 name = "pyramid-beaker-0.8";
1082 name = "pyramid-beaker-0.8";
1096 buildInputs = with self; [];
1083 buildInputs = with self; [];
1097 doCheck = false;
1084 doCheck = false;
1098 propagatedBuildInputs = with self; [pyramid Beaker];
1085 propagatedBuildInputs = with self; [pyramid Beaker];
1099 src = fetchurl {
1086 src = fetchurl {
1100 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1087 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1101 md5 = "22f14be31b06549f80890e2c63a93834";
1088 md5 = "22f14be31b06549f80890e2c63a93834";
1102 };
1089 };
1103 meta = {
1090 meta = {
1104 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1091 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1105 };
1092 };
1106 };
1093 };
1107 pyramid-debugtoolbar = super.buildPythonPackage {
1094 pyramid-debugtoolbar = super.buildPythonPackage {
1108 name = "pyramid-debugtoolbar-2.4.2";
1095 name = "pyramid-debugtoolbar-2.4.2";
1109 buildInputs = with self; [];
1096 buildInputs = with self; [];
1110 doCheck = false;
1097 doCheck = false;
1111 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1098 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1112 src = fetchurl {
1099 src = fetchurl {
1113 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1100 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1114 md5 = "073ea67086cc4bd5decc3a000853642d";
1101 md5 = "073ea67086cc4bd5decc3a000853642d";
1115 };
1102 };
1116 meta = {
1103 meta = {
1117 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1104 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1118 };
1105 };
1119 };
1106 };
1120 pyramid-jinja2 = super.buildPythonPackage {
1107 pyramid-jinja2 = super.buildPythonPackage {
1121 name = "pyramid-jinja2-2.5";
1108 name = "pyramid-jinja2-2.5";
1122 buildInputs = with self; [];
1109 buildInputs = with self; [];
1123 doCheck = false;
1110 doCheck = false;
1124 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1111 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1125 src = fetchurl {
1112 src = fetchurl {
1126 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1113 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1127 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1114 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1128 };
1115 };
1129 meta = {
1116 meta = {
1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1117 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 };
1118 };
1132 };
1119 };
1133 pyramid-mako = super.buildPythonPackage {
1120 pyramid-mako = super.buildPythonPackage {
1134 name = "pyramid-mako-1.0.2";
1121 name = "pyramid-mako-1.0.2";
1135 buildInputs = with self; [];
1122 buildInputs = with self; [];
1136 doCheck = false;
1123 doCheck = false;
1137 propagatedBuildInputs = with self; [pyramid Mako];
1124 propagatedBuildInputs = with self; [pyramid Mako];
1138 src = fetchurl {
1125 src = fetchurl {
1139 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1126 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1140 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1127 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1141 };
1128 };
1142 meta = {
1129 meta = {
1143 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1144 };
1131 };
1145 };
1132 };
1146 pysqlite = super.buildPythonPackage {
1133 pysqlite = super.buildPythonPackage {
1147 name = "pysqlite-2.6.3";
1134 name = "pysqlite-2.6.3";
1148 buildInputs = with self; [];
1135 buildInputs = with self; [];
1149 doCheck = false;
1136 doCheck = false;
1150 propagatedBuildInputs = with self; [];
1137 propagatedBuildInputs = with self; [];
1151 src = fetchurl {
1138 src = fetchurl {
1152 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1139 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1153 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1140 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1154 };
1141 };
1155 meta = {
1142 meta = {
1156 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1143 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1157 };
1144 };
1158 };
1145 };
1159 pytest = super.buildPythonPackage {
1146 pytest = super.buildPythonPackage {
1160 name = "pytest-2.8.5";
1147 name = "pytest-2.8.5";
1161 buildInputs = with self; [];
1148 buildInputs = with self; [];
1162 doCheck = false;
1149 doCheck = false;
1163 propagatedBuildInputs = with self; [py];
1150 propagatedBuildInputs = with self; [py];
1164 src = fetchurl {
1151 src = fetchurl {
1165 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1152 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1166 md5 = "8493b06f700862f1294298d6c1b715a9";
1153 md5 = "8493b06f700862f1294298d6c1b715a9";
1167 };
1154 };
1168 meta = {
1155 meta = {
1169 license = [ pkgs.lib.licenses.mit ];
1156 license = [ pkgs.lib.licenses.mit ];
1170 };
1157 };
1171 };
1158 };
1172 pytest-catchlog = super.buildPythonPackage {
1159 pytest-catchlog = super.buildPythonPackage {
1173 name = "pytest-catchlog-1.2.2";
1160 name = "pytest-catchlog-1.2.2";
1174 buildInputs = with self; [];
1161 buildInputs = with self; [];
1175 doCheck = false;
1162 doCheck = false;
1176 propagatedBuildInputs = with self; [py pytest];
1163 propagatedBuildInputs = with self; [py pytest];
1177 src = fetchurl {
1164 src = fetchurl {
1178 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1165 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1179 md5 = "09d890c54c7456c818102b7ff8c182c8";
1166 md5 = "09d890c54c7456c818102b7ff8c182c8";
1180 };
1167 };
1181 meta = {
1168 meta = {
1182 license = [ pkgs.lib.licenses.mit ];
1169 license = [ pkgs.lib.licenses.mit ];
1183 };
1170 };
1184 };
1171 };
1185 pytest-cov = super.buildPythonPackage {
1172 pytest-cov = super.buildPythonPackage {
1186 name = "pytest-cov-1.8.1";
1173 name = "pytest-cov-1.8.1";
1187 buildInputs = with self; [];
1174 buildInputs = with self; [];
1188 doCheck = false;
1175 doCheck = false;
1189 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1176 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1190 src = fetchurl {
1177 src = fetchurl {
1191 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1178 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1192 md5 = "76c778afa2494088270348be42d759fc";
1179 md5 = "76c778afa2494088270348be42d759fc";
1193 };
1180 };
1194 meta = {
1181 meta = {
1195 license = [ pkgs.lib.licenses.mit ];
1182 license = [ pkgs.lib.licenses.mit ];
1196 };
1183 };
1197 };
1184 };
1198 pytest-profiling = super.buildPythonPackage {
1185 pytest-profiling = super.buildPythonPackage {
1199 name = "pytest-profiling-1.0.1";
1186 name = "pytest-profiling-1.0.1";
1200 buildInputs = with self; [];
1187 buildInputs = with self; [];
1201 doCheck = false;
1188 doCheck = false;
1202 propagatedBuildInputs = with self; [six pytest gprof2dot];
1189 propagatedBuildInputs = with self; [six pytest gprof2dot];
1203 src = fetchurl {
1190 src = fetchurl {
1204 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1191 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1205 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1192 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1206 };
1193 };
1207 meta = {
1194 meta = {
1208 license = [ pkgs.lib.licenses.mit ];
1195 license = [ pkgs.lib.licenses.mit ];
1209 };
1196 };
1210 };
1197 };
1211 pytest-runner = super.buildPythonPackage {
1198 pytest-runner = super.buildPythonPackage {
1212 name = "pytest-runner-2.7.1";
1199 name = "pytest-runner-2.7.1";
1213 buildInputs = with self; [];
1200 buildInputs = with self; [];
1214 doCheck = false;
1201 doCheck = false;
1215 propagatedBuildInputs = with self; [];
1202 propagatedBuildInputs = with self; [];
1216 src = fetchurl {
1203 src = fetchurl {
1217 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1204 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1218 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1205 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1219 };
1206 };
1220 meta = {
1207 meta = {
1221 license = [ pkgs.lib.licenses.mit ];
1208 license = [ pkgs.lib.licenses.mit ];
1222 };
1209 };
1223 };
1210 };
1224 pytest-timeout = super.buildPythonPackage {
1211 pytest-timeout = super.buildPythonPackage {
1225 name = "pytest-timeout-0.4";
1212 name = "pytest-timeout-0.4";
1226 buildInputs = with self; [];
1213 buildInputs = with self; [];
1227 doCheck = false;
1214 doCheck = false;
1228 propagatedBuildInputs = with self; [pytest];
1215 propagatedBuildInputs = with self; [pytest];
1229 src = fetchurl {
1216 src = fetchurl {
1230 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1217 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1231 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1218 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1232 };
1219 };
1233 meta = {
1220 meta = {
1234 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1221 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1235 };
1222 };
1236 };
1223 };
1237 python-dateutil = super.buildPythonPackage {
1224 python-dateutil = super.buildPythonPackage {
1238 name = "python-dateutil-1.5";
1225 name = "python-dateutil-1.5";
1239 buildInputs = with self; [];
1226 buildInputs = with self; [];
1240 doCheck = false;
1227 doCheck = false;
1241 propagatedBuildInputs = with self; [];
1228 propagatedBuildInputs = with self; [];
1242 src = fetchurl {
1229 src = fetchurl {
1243 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1230 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1244 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1231 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1245 };
1232 };
1246 meta = {
1233 meta = {
1247 license = [ pkgs.lib.licenses.psfl ];
1234 license = [ pkgs.lib.licenses.psfl ];
1248 };
1235 };
1249 };
1236 };
1250 python-editor = super.buildPythonPackage {
1237 python-editor = super.buildPythonPackage {
1251 name = "python-editor-1.0.1";
1238 name = "python-editor-1.0.1";
1252 buildInputs = with self; [];
1239 buildInputs = with self; [];
1253 doCheck = false;
1240 doCheck = false;
1254 propagatedBuildInputs = with self; [];
1241 propagatedBuildInputs = with self; [];
1255 src = fetchurl {
1242 src = fetchurl {
1256 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1243 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1257 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1244 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1258 };
1245 };
1259 meta = {
1246 meta = {
1260 license = [ pkgs.lib.licenses.asl20 ];
1247 license = [ pkgs.lib.licenses.asl20 ];
1261 };
1248 };
1262 };
1249 };
1263 python-ldap = super.buildPythonPackage {
1250 python-ldap = super.buildPythonPackage {
1264 name = "python-ldap-2.4.19";
1251 name = "python-ldap-2.4.19";
1265 buildInputs = with self; [];
1252 buildInputs = with self; [];
1266 doCheck = false;
1253 doCheck = false;
1267 propagatedBuildInputs = with self; [setuptools];
1254 propagatedBuildInputs = with self; [setuptools];
1268 src = fetchurl {
1255 src = fetchurl {
1269 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1256 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1270 md5 = "b941bf31d09739492aa19ef679e94ae3";
1257 md5 = "b941bf31d09739492aa19ef679e94ae3";
1271 };
1258 };
1272 meta = {
1259 meta = {
1273 license = [ pkgs.lib.licenses.psfl ];
1260 license = [ pkgs.lib.licenses.psfl ];
1274 };
1261 };
1275 };
1262 };
1276 python-memcached = super.buildPythonPackage {
1263 python-memcached = super.buildPythonPackage {
1277 name = "python-memcached-1.57";
1264 name = "python-memcached-1.57";
1278 buildInputs = with self; [];
1265 buildInputs = with self; [];
1279 doCheck = false;
1266 doCheck = false;
1280 propagatedBuildInputs = with self; [six];
1267 propagatedBuildInputs = with self; [six];
1281 src = fetchurl {
1268 src = fetchurl {
1282 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1269 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1283 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1270 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1284 };
1271 };
1285 meta = {
1272 meta = {
1286 license = [ pkgs.lib.licenses.psfl ];
1273 license = [ pkgs.lib.licenses.psfl ];
1287 };
1274 };
1288 };
1275 };
1289 python-pam = super.buildPythonPackage {
1276 python-pam = super.buildPythonPackage {
1290 name = "python-pam-1.8.2";
1277 name = "python-pam-1.8.2";
1291 buildInputs = with self; [];
1278 buildInputs = with self; [];
1292 doCheck = false;
1279 doCheck = false;
1293 propagatedBuildInputs = with self; [];
1280 propagatedBuildInputs = with self; [];
1294 src = fetchurl {
1281 src = fetchurl {
1295 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1282 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1296 md5 = "db71b6b999246fb05d78ecfbe166629d";
1283 md5 = "db71b6b999246fb05d78ecfbe166629d";
1297 };
1284 };
1298 meta = {
1285 meta = {
1299 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1286 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1300 };
1287 };
1301 };
1288 };
1302 pytz = super.buildPythonPackage {
1289 pytz = super.buildPythonPackage {
1303 name = "pytz-2015.4";
1290 name = "pytz-2015.4";
1304 buildInputs = with self; [];
1291 buildInputs = with self; [];
1305 doCheck = false;
1292 doCheck = false;
1306 propagatedBuildInputs = with self; [];
1293 propagatedBuildInputs = with self; [];
1307 src = fetchurl {
1294 src = fetchurl {
1308 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1295 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1309 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1296 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1310 };
1297 };
1311 meta = {
1298 meta = {
1312 license = [ pkgs.lib.licenses.mit ];
1299 license = [ pkgs.lib.licenses.mit ];
1313 };
1300 };
1314 };
1301 };
1315 pyzmq = super.buildPythonPackage {
1302 pyzmq = super.buildPythonPackage {
1316 name = "pyzmq-14.6.0";
1303 name = "pyzmq-14.6.0";
1317 buildInputs = with self; [];
1304 buildInputs = with self; [];
1318 doCheck = false;
1305 doCheck = false;
1319 propagatedBuildInputs = with self; [];
1306 propagatedBuildInputs = with self; [];
1320 src = fetchurl {
1307 src = fetchurl {
1321 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1308 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1322 md5 = "395b5de95a931afa5b14c9349a5b8024";
1309 md5 = "395b5de95a931afa5b14c9349a5b8024";
1323 };
1310 };
1324 meta = {
1311 meta = {
1325 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1312 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1326 };
1313 };
1327 };
1314 };
1328 recaptcha-client = super.buildPythonPackage {
1315 recaptcha-client = super.buildPythonPackage {
1329 name = "recaptcha-client-1.0.6";
1316 name = "recaptcha-client-1.0.6";
1330 buildInputs = with self; [];
1317 buildInputs = with self; [];
1331 doCheck = false;
1318 doCheck = false;
1332 propagatedBuildInputs = with self; [];
1319 propagatedBuildInputs = with self; [];
1333 src = fetchurl {
1320 src = fetchurl {
1334 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1321 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1335 md5 = "74228180f7e1fb76c4d7089160b0d919";
1322 md5 = "74228180f7e1fb76c4d7089160b0d919";
1336 };
1323 };
1337 meta = {
1324 meta = {
1338 license = [ { fullName = "MIT/X11"; } ];
1325 license = [ { fullName = "MIT/X11"; } ];
1339 };
1326 };
1340 };
1327 };
1341 repoze.lru = super.buildPythonPackage {
1328 repoze.lru = super.buildPythonPackage {
1342 name = "repoze.lru-0.6";
1329 name = "repoze.lru-0.6";
1343 buildInputs = with self; [];
1330 buildInputs = with self; [];
1344 doCheck = false;
1331 doCheck = false;
1345 propagatedBuildInputs = with self; [];
1332 propagatedBuildInputs = with self; [];
1346 src = fetchurl {
1333 src = fetchurl {
1347 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1334 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1348 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1335 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1349 };
1336 };
1350 meta = {
1337 meta = {
1351 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1338 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1352 };
1339 };
1353 };
1340 };
1354 requests = super.buildPythonPackage {
1341 requests = super.buildPythonPackage {
1355 name = "requests-2.9.1";
1342 name = "requests-2.9.1";
1356 buildInputs = with self; [];
1343 buildInputs = with self; [];
1357 doCheck = false;
1344 doCheck = false;
1358 propagatedBuildInputs = with self; [];
1345 propagatedBuildInputs = with self; [];
1359 src = fetchurl {
1346 src = fetchurl {
1360 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1347 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1361 md5 = "0b7f480d19012ec52bab78292efd976d";
1348 md5 = "0b7f480d19012ec52bab78292efd976d";
1362 };
1349 };
1363 meta = {
1350 meta = {
1364 license = [ pkgs.lib.licenses.asl20 ];
1351 license = [ pkgs.lib.licenses.asl20 ];
1365 };
1352 };
1366 };
1353 };
1367 rhodecode-enterprise-ce = super.buildPythonPackage {
1354 rhodecode-enterprise-ce = super.buildPythonPackage {
1368 name = "rhodecode-enterprise-ce-4.3.0";
1355 name = "rhodecode-enterprise-ce-4.3.0";
1369 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1356 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1370 doCheck = true;
1357 doCheck = true;
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1372 src = ./.;
1359 src = ./.;
1373 meta = {
1360 meta = {
1374 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1361 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1375 };
1362 };
1376 };
1363 };
1377 rhodecode-tools = super.buildPythonPackage {
1364 rhodecode-tools = super.buildPythonPackage {
1378 name = "rhodecode-tools-0.8.3";
1365 name = "rhodecode-tools-0.8.3";
1379 buildInputs = with self; [];
1366 buildInputs = with self; [];
1380 doCheck = false;
1367 doCheck = false;
1381 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1368 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1382 src = fetchurl {
1369 src = fetchurl {
1383 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1370 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1384 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1371 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1385 };
1372 };
1386 meta = {
1373 meta = {
1387 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1374 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1388 };
1375 };
1389 };
1376 };
1390 serpent = super.buildPythonPackage {
1377 serpent = super.buildPythonPackage {
1391 name = "serpent-1.12";
1378 name = "serpent-1.12";
1392 buildInputs = with self; [];
1379 buildInputs = with self; [];
1393 doCheck = false;
1380 doCheck = false;
1394 propagatedBuildInputs = with self; [];
1381 propagatedBuildInputs = with self; [];
1395 src = fetchurl {
1382 src = fetchurl {
1396 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1383 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1397 md5 = "05869ac7b062828b34f8f927f0457b65";
1384 md5 = "05869ac7b062828b34f8f927f0457b65";
1398 };
1385 };
1399 meta = {
1386 meta = {
1400 license = [ pkgs.lib.licenses.mit ];
1387 license = [ pkgs.lib.licenses.mit ];
1401 };
1388 };
1402 };
1389 };
1403 setproctitle = super.buildPythonPackage {
1390 setproctitle = super.buildPythonPackage {
1404 name = "setproctitle-1.1.8";
1391 name = "setproctitle-1.1.8";
1405 buildInputs = with self; [];
1392 buildInputs = with self; [];
1406 doCheck = false;
1393 doCheck = false;
1407 propagatedBuildInputs = with self; [];
1394 propagatedBuildInputs = with self; [];
1408 src = fetchurl {
1395 src = fetchurl {
1409 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1396 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1410 md5 = "728f4c8c6031bbe56083a48594027edd";
1397 md5 = "728f4c8c6031bbe56083a48594027edd";
1411 };
1398 };
1412 meta = {
1399 meta = {
1413 license = [ pkgs.lib.licenses.bsdOriginal ];
1400 license = [ pkgs.lib.licenses.bsdOriginal ];
1414 };
1401 };
1415 };
1402 };
1416 setuptools = super.buildPythonPackage {
1403 setuptools = super.buildPythonPackage {
1417 name = "setuptools-20.8.1";
1404 name = "setuptools-20.8.1";
1418 buildInputs = with self; [];
1405 buildInputs = with self; [];
1419 doCheck = false;
1406 doCheck = false;
1420 propagatedBuildInputs = with self; [];
1407 propagatedBuildInputs = with self; [];
1421 src = fetchurl {
1408 src = fetchurl {
1422 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1409 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1423 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1410 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1424 };
1411 };
1425 meta = {
1412 meta = {
1426 license = [ pkgs.lib.licenses.mit ];
1413 license = [ pkgs.lib.licenses.mit ];
1427 };
1414 };
1428 };
1415 };
1429 setuptools-scm = super.buildPythonPackage {
1416 setuptools-scm = super.buildPythonPackage {
1430 name = "setuptools-scm-1.11.0";
1417 name = "setuptools-scm-1.11.0";
1431 buildInputs = with self; [];
1418 buildInputs = with self; [];
1432 doCheck = false;
1419 doCheck = false;
1433 propagatedBuildInputs = with self; [];
1420 propagatedBuildInputs = with self; [];
1434 src = fetchurl {
1421 src = fetchurl {
1435 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1422 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1436 md5 = "4c5c896ba52e134bbc3507bac6400087";
1423 md5 = "4c5c896ba52e134bbc3507bac6400087";
1437 };
1424 };
1438 meta = {
1425 meta = {
1439 license = [ pkgs.lib.licenses.mit ];
1426 license = [ pkgs.lib.licenses.mit ];
1440 };
1427 };
1441 };
1428 };
1442 simplejson = super.buildPythonPackage {
1429 simplejson = super.buildPythonPackage {
1443 name = "simplejson-3.7.2";
1430 name = "simplejson-3.7.2";
1444 buildInputs = with self; [];
1431 buildInputs = with self; [];
1445 doCheck = false;
1432 doCheck = false;
1446 propagatedBuildInputs = with self; [];
1433 propagatedBuildInputs = with self; [];
1447 src = fetchurl {
1434 src = fetchurl {
1448 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1435 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1449 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1436 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1450 };
1437 };
1451 meta = {
1438 meta = {
1452 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1439 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1453 };
1440 };
1454 };
1441 };
1455 six = super.buildPythonPackage {
1442 six = super.buildPythonPackage {
1456 name = "six-1.9.0";
1443 name = "six-1.9.0";
1457 buildInputs = with self; [];
1444 buildInputs = with self; [];
1458 doCheck = false;
1445 doCheck = false;
1459 propagatedBuildInputs = with self; [];
1446 propagatedBuildInputs = with self; [];
1460 src = fetchurl {
1447 src = fetchurl {
1461 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1448 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1462 md5 = "476881ef4012262dfc8adc645ee786c4";
1449 md5 = "476881ef4012262dfc8adc645ee786c4";
1463 };
1450 };
1464 meta = {
1451 meta = {
1465 license = [ pkgs.lib.licenses.mit ];
1452 license = [ pkgs.lib.licenses.mit ];
1466 };
1453 };
1467 };
1454 };
1468 subprocess32 = super.buildPythonPackage {
1455 subprocess32 = super.buildPythonPackage {
1469 name = "subprocess32-3.2.6";
1456 name = "subprocess32-3.2.6";
1470 buildInputs = with self; [];
1457 buildInputs = with self; [];
1471 doCheck = false;
1458 doCheck = false;
1472 propagatedBuildInputs = with self; [];
1459 propagatedBuildInputs = with self; [];
1473 src = fetchurl {
1460 src = fetchurl {
1474 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1461 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1475 md5 = "754c5ab9f533e764f931136974b618f1";
1462 md5 = "754c5ab9f533e764f931136974b618f1";
1476 };
1463 };
1477 meta = {
1464 meta = {
1478 license = [ pkgs.lib.licenses.psfl ];
1465 license = [ pkgs.lib.licenses.psfl ];
1479 };
1466 };
1480 };
1467 };
1481 supervisor = super.buildPythonPackage {
1468 supervisor = super.buildPythonPackage {
1482 name = "supervisor-3.3.0";
1469 name = "supervisor-3.3.0";
1483 buildInputs = with self; [];
1470 buildInputs = with self; [];
1484 doCheck = false;
1471 doCheck = false;
1485 propagatedBuildInputs = with self; [meld3];
1472 propagatedBuildInputs = with self; [meld3];
1486 src = fetchurl {
1473 src = fetchurl {
1487 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1474 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1488 md5 = "46bac00378d1eddb616752b990c67416";
1475 md5 = "46bac00378d1eddb616752b990c67416";
1489 };
1476 };
1490 meta = {
1477 meta = {
1491 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1478 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1492 };
1479 };
1493 };
1480 };
1494 transifex-client = super.buildPythonPackage {
1481 transifex-client = super.buildPythonPackage {
1495 name = "transifex-client-0.10";
1482 name = "transifex-client-0.10";
1496 buildInputs = with self; [];
1483 buildInputs = with self; [];
1497 doCheck = false;
1484 doCheck = false;
1498 propagatedBuildInputs = with self; [];
1485 propagatedBuildInputs = with self; [];
1499 src = fetchurl {
1486 src = fetchurl {
1500 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1487 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1501 md5 = "5549538d84b8eede6b254cd81ae024fa";
1488 md5 = "5549538d84b8eede6b254cd81ae024fa";
1502 };
1489 };
1503 meta = {
1490 meta = {
1504 license = [ pkgs.lib.licenses.gpl2 ];
1491 license = [ pkgs.lib.licenses.gpl2 ];
1505 };
1492 };
1506 };
1493 };
1507 translationstring = super.buildPythonPackage {
1494 translationstring = super.buildPythonPackage {
1508 name = "translationstring-1.3";
1495 name = "translationstring-1.3";
1509 buildInputs = with self; [];
1496 buildInputs = with self; [];
1510 doCheck = false;
1497 doCheck = false;
1511 propagatedBuildInputs = with self; [];
1498 propagatedBuildInputs = with self; [];
1512 src = fetchurl {
1499 src = fetchurl {
1513 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1500 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1514 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1501 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1515 };
1502 };
1516 meta = {
1503 meta = {
1517 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1504 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1518 };
1505 };
1519 };
1506 };
1520 trollius = super.buildPythonPackage {
1507 trollius = super.buildPythonPackage {
1521 name = "trollius-1.0.4";
1508 name = "trollius-1.0.4";
1522 buildInputs = with self; [];
1509 buildInputs = with self; [];
1523 doCheck = false;
1510 doCheck = false;
1524 propagatedBuildInputs = with self; [futures];
1511 propagatedBuildInputs = with self; [futures];
1525 src = fetchurl {
1512 src = fetchurl {
1526 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1513 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1527 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1514 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1528 };
1515 };
1529 meta = {
1516 meta = {
1530 license = [ pkgs.lib.licenses.asl20 ];
1517 license = [ pkgs.lib.licenses.asl20 ];
1531 };
1518 };
1532 };
1519 };
1533 uWSGI = super.buildPythonPackage {
1520 uWSGI = super.buildPythonPackage {
1534 name = "uWSGI-2.0.11.2";
1521 name = "uWSGI-2.0.11.2";
1535 buildInputs = with self; [];
1522 buildInputs = with self; [];
1536 doCheck = false;
1523 doCheck = false;
1537 propagatedBuildInputs = with self; [];
1524 propagatedBuildInputs = with self; [];
1538 src = fetchurl {
1525 src = fetchurl {
1539 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1526 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1540 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1527 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1541 };
1528 };
1542 meta = {
1529 meta = {
1543 license = [ pkgs.lib.licenses.gpl2 ];
1530 license = [ pkgs.lib.licenses.gpl2 ];
1544 };
1531 };
1545 };
1532 };
1546 urllib3 = super.buildPythonPackage {
1533 urllib3 = super.buildPythonPackage {
1547 name = "urllib3-1.16";
1534 name = "urllib3-1.16";
1548 buildInputs = with self; [];
1535 buildInputs = with self; [];
1549 doCheck = false;
1536 doCheck = false;
1550 propagatedBuildInputs = with self; [];
1537 propagatedBuildInputs = with self; [];
1551 src = fetchurl {
1538 src = fetchurl {
1552 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1539 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1553 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1540 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1554 };
1541 };
1555 meta = {
1542 meta = {
1556 license = [ pkgs.lib.licenses.mit ];
1543 license = [ pkgs.lib.licenses.mit ];
1557 };
1544 };
1558 };
1545 };
1559 venusian = super.buildPythonPackage {
1546 venusian = super.buildPythonPackage {
1560 name = "venusian-1.0";
1547 name = "venusian-1.0";
1561 buildInputs = with self; [];
1548 buildInputs = with self; [];
1562 doCheck = false;
1549 doCheck = false;
1563 propagatedBuildInputs = with self; [];
1550 propagatedBuildInputs = with self; [];
1564 src = fetchurl {
1551 src = fetchurl {
1565 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1552 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1566 md5 = "dccf2eafb7113759d60c86faf5538756";
1553 md5 = "dccf2eafb7113759d60c86faf5538756";
1567 };
1554 };
1568 meta = {
1555 meta = {
1569 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1556 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1570 };
1557 };
1571 };
1558 };
1572 waitress = super.buildPythonPackage {
1559 waitress = super.buildPythonPackage {
1573 name = "waitress-0.8.9";
1560 name = "waitress-0.8.9";
1574 buildInputs = with self; [];
1561 buildInputs = with self; [];
1575 doCheck = false;
1562 doCheck = false;
1576 propagatedBuildInputs = with self; [setuptools];
1563 propagatedBuildInputs = with self; [setuptools];
1577 src = fetchurl {
1564 src = fetchurl {
1578 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1565 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1579 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1566 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1580 };
1567 };
1581 meta = {
1568 meta = {
1582 license = [ pkgs.lib.licenses.zpt21 ];
1569 license = [ pkgs.lib.licenses.zpt21 ];
1583 };
1570 };
1584 };
1571 };
1585 wsgiref = super.buildPythonPackage {
1572 wsgiref = super.buildPythonPackage {
1586 name = "wsgiref-0.1.2";
1573 name = "wsgiref-0.1.2";
1587 buildInputs = with self; [];
1574 buildInputs = with self; [];
1588 doCheck = false;
1575 doCheck = false;
1589 propagatedBuildInputs = with self; [];
1576 propagatedBuildInputs = with self; [];
1590 src = fetchurl {
1577 src = fetchurl {
1591 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1578 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1592 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1579 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1593 };
1580 };
1594 meta = {
1581 meta = {
1595 license = [ { fullName = "PSF or ZPL"; } ];
1582 license = [ { fullName = "PSF or ZPL"; } ];
1596 };
1583 };
1597 };
1584 };
1598 zope.cachedescriptors = super.buildPythonPackage {
1585 zope.cachedescriptors = super.buildPythonPackage {
1599 name = "zope.cachedescriptors-4.0.0";
1586 name = "zope.cachedescriptors-4.0.0";
1600 buildInputs = with self; [];
1587 buildInputs = with self; [];
1601 doCheck = false;
1588 doCheck = false;
1602 propagatedBuildInputs = with self; [setuptools];
1589 propagatedBuildInputs = with self; [setuptools];
1603 src = fetchurl {
1590 src = fetchurl {
1604 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1591 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1605 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1592 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1606 };
1593 };
1607 meta = {
1594 meta = {
1608 license = [ pkgs.lib.licenses.zpt21 ];
1595 license = [ pkgs.lib.licenses.zpt21 ];
1609 };
1596 };
1610 };
1597 };
1611 zope.deprecation = super.buildPythonPackage {
1598 zope.deprecation = super.buildPythonPackage {
1612 name = "zope.deprecation-4.1.2";
1599 name = "zope.deprecation-4.1.2";
1613 buildInputs = with self; [];
1600 buildInputs = with self; [];
1614 doCheck = false;
1601 doCheck = false;
1615 propagatedBuildInputs = with self; [setuptools];
1602 propagatedBuildInputs = with self; [setuptools];
1616 src = fetchurl {
1603 src = fetchurl {
1617 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1604 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1618 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1605 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1619 };
1606 };
1620 meta = {
1607 meta = {
1621 license = [ pkgs.lib.licenses.zpt21 ];
1608 license = [ pkgs.lib.licenses.zpt21 ];
1622 };
1609 };
1623 };
1610 };
1624 zope.event = super.buildPythonPackage {
1611 zope.event = super.buildPythonPackage {
1625 name = "zope.event-4.0.3";
1612 name = "zope.event-4.0.3";
1626 buildInputs = with self; [];
1613 buildInputs = with self; [];
1627 doCheck = false;
1614 doCheck = false;
1628 propagatedBuildInputs = with self; [setuptools];
1615 propagatedBuildInputs = with self; [setuptools];
1629 src = fetchurl {
1616 src = fetchurl {
1630 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1617 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1631 md5 = "9a3780916332b18b8b85f522bcc3e249";
1618 md5 = "9a3780916332b18b8b85f522bcc3e249";
1632 };
1619 };
1633 meta = {
1620 meta = {
1634 license = [ pkgs.lib.licenses.zpt21 ];
1621 license = [ pkgs.lib.licenses.zpt21 ];
1635 };
1622 };
1636 };
1623 };
1637 zope.interface = super.buildPythonPackage {
1624 zope.interface = super.buildPythonPackage {
1638 name = "zope.interface-4.1.3";
1625 name = "zope.interface-4.1.3";
1639 buildInputs = with self; [];
1626 buildInputs = with self; [];
1640 doCheck = false;
1627 doCheck = false;
1641 propagatedBuildInputs = with self; [setuptools];
1628 propagatedBuildInputs = with self; [setuptools];
1642 src = fetchurl {
1629 src = fetchurl {
1643 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1630 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1644 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1631 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1645 };
1632 };
1646 meta = {
1633 meta = {
1647 license = [ pkgs.lib.licenses.zpt21 ];
1634 license = [ pkgs.lib.licenses.zpt21 ];
1648 };
1635 };
1649 };
1636 };
1650
1637
1651 ### Test requirements
1638 ### Test requirements
1652
1639
1653
1640
1654 }
1641 }
@@ -1,152 +1,151 b''
1 Babel==1.3
1 Babel==1.3
2 Beaker==1.7.0
2 Beaker==1.7.0
3 CProfileV==1.0.6
3 CProfileV==1.0.6
4 Fabric==1.10.0
4 Fabric==1.10.0
5 FormEncode==1.2.4
5 FormEncode==1.2.4
6 Jinja2==2.7.3
6 Jinja2==2.7.3
7 Mako==1.0.1
7 Mako==1.0.1
8 Markdown==2.6.2
8 Markdown==2.6.2
9 MarkupSafe==0.23
9 MarkupSafe==0.23
10 MySQL-python==1.2.5
10 MySQL-python==1.2.5
11 Paste==2.0.2
11 Paste==2.0.2
12 PasteDeploy==1.5.2
12 PasteDeploy==1.5.2
13 PasteScript==1.7.5
13 PasteScript==1.7.5
14 Pygments==2.1.3
14 Pygments==2.1.3
15
15
16 # TODO: This version is not available on PyPI
16 # TODO: This version is not available on PyPI
17 # Pylons==1.0.2.dev20160108
17 # Pylons==1.0.2.dev20160108
18 Pylons==1.0.1
18 Pylons==1.0.1
19
19
20 # TODO: This version is not available, but newer ones are
20 # TODO: This version is not available, but newer ones are
21 # Pyro4==4.35
21 # Pyro4==4.35
22 Pyro4==4.41
22 Pyro4==4.41
23
23
24 # TODO: This should probably not be in here
24 # TODO: This should probably not be in here
25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26
26
27 # TODO: This is not really a dependency, we should add it only
27 # TODO: This is not really a dependency, we should add it only
28 # into the development environment, since there it is useful.
28 # into the development environment, since there it is useful.
29 # RhodeCodeVCSServer==3.9.0
29 # RhodeCodeVCSServer==3.9.0
30
30
31 Routes==1.13
31 Routes==1.13
32 SQLAlchemy==0.9.9
32 SQLAlchemy==0.9.9
33 Sphinx==1.2.2
33 Sphinx==1.2.2
34 Tempita==0.5.2
34 Tempita==0.5.2
35 URLObject==2.4.0
35 URLObject==2.4.0
36 WebError==0.10.3
36 WebError==0.10.3
37
37
38 # TODO: This is modified by us, needs a better integration. For now
38 # TODO: This is modified by us, needs a better integration. For now
39 # using the latest version before.
39 # using the latest version before.
40 # WebHelpers==1.3.dev20150807
40 # WebHelpers==1.3.dev20150807
41 WebHelpers==1.3
41 WebHelpers==1.3
42
42
43 WebHelpers2==2.0
43 WebHelpers2==2.0
44 WebOb==1.3.1
44 WebOb==1.3.1
45 WebTest==1.4.3
45 WebTest==1.4.3
46 Whoosh==2.7.0
46 Whoosh==2.7.0
47 alembic==0.8.4
47 alembic==0.8.4
48 amqplib==1.0.2
48 amqplib==1.0.2
49 anyjson==0.3.3
49 anyjson==0.3.3
50 appenlight-client==0.6.14
50 appenlight-client==0.6.14
51 authomatic==0.1.0.post1;
51 authomatic==0.1.0.post1;
52 backport-ipaddress==0.1
52 backport-ipaddress==0.1
53 bottle==0.12.8
53 bottle==0.12.8
54 bumpversion==0.5.3
54 bumpversion==0.5.3
55 celery==2.2.10
55 celery==2.2.10
56 click==5.1
56 click==5.1
57 colander==1.2
57 colander==1.2
58 configobj==5.0.6
58 configobj==5.0.6
59 cov-core==1.15.0
59 cov-core==1.15.0
60 coverage==3.7.1
60 coverage==3.7.1
61 cssselect==0.9.1
61 cssselect==0.9.1
62 decorator==3.4.2
62 decorator==3.4.2
63 docutils==0.12
63 docutils==0.12
64 dogpile.cache==0.6.1
64 dogpile.cache==0.6.1
65 dogpile.core==0.4.1
65 dogpile.core==0.4.1
66 dulwich==0.12.0
66 dulwich==0.12.0
67 ecdsa==0.11
67 ecdsa==0.11
68 flake8==2.4.1
68 flake8==2.4.1
69 future==0.14.3
69 future==0.14.3
70 futures==3.0.2
70 futures==3.0.2
71 gprof2dot==2015.12.1
71 gprof2dot==2015.12.1
72 gunicorn==19.6.0
72 gunicorn==19.6.0
73
73
74 # TODO: Needs subvertpy and blows up without Subversion headers,
74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 # actually we should not need this for Enterprise at all.
75 # actually we should not need this for Enterprise at all.
76 # hgsubversion==1.8.2
76 # hgsubversion==1.8.2
77
77
78 gnureadline==6.3.3
78 gnureadline==6.3.3
79 infrae.cache==1.0.1
79 infrae.cache==1.0.1
80 invoke==0.13.0
80 invoke==0.13.0
81 ipdb==0.8
81 ipdb==0.8
82 ipython==3.1.0
82 ipython==3.1.0
83 iso8601==0.1.11
83 iso8601==0.1.11
84 itsdangerous==0.24
84 itsdangerous==0.24
85 kombu==1.5.1
85 kombu==1.5.1
86 lxml==3.4.4
86 lxml==3.4.4
87 marshmallow==2.8.0
88 mccabe==0.3
87 mccabe==0.3
89 meld3==1.0.2
88 meld3==1.0.2
90 mock==1.0.1
89 mock==1.0.1
91 msgpack-python==0.4.6
90 msgpack-python==0.4.6
92 nose==1.3.6
91 nose==1.3.6
93 objgraph==2.0.0
92 objgraph==2.0.0
94 packaging==15.2
93 packaging==15.2
95 paramiko==1.15.1
94 paramiko==1.15.1
96 pep8==1.5.7
95 pep8==1.5.7
97 psutil==2.2.1
96 psutil==2.2.1
98 psycopg2==2.6.1
97 psycopg2==2.6.1
99 py==1.4.29
98 py==1.4.29
100 py-bcrypt==0.4
99 py-bcrypt==0.4
101 py-gfm==0.1.3
100 py-gfm==0.1.3
102 pycrypto==2.6.1
101 pycrypto==2.6.1
103 pycurl==7.19.5
102 pycurl==7.19.5
104 pyflakes==0.8.1
103 pyflakes==0.8.1
105 pyparsing==1.5.7
104 pyparsing==1.5.7
106 pyramid==1.6.1
105 pyramid==1.6.1
107 pyramid-beaker==0.8
106 pyramid-beaker==0.8
108 pyramid-debugtoolbar==2.4.2
107 pyramid-debugtoolbar==2.4.2
109 pyramid-jinja2==2.5
108 pyramid-jinja2==2.5
110 pyramid-mako==1.0.2
109 pyramid-mako==1.0.2
111 pysqlite==2.6.3
110 pysqlite==2.6.3
112 pytest==2.8.5
111 pytest==2.8.5
113 pytest-runner==2.7.1
112 pytest-runner==2.7.1
114 pytest-catchlog==1.2.2
113 pytest-catchlog==1.2.2
115 pytest-cov==1.8.1
114 pytest-cov==1.8.1
116 pytest-profiling==1.0.1
115 pytest-profiling==1.0.1
117 pytest-timeout==0.4
116 pytest-timeout==0.4
118 python-dateutil==1.5
117 python-dateutil==1.5
119 python-ldap==2.4.19
118 python-ldap==2.4.19
120 python-memcached==1.57
119 python-memcached==1.57
121 python-pam==1.8.2
120 python-pam==1.8.2
122 pytz==2015.4
121 pytz==2015.4
123 pyzmq==14.6.0
122 pyzmq==14.6.0
124
123
125 # TODO: This is not available in public
124 # TODO: This is not available in public
126 # rc-testdata==0.2.0
125 # rc-testdata==0.2.0
127
126
128 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
129
128
130
129
131 recaptcha-client==1.0.6
130 recaptcha-client==1.0.6
132 repoze.lru==0.6
131 repoze.lru==0.6
133 requests==2.9.1
132 requests==2.9.1
134 serpent==1.12
133 serpent==1.12
135 setproctitle==1.1.8
134 setproctitle==1.1.8
136 setuptools==20.8.1
135 setuptools==20.8.1
137 setuptools-scm==1.11.0
136 setuptools-scm==1.11.0
138 simplejson==3.7.2
137 simplejson==3.7.2
139 six==1.9.0
138 six==1.9.0
140 subprocess32==3.2.6
139 subprocess32==3.2.6
141 supervisor==3.3.0
140 supervisor==3.3.0
142 transifex-client==0.10
141 transifex-client==0.10
143 translationstring==1.3
142 translationstring==1.3
144 trollius==1.0.4
143 trollius==1.0.4
145 uWSGI==2.0.11.2
144 uWSGI==2.0.11.2
146 venusian==1.0
145 venusian==1.0
147 waitress==0.8.9
146 waitress==0.8.9
148 wsgiref==0.1.2
147 wsgiref==0.1.2
149 zope.cachedescriptors==4.0.0
148 zope.cachedescriptors==4.0.0
150 zope.deprecation==4.1.2
149 zope.deprecation==4.1.2
151 zope.event==4.0.3
150 zope.event==4.0.3
152 zope.interface==4.1.3
151 zope.interface==4.1.3
@@ -1,58 +1,58 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22
22
23 RhodeCode, a web based repository management software
23 RhodeCode, a web based repository management software
24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 """
25 """
26
26
27 import os
27 import os
28 import sys
28 import sys
29 import platform
29 import platform
30
30
31 VERSION = tuple(open(os.path.join(
31 VERSION = tuple(open(os.path.join(
32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33
33
34 BACKENDS = {
34 BACKENDS = {
35 'hg': 'Mercurial repository',
35 'hg': 'Mercurial repository',
36 'git': 'Git repository',
36 'git': 'Git repository',
37 'svn': 'Subversion repository',
37 'svn': 'Subversion repository',
38 }
38 }
39
39
40 CELERY_ENABLED = False
40 CELERY_ENABLED = False
41 CELERY_EAGER = False
41 CELERY_EAGER = False
42
42
43 # link to config for pylons
43 # link to config for pylons
44 CONFIG = {}
44 CONFIG = {}
45
45
46 # Linked module for extensions
46 # Linked module for extensions
47 EXTENSIONS = {}
47 EXTENSIONS = {}
48
48
49 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
49 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
50 __dbversion__ = 54 # defines current db version for migrations
50 __dbversion__ = 55 # defines current db version for migrations
51 __platform__ = platform.system()
51 __platform__ = platform.system()
52 __license__ = 'AGPLv3, and Commercial License'
52 __license__ = 'AGPLv3, and Commercial License'
53 __author__ = 'RhodeCode GmbH'
53 __author__ = 'RhodeCode GmbH'
54 __url__ = 'http://rhodecode.com'
54 __url__ = 'http://rhodecode.com'
55
55
56 is_windows = __platform__ in ['Windows']
56 is_windows = __platform__ in ['Windows']
57 is_unix = not is_windows
57 is_unix = not is_windows
58 is_test = False
58 is_test = False
@@ -1,124 +1,126 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23 import collections
23 import collections
24
24
25 from pylons import url
25 from pylons import url
26 from zope.interface import implementer
26 from zope.interface import implementer
27
27
28 from rhodecode.admin.interfaces import IAdminNavigationRegistry
28 from rhodecode.admin.interfaces import IAdminNavigationRegistry
29 from rhodecode.lib.utils import get_registry
29 from rhodecode.lib.utils import get_registry
30 from rhodecode.translation import _
30 from rhodecode.translation import _
31
31
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35 NavListEntry = collections.namedtuple('NavListEntry', ['key', 'name', 'url'])
35 NavListEntry = collections.namedtuple('NavListEntry', ['key', 'name', 'url'])
36
36
37
37
38 class NavEntry(object):
38 class NavEntry(object):
39 """
39 """
40 Represents an entry in the admin navigation.
40 Represents an entry in the admin navigation.
41
41
42 :param key: Unique identifier used to store reference in an OrderedDict.
42 :param key: Unique identifier used to store reference in an OrderedDict.
43 :param name: Display name, usually a translation string.
43 :param name: Display name, usually a translation string.
44 :param view_name: Name of the view, used generate the URL.
44 :param view_name: Name of the view, used generate the URL.
45 :param pyramid: Indicator to use pyramid for URL generation. This should
45 :param pyramid: Indicator to use pyramid for URL generation. This should
46 be removed as soon as we are fully migrated to pyramid.
46 be removed as soon as we are fully migrated to pyramid.
47 """
47 """
48
48
49 def __init__(self, key, name, view_name, pyramid=False):
49 def __init__(self, key, name, view_name, pyramid=False):
50 self.key = key
50 self.key = key
51 self.name = name
51 self.name = name
52 self.view_name = view_name
52 self.view_name = view_name
53 self.pyramid = pyramid
53 self.pyramid = pyramid
54
54
55 def generate_url(self, request):
55 def generate_url(self, request):
56 if self.pyramid:
56 if self.pyramid:
57 if hasattr(request, 'route_path'):
57 if hasattr(request, 'route_path'):
58 return request.route_path(self.view_name)
58 return request.route_path(self.view_name)
59 else:
59 else:
60 # TODO: johbo: Remove this after migrating to pyramid.
60 # TODO: johbo: Remove this after migrating to pyramid.
61 # We need the pyramid request here to generate URLs to pyramid
61 # We need the pyramid request here to generate URLs to pyramid
62 # views from within pylons views.
62 # views from within pylons views.
63 from pyramid.threadlocal import get_current_request
63 from pyramid.threadlocal import get_current_request
64 pyramid_request = get_current_request()
64 pyramid_request = get_current_request()
65 return pyramid_request.route_path(self.view_name)
65 return pyramid_request.route_path(self.view_name)
66 else:
66 else:
67 return url(self.view_name)
67 return url(self.view_name)
68
68
69
69
70 @implementer(IAdminNavigationRegistry)
70 @implementer(IAdminNavigationRegistry)
71 class NavigationRegistry(object):
71 class NavigationRegistry(object):
72
72
73 _base_entries = [
73 _base_entries = [
74 NavEntry('global', _('Global'), 'admin_settings_global'),
74 NavEntry('global', _('Global'), 'admin_settings_global'),
75 NavEntry('vcs', _('VCS'), 'admin_settings_vcs'),
75 NavEntry('vcs', _('VCS'), 'admin_settings_vcs'),
76 NavEntry('visual', _('Visual'), 'admin_settings_visual'),
76 NavEntry('visual', _('Visual'), 'admin_settings_visual'),
77 NavEntry('mapping', _('Remap and Rescan'), 'admin_settings_mapping'),
77 NavEntry('mapping', _('Remap and Rescan'), 'admin_settings_mapping'),
78 NavEntry('issuetracker', _('Issue Tracker'),
78 NavEntry('issuetracker', _('Issue Tracker'),
79 'admin_settings_issuetracker'),
79 'admin_settings_issuetracker'),
80 NavEntry('email', _('Email'), 'admin_settings_email'),
80 NavEntry('email', _('Email'), 'admin_settings_email'),
81 NavEntry('hooks', _('Hooks'), 'admin_settings_hooks'),
81 NavEntry('hooks', _('Hooks'), 'admin_settings_hooks'),
82 NavEntry('search', _('Full Text Search'), 'admin_settings_search'),
82 NavEntry('search', _('Full Text Search'), 'admin_settings_search'),
83 NavEntry('integrations', _('Integrations'),
84 'global_integrations_home', pyramid=True),
83 NavEntry('system', _('System Info'), 'admin_settings_system'),
85 NavEntry('system', _('System Info'), 'admin_settings_system'),
84 NavEntry('open_source', _('Open Source Licenses'),
86 NavEntry('open_source', _('Open Source Licenses'),
85 'admin_settings_open_source', pyramid=True),
87 'admin_settings_open_source', pyramid=True),
86 # TODO: marcink: we disable supervisor now until the supervisor stats
88 # TODO: marcink: we disable supervisor now until the supervisor stats
87 # page is fixed in the nix configuration
89 # page is fixed in the nix configuration
88 # NavEntry('supervisor', _('Supervisor'), 'admin_settings_supervisor'),
90 # NavEntry('supervisor', _('Supervisor'), 'admin_settings_supervisor'),
89 ]
91 ]
90
92
91 _labs_entry = NavEntry('labs', _('Labs'),
93 _labs_entry = NavEntry('labs', _('Labs'),
92 'admin_settings_labs')
94 'admin_settings_labs')
93
95
94 def __init__(self, labs_active=False):
96 def __init__(self, labs_active=False):
95 self._registered_entries = collections.OrderedDict([
97 self._registered_entries = collections.OrderedDict([
96 (item.key, item) for item in self.__class__._base_entries
98 (item.key, item) for item in self.__class__._base_entries
97 ])
99 ])
98
100
99 if labs_active:
101 if labs_active:
100 self.add_entry(self._labs_entry)
102 self.add_entry(self._labs_entry)
101
103
102 def add_entry(self, entry):
104 def add_entry(self, entry):
103 self._registered_entries[entry.key] = entry
105 self._registered_entries[entry.key] = entry
104
106
105 def get_navlist(self, request):
107 def get_navlist(self, request):
106 navlist = [NavListEntry(i.key, i.name, i.generate_url(request))
108 navlist = [NavListEntry(i.key, i.name, i.generate_url(request))
107 for i in self._registered_entries.values()]
109 for i in self._registered_entries.values()]
108 return navlist
110 return navlist
109
111
110
112
111 def navigation_registry(request):
113 def navigation_registry(request):
112 """
114 """
113 Helper that returns the admin navigation registry.
115 Helper that returns the admin navigation registry.
114 """
116 """
115 pyramid_registry = get_registry(request)
117 pyramid_registry = get_registry(request)
116 nav_registry = pyramid_registry.queryUtility(IAdminNavigationRegistry)
118 nav_registry = pyramid_registry.queryUtility(IAdminNavigationRegistry)
117 return nav_registry
119 return nav_registry
118
120
119
121
120 def navigation_list(request):
122 def navigation_list(request):
121 """
123 """
122 Helper that returns the admin navigation as list of NavListEntry objects.
124 Helper that returns the admin navigation as list of NavListEntry objects.
123 """
125 """
124 return navigation_registry(request).get_navlist(request)
126 return navigation_registry(request).get_navlist(request)
@@ -1,387 +1,388 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pylons middleware initialization
22 Pylons middleware initialization
23 """
23 """
24 import logging
24 import logging
25
25
26 from paste.registry import RegistryManager
26 from paste.registry import RegistryManager
27 from paste.gzipper import make_gzip_middleware
27 from paste.gzipper import make_gzip_middleware
28 from pylons.wsgiapp import PylonsApp
28 from pylons.wsgiapp import PylonsApp
29 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.authorization import ACLAuthorizationPolicy
30 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.static import static_view
31 from pyramid.static import static_view
32 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
35 import pyramid.httpexceptions as httpexceptions
35 import pyramid.httpexceptions as httpexceptions
36 from pyramid.renderers import render_to_response, render
36 from pyramid.renderers import render_to_response, render
37 from routes.middleware import RoutesMiddleware
37 from routes.middleware import RoutesMiddleware
38 import routes.util
38 import routes.util
39
39
40 import rhodecode
40 import rhodecode
41 from rhodecode.config import patches
41 from rhodecode.config import patches
42 from rhodecode.config.environment import (
42 from rhodecode.config.environment import (
43 load_environment, load_pyramid_environment)
43 load_environment, load_pyramid_environment)
44 from rhodecode.lib.middleware import csrf
44 from rhodecode.lib.middleware import csrf
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 from rhodecode.lib.middleware.vcs import VCSMiddleware
48 from rhodecode.lib.middleware.vcs import VCSMiddleware
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50
50
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
56 """Create a Pylons WSGI application and return it
56 """Create a Pylons WSGI application and return it
57
57
58 ``global_conf``
58 ``global_conf``
59 The inherited configuration for this application. Normally from
59 The inherited configuration for this application. Normally from
60 the [DEFAULT] section of the Paste ini file.
60 the [DEFAULT] section of the Paste ini file.
61
61
62 ``full_stack``
62 ``full_stack``
63 Whether or not this application provides a full WSGI stack (by
63 Whether or not this application provides a full WSGI stack (by
64 default, meaning it handles its own exceptions and errors).
64 default, meaning it handles its own exceptions and errors).
65 Disable full_stack when this application is "managed" by
65 Disable full_stack when this application is "managed" by
66 another WSGI middleware.
66 another WSGI middleware.
67
67
68 ``app_conf``
68 ``app_conf``
69 The application's local configuration. Normally specified in
69 The application's local configuration. Normally specified in
70 the [app:<name>] section of the Paste ini file (where <name>
70 the [app:<name>] section of the Paste ini file (where <name>
71 defaults to main).
71 defaults to main).
72
72
73 """
73 """
74 # Apply compatibility patches
74 # Apply compatibility patches
75 patches.kombu_1_5_1_python_2_7_11()
75 patches.kombu_1_5_1_python_2_7_11()
76 patches.inspect_getargspec()
76 patches.inspect_getargspec()
77
77
78 # Configure the Pylons environment
78 # Configure the Pylons environment
79 config = load_environment(global_conf, app_conf)
79 config = load_environment(global_conf, app_conf)
80
80
81 # The Pylons WSGI app
81 # The Pylons WSGI app
82 app = PylonsApp(config=config)
82 app = PylonsApp(config=config)
83 if rhodecode.is_test:
83 if rhodecode.is_test:
84 app = csrf.CSRFDetector(app)
84 app = csrf.CSRFDetector(app)
85
85
86 expected_origin = config.get('expected_origin')
86 expected_origin = config.get('expected_origin')
87 if expected_origin:
87 if expected_origin:
88 # The API can be accessed from other Origins.
88 # The API can be accessed from other Origins.
89 app = csrf.OriginChecker(app, expected_origin,
89 app = csrf.OriginChecker(app, expected_origin,
90 skip_urls=[routes.util.url_for('api')])
90 skip_urls=[routes.util.url_for('api')])
91
91
92
92
93 if asbool(full_stack):
93 if asbool(full_stack):
94
94
95 # Appenlight monitoring and error handler
95 # Appenlight monitoring and error handler
96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
97
97
98 # we want our low level middleware to get to the request ASAP. We don't
98 # we want our low level middleware to get to the request ASAP. We don't
99 # need any pylons stack middleware in them
99 # need any pylons stack middleware in them
100 app = VCSMiddleware(app, config, appenlight_client)
100 app = VCSMiddleware(app, config, appenlight_client)
101
101
102 # Establish the Registry for this application
102 # Establish the Registry for this application
103 app = RegistryManager(app)
103 app = RegistryManager(app)
104
104
105 app.config = config
105 app.config = config
106
106
107 return app
107 return app
108
108
109
109
110 def make_pyramid_app(global_config, **settings):
110 def make_pyramid_app(global_config, **settings):
111 """
111 """
112 Constructs the WSGI application based on Pyramid and wraps the Pylons based
112 Constructs the WSGI application based on Pyramid and wraps the Pylons based
113 application.
113 application.
114
114
115 Specials:
115 Specials:
116
116
117 * We migrate from Pylons to Pyramid. While doing this, we keep both
117 * We migrate from Pylons to Pyramid. While doing this, we keep both
118 frameworks functional. This involves moving some WSGI middlewares around
118 frameworks functional. This involves moving some WSGI middlewares around
119 and providing access to some data internals, so that the old code is
119 and providing access to some data internals, so that the old code is
120 still functional.
120 still functional.
121
121
122 * The application can also be integrated like a plugin via the call to
122 * The application can also be integrated like a plugin via the call to
123 `includeme`. This is accompanied with the other utility functions which
123 `includeme`. This is accompanied with the other utility functions which
124 are called. Changing this should be done with great care to not break
124 are called. Changing this should be done with great care to not break
125 cases when these fragments are assembled from another place.
125 cases when these fragments are assembled from another place.
126
126
127 """
127 """
128 # The edition string should be available in pylons too, so we add it here
128 # The edition string should be available in pylons too, so we add it here
129 # before copying the settings.
129 # before copying the settings.
130 settings.setdefault('rhodecode.edition', 'Community Edition')
130 settings.setdefault('rhodecode.edition', 'Community Edition')
131
131
132 # As long as our Pylons application does expect "unprepared" settings, make
132 # As long as our Pylons application does expect "unprepared" settings, make
133 # sure that we keep an unmodified copy. This avoids unintentional change of
133 # sure that we keep an unmodified copy. This avoids unintentional change of
134 # behavior in the old application.
134 # behavior in the old application.
135 settings_pylons = settings.copy()
135 settings_pylons = settings.copy()
136
136
137 sanitize_settings_and_apply_defaults(settings)
137 sanitize_settings_and_apply_defaults(settings)
138 config = Configurator(settings=settings)
138 config = Configurator(settings=settings)
139 add_pylons_compat_data(config.registry, global_config, settings_pylons)
139 add_pylons_compat_data(config.registry, global_config, settings_pylons)
140
140
141 load_pyramid_environment(global_config, settings)
141 load_pyramid_environment(global_config, settings)
142
142
143 includeme(config)
143 includeme(config)
144 includeme_last(config)
144 includeme_last(config)
145 pyramid_app = config.make_wsgi_app()
145 pyramid_app = config.make_wsgi_app()
146 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
146 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
147 return pyramid_app
147 return pyramid_app
148
148
149
149
150 def add_pylons_compat_data(registry, global_config, settings):
150 def add_pylons_compat_data(registry, global_config, settings):
151 """
151 """
152 Attach data to the registry to support the Pylons integration.
152 Attach data to the registry to support the Pylons integration.
153 """
153 """
154 registry._pylons_compat_global_config = global_config
154 registry._pylons_compat_global_config = global_config
155 registry._pylons_compat_settings = settings
155 registry._pylons_compat_settings = settings
156
156
157
157
158 def webob_to_pyramid_http_response(webob_response):
158 def webob_to_pyramid_http_response(webob_response):
159 ResponseClass = httpexceptions.status_map[webob_response.status_int]
159 ResponseClass = httpexceptions.status_map[webob_response.status_int]
160 pyramid_response = ResponseClass(webob_response.status)
160 pyramid_response = ResponseClass(webob_response.status)
161 pyramid_response.status = webob_response.status
161 pyramid_response.status = webob_response.status
162 pyramid_response.headers.update(webob_response.headers)
162 pyramid_response.headers.update(webob_response.headers)
163 if pyramid_response.headers['content-type'] == 'text/html':
163 if pyramid_response.headers['content-type'] == 'text/html':
164 pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
164 pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
165 return pyramid_response
165 return pyramid_response
166
166
167
167
168 def error_handler(exception, request):
168 def error_handler(exception, request):
169 # TODO: dan: replace the old pylons error controller with this
169 # TODO: dan: replace the old pylons error controller with this
170 from rhodecode.model.settings import SettingsModel
170 from rhodecode.model.settings import SettingsModel
171 from rhodecode.lib.utils2 import AttributeDict
171 from rhodecode.lib.utils2 import AttributeDict
172
172
173 try:
173 try:
174 rc_config = SettingsModel().get_all_settings()
174 rc_config = SettingsModel().get_all_settings()
175 except Exception:
175 except Exception:
176 log.exception('failed to fetch settings')
176 log.exception('failed to fetch settings')
177 rc_config = {}
177 rc_config = {}
178
178
179 base_response = HTTPInternalServerError()
179 base_response = HTTPInternalServerError()
180 # prefer original exception for the response since it may have headers set
180 # prefer original exception for the response since it may have headers set
181 if isinstance(exception, HTTPError):
181 if isinstance(exception, HTTPError):
182 base_response = exception
182 base_response = exception
183
183
184 c = AttributeDict()
184 c = AttributeDict()
185 c.error_message = base_response.status
185 c.error_message = base_response.status
186 c.error_explanation = base_response.explanation or str(base_response)
186 c.error_explanation = base_response.explanation or str(base_response)
187 c.visual = AttributeDict()
187 c.visual = AttributeDict()
188
188
189 c.visual.rhodecode_support_url = (
189 c.visual.rhodecode_support_url = (
190 request.registry.settings.get('rhodecode_support_url') or
190 request.registry.settings.get('rhodecode_support_url') or
191 request.route_url('rhodecode_support')
191 request.route_url('rhodecode_support')
192 )
192 )
193 c.redirect_time = 0
193 c.redirect_time = 0
194 c.rhodecode_name = rc_config.get('rhodecode_title', '')
194 c.rhodecode_name = rc_config.get('rhodecode_title', '')
195 if not c.rhodecode_name:
195 if not c.rhodecode_name:
196 c.rhodecode_name = 'Rhodecode'
196 c.rhodecode_name = 'Rhodecode'
197
197
198 response = render_to_response(
198 response = render_to_response(
199 '/errors/error_document.html', {'c': c}, request=request,
199 '/errors/error_document.html', {'c': c}, request=request,
200 response=base_response)
200 response=base_response)
201
201
202 return response
202 return response
203
203
204
204
205 def includeme(config):
205 def includeme(config):
206 settings = config.registry.settings
206 settings = config.registry.settings
207
207
208 if asbool(settings.get('appenlight', 'false')):
208 if asbool(settings.get('appenlight', 'false')):
209 config.include('appenlight_client.ext.pyramid_tween')
209 config.include('appenlight_client.ext.pyramid_tween')
210
210
211 # Includes which are required. The application would fail without them.
211 # Includes which are required. The application would fail without them.
212 config.include('pyramid_mako')
212 config.include('pyramid_mako')
213 config.include('pyramid_beaker')
213 config.include('pyramid_beaker')
214 config.include('rhodecode.admin')
214 config.include('rhodecode.admin')
215 config.include('rhodecode.authentication')
215 config.include('rhodecode.authentication')
216 config.include('rhodecode.integrations')
216 config.include('rhodecode.login')
217 config.include('rhodecode.login')
217 config.include('rhodecode.tweens')
218 config.include('rhodecode.tweens')
218 config.include('rhodecode.api')
219 config.include('rhodecode.api')
219 config.add_route(
220 config.add_route(
220 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
221 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
221
222
222 # Set the authorization policy.
223 # Set the authorization policy.
223 authz_policy = ACLAuthorizationPolicy()
224 authz_policy = ACLAuthorizationPolicy()
224 config.set_authorization_policy(authz_policy)
225 config.set_authorization_policy(authz_policy)
225
226
226 # Set the default renderer for HTML templates to mako.
227 # Set the default renderer for HTML templates to mako.
227 config.add_mako_renderer('.html')
228 config.add_mako_renderer('.html')
228
229
229 # plugin information
230 # plugin information
230 config.registry.rhodecode_plugins = {}
231 config.registry.rhodecode_plugins = {}
231
232
232 config.add_directive(
233 config.add_directive(
233 'register_rhodecode_plugin', register_rhodecode_plugin)
234 'register_rhodecode_plugin', register_rhodecode_plugin)
234 # include RhodeCode plugins
235 # include RhodeCode plugins
235 includes = aslist(settings.get('rhodecode.includes', []))
236 includes = aslist(settings.get('rhodecode.includes', []))
236 for inc in includes:
237 for inc in includes:
237 config.include(inc)
238 config.include(inc)
238
239
239 pylons_app = make_app(
240 pylons_app = make_app(
240 config.registry._pylons_compat_global_config,
241 config.registry._pylons_compat_global_config,
241 **config.registry._pylons_compat_settings)
242 **config.registry._pylons_compat_settings)
242 config.registry._pylons_compat_config = pylons_app.config
243 config.registry._pylons_compat_config = pylons_app.config
243
244
244 pylons_app_as_view = wsgiapp(pylons_app)
245 pylons_app_as_view = wsgiapp(pylons_app)
245
246
246 # Protect from VCS Server error related pages when server is not available
247 # Protect from VCS Server error related pages when server is not available
247 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
248 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
248 if not vcs_server_enabled:
249 if not vcs_server_enabled:
249 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
250 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
250
251
251
252
252 def pylons_app_with_error_handler(context, request):
253 def pylons_app_with_error_handler(context, request):
253 """
254 """
254 Handle exceptions from rc pylons app:
255 Handle exceptions from rc pylons app:
255
256
256 - old webob type exceptions get converted to pyramid exceptions
257 - old webob type exceptions get converted to pyramid exceptions
257 - pyramid exceptions are passed to the error handler view
258 - pyramid exceptions are passed to the error handler view
258 """
259 """
259 try:
260 try:
260 response = pylons_app_as_view(context, request)
261 response = pylons_app_as_view(context, request)
261 if 400 <= response.status_int <= 599: # webob type error responses
262 if 400 <= response.status_int <= 599: # webob type error responses
262 return error_handler(
263 return error_handler(
263 webob_to_pyramid_http_response(response), request)
264 webob_to_pyramid_http_response(response), request)
264 except HTTPError as e: # pyramid type exceptions
265 except HTTPError as e: # pyramid type exceptions
265 return error_handler(e, request)
266 return error_handler(e, request)
266 except Exception:
267 except Exception:
267 if settings.get('debugtoolbar.enabled', False):
268 if settings.get('debugtoolbar.enabled', False):
268 raise
269 raise
269 return error_handler(HTTPInternalServerError(), request)
270 return error_handler(HTTPInternalServerError(), request)
270 return response
271 return response
271
272
272 # This is the glue which allows us to migrate in chunks. By registering the
273 # This is the glue which allows us to migrate in chunks. By registering the
273 # pylons based application as the "Not Found" view in Pyramid, we will
274 # pylons based application as the "Not Found" view in Pyramid, we will
274 # fallback to the old application each time the new one does not yet know
275 # fallback to the old application each time the new one does not yet know
275 # how to handle a request.
276 # how to handle a request.
276 config.add_notfound_view(pylons_app_with_error_handler)
277 config.add_notfound_view(pylons_app_with_error_handler)
277
278
278 if settings.get('debugtoolbar.enabled', False):
279 if settings.get('debugtoolbar.enabled', False):
279 # if toolbar, then only http type exceptions get caught and rendered
280 # if toolbar, then only http type exceptions get caught and rendered
280 ExcClass = HTTPError
281 ExcClass = HTTPError
281 else:
282 else:
282 # if no toolbar, then any exception gets caught and rendered
283 # if no toolbar, then any exception gets caught and rendered
283 ExcClass = Exception
284 ExcClass = Exception
284 config.add_view(error_handler, context=ExcClass)
285 config.add_view(error_handler, context=ExcClass)
285
286
286
287
def includeme_last(config):
    """
    Register the static-file catchall route and view.

    This must run after every other view registration, otherwise the
    catchall would shadow real routes.
    """
    settings = config.registry.settings

    # NOTE: johbo: A dedicated prefix for static files (e.g. '_static/')
    # would fully avoid name clashes with repository names (imagine a repo
    # called "css") and would make fronting with an external web server
    # easier. One option is to register both paths for a while and then
    # migrate over to the new location.
    #
    # Example: config.add_static_view('_static', path='rhodecode:public')

    # Serving static files with a catchall.
    if not settings['static_files']:
        return
    config.add_route('catchall_static', '/*subpath')
    catchall = static_view('rhodecode:public')
    config.add_view(catchall, route_name='catchall_static')
309
310
310
311
def wrap_app_in_wsgi_middlewares(pyramid_app, config):
    """
    Apply outer WSGI middlewares around the application.

    Part of this has been moved up from the Pylons layer, so that the
    data is also available if old Pylons code is hit through an already
    ported view.
    """
    settings = config.registry.settings
    app = pyramid_app

    # Enable https redirects based on HTTP_X_URL_SCHEME set by the proxy.
    app = HttpsFixup(app, settings)

    # RoutesMiddleware keeps the pylons compatibility tween working during
    # the migration to pyramid.
    app = RoutesMiddleware(
        app, config.registry._pylons_compat_config['routes.map'])

    if asbool(settings.get('appenlight', 'false')):
        app, _ = wrap_in_appenlight_if_enabled(
            app, config.registry._pylons_compat_config)

    # TODO: johbo: Don't really see why we enable the gzip middleware when
    # serving static files, might be something that should have its own
    # setting as well?
    if settings['static_files']:
        app = make_gzip_middleware(app, settings, compress_level=1)

    return app
342
343
343
344
def sanitize_settings_and_apply_defaults(settings):
    """
    Apply settings defaults and do all type conversion, in place.

    We would move all settings parsing and preparation into this place, so
    that we have only one place left which deals with this part. The
    remaining parts of the application would start to rely fully on well
    prepared settings. This piece would later be split up per topic to
    avoid a big fat monster function.

    :param settings: the pyramid registry settings dict, mutated in place
    :return: the same ``settings`` dict, for convenience
    """
    # Pyramid's mako renderer has to search in the templates folder so that
    # the old templates still work. Ported and new templates are expected
    # to use real asset specifications for the includes.
    mako_directories = settings.setdefault('mako.directories', [
        # Base templates of the original Pylons application
        'rhodecode:templates',
    ])
    log.debug(
        "Using the following Mako template directories: %s",
        mako_directories)

    # Default includes, possible to change as a user
    pyramid_includes = settings.setdefault('pyramid.includes', [
        'rhodecode.lib.middleware.request_wrapper',
    ])
    log.debug(
        "Using the following pyramid.includes: %s",
        pyramid_includes)

    # TODO: johbo: Re-think this, usually the call to config.include
    # should allow to pass in a prefix.
    settings.setdefault('rhodecode.api.url', '/_admin/api')

    # Coerce the boolean-ish string settings into real booleans.
    bool_defaults = (
        ('vcs.server.enable', 'true'),
        ('static_files', 'true'),
        ('is_test', 'false'),
    )
    for name, default in bool_defaults:
        _bool_setting(settings, name, default)

    return settings
384
385
385
386
def _bool_setting(settings, name, default):
    """Coerce ``settings[name]`` (falling back to ``default``) to a bool."""
    raw_value = settings.get(name, default)
    settings[name] = asbool(raw_value)
@@ -1,1141 +1,1154 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Routes configuration
22 Routes configuration
23
23
24 The more specific and detailed routes should be defined first so they
24 The more specific and detailed routes should be defined first so they
25 may take precedent over the more generic routes. For more information
25 may take precedent over the more generic routes. For more information
26 refer to the routes manual at http://routes.groovie.org/docs/
26 refer to the routes manual at http://routes.groovie.org/docs/
27
27
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 and _route_name variable which uses some of stored naming here to do redirects.
29 and _route_name variable which uses some of stored naming here to do redirects.
30 """
30 """
31 import os
31 import os
32 import re
32 import re
33 from routes import Mapper
33 from routes import Mapper
34
34
35 from rhodecode.config import routing_links
35 from rhodecode.config import routing_links
36
36
37 # prefix for non repository related links needs to be prefixed with `/`
37 # prefix for non repository related links needs to be prefixed with `/`
38 ADMIN_PREFIX = '/_admin'
38 ADMIN_PREFIX = '/_admin'
39
39
40 # Default requirements for URL parts
40 # Default requirements for URL parts
41 URL_NAME_REQUIREMENTS = {
41 URL_NAME_REQUIREMENTS = {
42 # group name can have a slash in them, but they must not end with a slash
42 # group name can have a slash in them, but they must not end with a slash
43 'group_name': r'.*?[^/]',
43 'group_name': r'.*?[^/]',
44 # repo names can have a slash in them, but they must not end with a slash
44 # repo names can have a slash in them, but they must not end with a slash
45 'repo_name': r'.*?[^/]',
45 'repo_name': r'.*?[^/]',
46 # file path eats up everything at the end
46 # file path eats up everything at the end
47 'f_path': r'.*',
47 'f_path': r'.*',
48 # reference types
48 # reference types
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
51 }
51 }
52
52
53
53
def add_route_requirements(route_path, requirements):
    """
    Adds regex requirements to pyramid routes using a mapping dict

    >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
    '/{action}/{id:\d+}'

    """
    result = route_path
    for name, pattern in requirements.items():
        # '{name}' becomes '{name:pattern}' so pyramid enforces the regex
        plain = '{%s}' % name
        constrained = '{%s:%s}' % (name, pattern)
        result = result.replace(plain, constrained)
    return result
65
66
class JSRoutesMapper(Mapper):
    """
    Wrapper for routes.Mapper that tracks routes for pyroutes.js export.
    """
    # only simple named routes (no slashes/placeholders) may be exported
    _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
    # matches '{name}' style and ':(name)' style route arguments
    _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')

    def __init__(self, *args, **kw):
        super(JSRoutesMapper, self).__init__(*args, **kw)
        self._jsroutes = []

    def connect(self, *args, **kw):
        """
        Wrapper for connect to take an extra argument jsroute=True

        :param jsroute: boolean, if True will add the route to the pyroutes list
        """
        wants_jsroute = kw.pop('jsroute', False)
        if wants_jsroute:
            route_name = args[0]
            if self._named_route_regex.match(route_name) is None:
                raise Exception('only named routes can be added to pyroutes')
            self._jsroutes.append(route_name)

        super(JSRoutesMapper, self).connect(*args, **kw)

    def _extract_route_information(self, route):
        """
        Convert a route into tuple(name, path, args), eg:
        ('user_profile', '/profile/%(username)s', ['username'])
        """
        def _to_placeholder(matchobj):
            # group(1) is the '{name[:regex]}' form, group(2) the ':(name)'
            # form; either way, emit a '%(name)s' substitution slot
            braced = matchobj.group(1)
            if braced:
                return "%%(%s)s" % braced.split(':')[0]
            return "%%(%s)s" % matchobj.group(2)

        routepath = self._argument_prog.sub(_to_placeholder, route.routepath)
        arg_names = [
            (arg[0].split(':')[0] if arg[0] != '' else arg[1])
            for arg in self._argument_prog.findall(route.routepath)]
        return (route.name, routepath, arg_names)

    def jsroutes(self):
        """
        Return a list of pyroutes.js compatible routes
        """
        for route_name in self._jsroutes:
            route = self._routenames[route_name]
            yield self._extract_route_information(route)
103
116
104
117
105 def make_map(config):
118 def make_map(config):
106 """Create, configure and return the routes Mapper"""
119 """Create, configure and return the routes Mapper"""
107 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
120 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
108 always_scan=config['debug'])
121 always_scan=config['debug'])
109 rmap.minimization = False
122 rmap.minimization = False
110 rmap.explicit = False
123 rmap.explicit = False
111
124
112 from rhodecode.lib.utils2 import str2bool
125 from rhodecode.lib.utils2 import str2bool
113 from rhodecode.model import repo, repo_group
126 from rhodecode.model import repo, repo_group
114
127
115 def check_repo(environ, match_dict):
128 def check_repo(environ, match_dict):
116 """
129 """
117 check for valid repository for proper 404 handling
130 check for valid repository for proper 404 handling
118
131
119 :param environ:
132 :param environ:
120 :param match_dict:
133 :param match_dict:
121 """
134 """
122 repo_name = match_dict.get('repo_name')
135 repo_name = match_dict.get('repo_name')
123
136
124 if match_dict.get('f_path'):
137 if match_dict.get('f_path'):
125 # fix for multiple initial slashes that causes errors
138 # fix for multiple initial slashes that causes errors
126 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
139 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
127 repo_model = repo.RepoModel()
140 repo_model = repo.RepoModel()
128 by_name_match = repo_model.get_by_repo_name(repo_name)
141 by_name_match = repo_model.get_by_repo_name(repo_name)
129 # if we match quickly from database, short circuit the operation,
142 # if we match quickly from database, short circuit the operation,
130 # and validate repo based on the type.
143 # and validate repo based on the type.
131 if by_name_match:
144 if by_name_match:
132 return True
145 return True
133
146
134 by_id_match = repo_model.get_repo_by_id(repo_name)
147 by_id_match = repo_model.get_repo_by_id(repo_name)
135 if by_id_match:
148 if by_id_match:
136 repo_name = by_id_match.repo_name
149 repo_name = by_id_match.repo_name
137 match_dict['repo_name'] = repo_name
150 match_dict['repo_name'] = repo_name
138 return True
151 return True
139
152
140 return False
153 return False
141
154
142 def check_group(environ, match_dict):
155 def check_group(environ, match_dict):
143 """
156 """
144 check for valid repository group path for proper 404 handling
157 check for valid repository group path for proper 404 handling
145
158
146 :param environ:
159 :param environ:
147 :param match_dict:
160 :param match_dict:
148 """
161 """
149 repo_group_name = match_dict.get('group_name')
162 repo_group_name = match_dict.get('group_name')
150 repo_group_model = repo_group.RepoGroupModel()
163 repo_group_model = repo_group.RepoGroupModel()
151 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
164 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
152 if by_name_match:
165 if by_name_match:
153 return True
166 return True
154
167
155 return False
168 return False
156
169
157 def check_user_group(environ, match_dict):
170 def check_user_group(environ, match_dict):
158 """
171 """
159 check for valid user group for proper 404 handling
172 check for valid user group for proper 404 handling
160
173
161 :param environ:
174 :param environ:
162 :param match_dict:
175 :param match_dict:
163 """
176 """
164 return True
177 return True
165
178
166 def check_int(environ, match_dict):
179 def check_int(environ, match_dict):
167 return match_dict.get('id').isdigit()
180 return match_dict.get('id').isdigit()
168
181
169
182
170 #==========================================================================
183 #==========================================================================
171 # CUSTOM ROUTES HERE
184 # CUSTOM ROUTES HERE
172 #==========================================================================
185 #==========================================================================
173
186
174 # MAIN PAGE
187 # MAIN PAGE
175 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
188 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
176 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
189 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
177 action='goto_switcher_data')
190 action='goto_switcher_data')
178 rmap.connect('repo_list_data', '/_repos', controller='home',
191 rmap.connect('repo_list_data', '/_repos', controller='home',
179 action='repo_list_data')
192 action='repo_list_data')
180
193
181 rmap.connect('user_autocomplete_data', '/_users', controller='home',
194 rmap.connect('user_autocomplete_data', '/_users', controller='home',
182 action='user_autocomplete_data', jsroute=True)
195 action='user_autocomplete_data', jsroute=True)
183 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
196 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
184 action='user_group_autocomplete_data')
197 action='user_group_autocomplete_data')
185
198
186 rmap.connect(
199 rmap.connect(
187 'user_profile', '/_profiles/{username}', controller='users',
200 'user_profile', '/_profiles/{username}', controller='users',
188 action='user_profile')
201 action='user_profile')
189
202
190 # TODO: johbo: Static links, to be replaced by our redirection mechanism
203 # TODO: johbo: Static links, to be replaced by our redirection mechanism
191 rmap.connect('rst_help',
204 rmap.connect('rst_help',
192 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
205 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
193 _static=True)
206 _static=True)
194 rmap.connect('markdown_help',
207 rmap.connect('markdown_help',
195 'http://daringfireball.net/projects/markdown/syntax',
208 'http://daringfireball.net/projects/markdown/syntax',
196 _static=True)
209 _static=True)
197 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
210 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
198 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
211 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
199 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
212 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
200 # TODO: anderson - making this a static link since redirect won't play
213 # TODO: anderson - making this a static link since redirect won't play
201 # nice with POST requests
214 # nice with POST requests
202 rmap.connect('enterprise_license_convert_from_old',
215 rmap.connect('enterprise_license_convert_from_old',
203 'https://rhodecode.com/u/license-upgrade',
216 'https://rhodecode.com/u/license-upgrade',
204 _static=True)
217 _static=True)
205
218
206 routing_links.connect_redirection_links(rmap)
219 routing_links.connect_redirection_links(rmap)
207
220
208 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
221 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
209 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
222 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
210
223
211 # ADMIN REPOSITORY ROUTES
224 # ADMIN REPOSITORY ROUTES
212 with rmap.submapper(path_prefix=ADMIN_PREFIX,
225 with rmap.submapper(path_prefix=ADMIN_PREFIX,
213 controller='admin/repos') as m:
226 controller='admin/repos') as m:
214 m.connect('repos', '/repos',
227 m.connect('repos', '/repos',
215 action='create', conditions={'method': ['POST']})
228 action='create', conditions={'method': ['POST']})
216 m.connect('repos', '/repos',
229 m.connect('repos', '/repos',
217 action='index', conditions={'method': ['GET']})
230 action='index', conditions={'method': ['GET']})
218 m.connect('new_repo', '/create_repository', jsroute=True,
231 m.connect('new_repo', '/create_repository', jsroute=True,
219 action='create_repository', conditions={'method': ['GET']})
232 action='create_repository', conditions={'method': ['GET']})
220 m.connect('/repos/{repo_name}',
233 m.connect('/repos/{repo_name}',
221 action='update', conditions={'method': ['PUT'],
234 action='update', conditions={'method': ['PUT'],
222 'function': check_repo},
235 'function': check_repo},
223 requirements=URL_NAME_REQUIREMENTS)
236 requirements=URL_NAME_REQUIREMENTS)
224 m.connect('delete_repo', '/repos/{repo_name}',
237 m.connect('delete_repo', '/repos/{repo_name}',
225 action='delete', conditions={'method': ['DELETE']},
238 action='delete', conditions={'method': ['DELETE']},
226 requirements=URL_NAME_REQUIREMENTS)
239 requirements=URL_NAME_REQUIREMENTS)
227 m.connect('repo', '/repos/{repo_name}',
240 m.connect('repo', '/repos/{repo_name}',
228 action='show', conditions={'method': ['GET'],
241 action='show', conditions={'method': ['GET'],
229 'function': check_repo},
242 'function': check_repo},
230 requirements=URL_NAME_REQUIREMENTS)
243 requirements=URL_NAME_REQUIREMENTS)
231
244
232 # ADMIN REPOSITORY GROUPS ROUTES
245 # ADMIN REPOSITORY GROUPS ROUTES
233 with rmap.submapper(path_prefix=ADMIN_PREFIX,
246 with rmap.submapper(path_prefix=ADMIN_PREFIX,
234 controller='admin/repo_groups') as m:
247 controller='admin/repo_groups') as m:
235 m.connect('repo_groups', '/repo_groups',
248 m.connect('repo_groups', '/repo_groups',
236 action='create', conditions={'method': ['POST']})
249 action='create', conditions={'method': ['POST']})
237 m.connect('repo_groups', '/repo_groups',
250 m.connect('repo_groups', '/repo_groups',
238 action='index', conditions={'method': ['GET']})
251 action='index', conditions={'method': ['GET']})
239 m.connect('new_repo_group', '/repo_groups/new',
252 m.connect('new_repo_group', '/repo_groups/new',
240 action='new', conditions={'method': ['GET']})
253 action='new', conditions={'method': ['GET']})
241 m.connect('update_repo_group', '/repo_groups/{group_name}',
254 m.connect('update_repo_group', '/repo_groups/{group_name}',
242 action='update', conditions={'method': ['PUT'],
255 action='update', conditions={'method': ['PUT'],
243 'function': check_group},
256 'function': check_group},
244 requirements=URL_NAME_REQUIREMENTS)
257 requirements=URL_NAME_REQUIREMENTS)
245
258
246 # EXTRAS REPO GROUP ROUTES
259 # EXTRAS REPO GROUP ROUTES
247 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
260 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
248 action='edit',
261 action='edit',
249 conditions={'method': ['GET'], 'function': check_group},
262 conditions={'method': ['GET'], 'function': check_group},
250 requirements=URL_NAME_REQUIREMENTS)
263 requirements=URL_NAME_REQUIREMENTS)
251 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
264 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
252 action='edit',
265 action='edit',
253 conditions={'method': ['PUT'], 'function': check_group},
266 conditions={'method': ['PUT'], 'function': check_group},
254 requirements=URL_NAME_REQUIREMENTS)
267 requirements=URL_NAME_REQUIREMENTS)
255
268
256 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
269 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
257 action='edit_repo_group_advanced',
270 action='edit_repo_group_advanced',
258 conditions={'method': ['GET'], 'function': check_group},
271 conditions={'method': ['GET'], 'function': check_group},
259 requirements=URL_NAME_REQUIREMENTS)
272 requirements=URL_NAME_REQUIREMENTS)
260 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
273 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
261 action='edit_repo_group_advanced',
274 action='edit_repo_group_advanced',
262 conditions={'method': ['PUT'], 'function': check_group},
275 conditions={'method': ['PUT'], 'function': check_group},
263 requirements=URL_NAME_REQUIREMENTS)
276 requirements=URL_NAME_REQUIREMENTS)
264
277
265 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
278 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
266 action='edit_repo_group_perms',
279 action='edit_repo_group_perms',
267 conditions={'method': ['GET'], 'function': check_group},
280 conditions={'method': ['GET'], 'function': check_group},
268 requirements=URL_NAME_REQUIREMENTS)
281 requirements=URL_NAME_REQUIREMENTS)
269 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
282 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
270 action='update_perms',
283 action='update_perms',
271 conditions={'method': ['PUT'], 'function': check_group},
284 conditions={'method': ['PUT'], 'function': check_group},
272 requirements=URL_NAME_REQUIREMENTS)
285 requirements=URL_NAME_REQUIREMENTS)
273
286
274 m.connect('delete_repo_group', '/repo_groups/{group_name}',
287 m.connect('delete_repo_group', '/repo_groups/{group_name}',
275 action='delete', conditions={'method': ['DELETE'],
288 action='delete', conditions={'method': ['DELETE'],
276 'function': check_group},
289 'function': check_group},
277 requirements=URL_NAME_REQUIREMENTS)
290 requirements=URL_NAME_REQUIREMENTS)
278
291
279 # ADMIN USER ROUTES
292 # ADMIN USER ROUTES
280 with rmap.submapper(path_prefix=ADMIN_PREFIX,
293 with rmap.submapper(path_prefix=ADMIN_PREFIX,
281 controller='admin/users') as m:
294 controller='admin/users') as m:
282 m.connect('users', '/users',
295 m.connect('users', '/users',
283 action='create', conditions={'method': ['POST']})
296 action='create', conditions={'method': ['POST']})
284 m.connect('users', '/users',
297 m.connect('users', '/users',
285 action='index', conditions={'method': ['GET']})
298 action='index', conditions={'method': ['GET']})
286 m.connect('new_user', '/users/new',
299 m.connect('new_user', '/users/new',
287 action='new', conditions={'method': ['GET']})
300 action='new', conditions={'method': ['GET']})
288 m.connect('update_user', '/users/{user_id}',
301 m.connect('update_user', '/users/{user_id}',
289 action='update', conditions={'method': ['PUT']})
302 action='update', conditions={'method': ['PUT']})
290 m.connect('delete_user', '/users/{user_id}',
303 m.connect('delete_user', '/users/{user_id}',
291 action='delete', conditions={'method': ['DELETE']})
304 action='delete', conditions={'method': ['DELETE']})
292 m.connect('edit_user', '/users/{user_id}/edit',
305 m.connect('edit_user', '/users/{user_id}/edit',
293 action='edit', conditions={'method': ['GET']})
306 action='edit', conditions={'method': ['GET']})
294 m.connect('user', '/users/{user_id}',
307 m.connect('user', '/users/{user_id}',
295 action='show', conditions={'method': ['GET']})
308 action='show', conditions={'method': ['GET']})
296 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
309 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
297 action='reset_password', conditions={'method': ['POST']})
310 action='reset_password', conditions={'method': ['POST']})
298 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
311 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
299 action='create_personal_repo_group', conditions={'method': ['POST']})
312 action='create_personal_repo_group', conditions={'method': ['POST']})
300
313
301 # EXTRAS USER ROUTES
314 # EXTRAS USER ROUTES
302 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
315 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
303 action='edit_advanced', conditions={'method': ['GET']})
316 action='edit_advanced', conditions={'method': ['GET']})
304 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
317 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
305 action='update_advanced', conditions={'method': ['PUT']})
318 action='update_advanced', conditions={'method': ['PUT']})
306
319
307 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
320 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
308 action='edit_auth_tokens', conditions={'method': ['GET']})
321 action='edit_auth_tokens', conditions={'method': ['GET']})
309 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
322 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
310 action='add_auth_token', conditions={'method': ['PUT']})
323 action='add_auth_token', conditions={'method': ['PUT']})
311 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
324 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
312 action='delete_auth_token', conditions={'method': ['DELETE']})
325 action='delete_auth_token', conditions={'method': ['DELETE']})
313
326
314 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
327 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
315 action='edit_global_perms', conditions={'method': ['GET']})
328 action='edit_global_perms', conditions={'method': ['GET']})
316 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
329 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
317 action='update_global_perms', conditions={'method': ['PUT']})
330 action='update_global_perms', conditions={'method': ['PUT']})
318
331
319 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
332 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
320 action='edit_perms_summary', conditions={'method': ['GET']})
333 action='edit_perms_summary', conditions={'method': ['GET']})
321
334
322 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
335 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
323 action='edit_emails', conditions={'method': ['GET']})
336 action='edit_emails', conditions={'method': ['GET']})
324 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
337 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
325 action='add_email', conditions={'method': ['PUT']})
338 action='add_email', conditions={'method': ['PUT']})
326 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
339 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
327 action='delete_email', conditions={'method': ['DELETE']})
340 action='delete_email', conditions={'method': ['DELETE']})
328
341
329 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
342 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
330 action='edit_ips', conditions={'method': ['GET']})
343 action='edit_ips', conditions={'method': ['GET']})
331 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
344 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
332 action='add_ip', conditions={'method': ['PUT']})
345 action='add_ip', conditions={'method': ['PUT']})
333 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
346 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
334 action='delete_ip', conditions={'method': ['DELETE']})
347 action='delete_ip', conditions={'method': ['DELETE']})
335
348
336 # ADMIN USER GROUPS REST ROUTES
349 # ADMIN USER GROUPS REST ROUTES
337 with rmap.submapper(path_prefix=ADMIN_PREFIX,
350 with rmap.submapper(path_prefix=ADMIN_PREFIX,
338 controller='admin/user_groups') as m:
351 controller='admin/user_groups') as m:
339 m.connect('users_groups', '/user_groups',
352 m.connect('users_groups', '/user_groups',
340 action='create', conditions={'method': ['POST']})
353 action='create', conditions={'method': ['POST']})
341 m.connect('users_groups', '/user_groups',
354 m.connect('users_groups', '/user_groups',
342 action='index', conditions={'method': ['GET']})
355 action='index', conditions={'method': ['GET']})
343 m.connect('new_users_group', '/user_groups/new',
356 m.connect('new_users_group', '/user_groups/new',
344 action='new', conditions={'method': ['GET']})
357 action='new', conditions={'method': ['GET']})
345 m.connect('update_users_group', '/user_groups/{user_group_id}',
358 m.connect('update_users_group', '/user_groups/{user_group_id}',
346 action='update', conditions={'method': ['PUT']})
359 action='update', conditions={'method': ['PUT']})
347 m.connect('delete_users_group', '/user_groups/{user_group_id}',
360 m.connect('delete_users_group', '/user_groups/{user_group_id}',
348 action='delete', conditions={'method': ['DELETE']})
361 action='delete', conditions={'method': ['DELETE']})
349 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
362 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
350 action='edit', conditions={'method': ['GET']},
363 action='edit', conditions={'method': ['GET']},
351 function=check_user_group)
364 function=check_user_group)
352
365
353 # EXTRAS USER GROUP ROUTES
366 # EXTRAS USER GROUP ROUTES
354 m.connect('edit_user_group_global_perms',
367 m.connect('edit_user_group_global_perms',
355 '/user_groups/{user_group_id}/edit/global_permissions',
368 '/user_groups/{user_group_id}/edit/global_permissions',
356 action='edit_global_perms', conditions={'method': ['GET']})
369 action='edit_global_perms', conditions={'method': ['GET']})
357 m.connect('edit_user_group_global_perms',
370 m.connect('edit_user_group_global_perms',
358 '/user_groups/{user_group_id}/edit/global_permissions',
371 '/user_groups/{user_group_id}/edit/global_permissions',
359 action='update_global_perms', conditions={'method': ['PUT']})
372 action='update_global_perms', conditions={'method': ['PUT']})
360 m.connect('edit_user_group_perms_summary',
373 m.connect('edit_user_group_perms_summary',
361 '/user_groups/{user_group_id}/edit/permissions_summary',
374 '/user_groups/{user_group_id}/edit/permissions_summary',
362 action='edit_perms_summary', conditions={'method': ['GET']})
375 action='edit_perms_summary', conditions={'method': ['GET']})
363
376
364 m.connect('edit_user_group_perms',
377 m.connect('edit_user_group_perms',
365 '/user_groups/{user_group_id}/edit/permissions',
378 '/user_groups/{user_group_id}/edit/permissions',
366 action='edit_perms', conditions={'method': ['GET']})
379 action='edit_perms', conditions={'method': ['GET']})
367 m.connect('edit_user_group_perms',
380 m.connect('edit_user_group_perms',
368 '/user_groups/{user_group_id}/edit/permissions',
381 '/user_groups/{user_group_id}/edit/permissions',
369 action='update_perms', conditions={'method': ['PUT']})
382 action='update_perms', conditions={'method': ['PUT']})
370
383
371 m.connect('edit_user_group_advanced',
384 m.connect('edit_user_group_advanced',
372 '/user_groups/{user_group_id}/edit/advanced',
385 '/user_groups/{user_group_id}/edit/advanced',
373 action='edit_advanced', conditions={'method': ['GET']})
386 action='edit_advanced', conditions={'method': ['GET']})
374
387
375 m.connect('edit_user_group_members',
388 m.connect('edit_user_group_members',
376 '/user_groups/{user_group_id}/edit/members', jsroute=True,
389 '/user_groups/{user_group_id}/edit/members', jsroute=True,
377 action='edit_members', conditions={'method': ['GET']})
390 action='edit_members', conditions={'method': ['GET']})
378
391
379 # ADMIN PERMISSIONS ROUTES
392 # ADMIN PERMISSIONS ROUTES
380 with rmap.submapper(path_prefix=ADMIN_PREFIX,
393 with rmap.submapper(path_prefix=ADMIN_PREFIX,
381 controller='admin/permissions') as m:
394 controller='admin/permissions') as m:
382 m.connect('admin_permissions_application', '/permissions/application',
395 m.connect('admin_permissions_application', '/permissions/application',
383 action='permission_application_update', conditions={'method': ['POST']})
396 action='permission_application_update', conditions={'method': ['POST']})
384 m.connect('admin_permissions_application', '/permissions/application',
397 m.connect('admin_permissions_application', '/permissions/application',
385 action='permission_application', conditions={'method': ['GET']})
398 action='permission_application', conditions={'method': ['GET']})
386
399
387 m.connect('admin_permissions_global', '/permissions/global',
400 m.connect('admin_permissions_global', '/permissions/global',
388 action='permission_global_update', conditions={'method': ['POST']})
401 action='permission_global_update', conditions={'method': ['POST']})
389 m.connect('admin_permissions_global', '/permissions/global',
402 m.connect('admin_permissions_global', '/permissions/global',
390 action='permission_global', conditions={'method': ['GET']})
403 action='permission_global', conditions={'method': ['GET']})
391
404
392 m.connect('admin_permissions_object', '/permissions/object',
405 m.connect('admin_permissions_object', '/permissions/object',
393 action='permission_objects_update', conditions={'method': ['POST']})
406 action='permission_objects_update', conditions={'method': ['POST']})
394 m.connect('admin_permissions_object', '/permissions/object',
407 m.connect('admin_permissions_object', '/permissions/object',
395 action='permission_objects', conditions={'method': ['GET']})
408 action='permission_objects', conditions={'method': ['GET']})
396
409
397 m.connect('admin_permissions_ips', '/permissions/ips',
410 m.connect('admin_permissions_ips', '/permissions/ips',
398 action='permission_ips', conditions={'method': ['POST']})
411 action='permission_ips', conditions={'method': ['POST']})
399 m.connect('admin_permissions_ips', '/permissions/ips',
412 m.connect('admin_permissions_ips', '/permissions/ips',
400 action='permission_ips', conditions={'method': ['GET']})
413 action='permission_ips', conditions={'method': ['GET']})
401
414
402 m.connect('admin_permissions_overview', '/permissions/overview',
415 m.connect('admin_permissions_overview', '/permissions/overview',
403 action='permission_perms', conditions={'method': ['GET']})
416 action='permission_perms', conditions={'method': ['GET']})
404
417
405 # ADMIN DEFAULTS REST ROUTES
418 # ADMIN DEFAULTS REST ROUTES
406 with rmap.submapper(path_prefix=ADMIN_PREFIX,
419 with rmap.submapper(path_prefix=ADMIN_PREFIX,
407 controller='admin/defaults') as m:
420 controller='admin/defaults') as m:
408 m.connect('admin_defaults_repositories', '/defaults/repositories',
421 m.connect('admin_defaults_repositories', '/defaults/repositories',
409 action='update_repository_defaults', conditions={'method': ['POST']})
422 action='update_repository_defaults', conditions={'method': ['POST']})
410 m.connect('admin_defaults_repositories', '/defaults/repositories',
423 m.connect('admin_defaults_repositories', '/defaults/repositories',
411 action='index', conditions={'method': ['GET']})
424 action='index', conditions={'method': ['GET']})
412
425
413 # ADMIN DEBUG STYLE ROUTES
426 # ADMIN DEBUG STYLE ROUTES
414 if str2bool(config.get('debug_style')):
427 if str2bool(config.get('debug_style')):
415 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
428 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
416 controller='debug_style') as m:
429 controller='debug_style') as m:
417 m.connect('debug_style_home', '',
430 m.connect('debug_style_home', '',
418 action='index', conditions={'method': ['GET']})
431 action='index', conditions={'method': ['GET']})
419 m.connect('debug_style_template', '/t/{t_path}',
432 m.connect('debug_style_template', '/t/{t_path}',
420 action='template', conditions={'method': ['GET']})
433 action='template', conditions={'method': ['GET']})
421
434
422 # ADMIN SETTINGS ROUTES
435 # ADMIN SETTINGS ROUTES
423 with rmap.submapper(path_prefix=ADMIN_PREFIX,
436 with rmap.submapper(path_prefix=ADMIN_PREFIX,
424 controller='admin/settings') as m:
437 controller='admin/settings') as m:
425
438
426 # default
439 # default
427 m.connect('admin_settings', '/settings',
440 m.connect('admin_settings', '/settings',
428 action='settings_global_update',
441 action='settings_global_update',
429 conditions={'method': ['POST']})
442 conditions={'method': ['POST']})
430 m.connect('admin_settings', '/settings',
443 m.connect('admin_settings', '/settings',
431 action='settings_global', conditions={'method': ['GET']})
444 action='settings_global', conditions={'method': ['GET']})
432
445
433 m.connect('admin_settings_vcs', '/settings/vcs',
446 m.connect('admin_settings_vcs', '/settings/vcs',
434 action='settings_vcs_update',
447 action='settings_vcs_update',
435 conditions={'method': ['POST']})
448 conditions={'method': ['POST']})
436 m.connect('admin_settings_vcs', '/settings/vcs',
449 m.connect('admin_settings_vcs', '/settings/vcs',
437 action='settings_vcs',
450 action='settings_vcs',
438 conditions={'method': ['GET']})
451 conditions={'method': ['GET']})
439 m.connect('admin_settings_vcs', '/settings/vcs',
452 m.connect('admin_settings_vcs', '/settings/vcs',
440 action='delete_svn_pattern',
453 action='delete_svn_pattern',
441 conditions={'method': ['DELETE']})
454 conditions={'method': ['DELETE']})
442
455
443 m.connect('admin_settings_mapping', '/settings/mapping',
456 m.connect('admin_settings_mapping', '/settings/mapping',
444 action='settings_mapping_update',
457 action='settings_mapping_update',
445 conditions={'method': ['POST']})
458 conditions={'method': ['POST']})
446 m.connect('admin_settings_mapping', '/settings/mapping',
459 m.connect('admin_settings_mapping', '/settings/mapping',
447 action='settings_mapping', conditions={'method': ['GET']})
460 action='settings_mapping', conditions={'method': ['GET']})
448
461
449 m.connect('admin_settings_global', '/settings/global',
462 m.connect('admin_settings_global', '/settings/global',
450 action='settings_global_update',
463 action='settings_global_update',
451 conditions={'method': ['POST']})
464 conditions={'method': ['POST']})
452 m.connect('admin_settings_global', '/settings/global',
465 m.connect('admin_settings_global', '/settings/global',
453 action='settings_global', conditions={'method': ['GET']})
466 action='settings_global', conditions={'method': ['GET']})
454
467
455 m.connect('admin_settings_visual', '/settings/visual',
468 m.connect('admin_settings_visual', '/settings/visual',
456 action='settings_visual_update',
469 action='settings_visual_update',
457 conditions={'method': ['POST']})
470 conditions={'method': ['POST']})
458 m.connect('admin_settings_visual', '/settings/visual',
471 m.connect('admin_settings_visual', '/settings/visual',
459 action='settings_visual', conditions={'method': ['GET']})
472 action='settings_visual', conditions={'method': ['GET']})
460
473
461 m.connect('admin_settings_issuetracker',
474 m.connect('admin_settings_issuetracker',
462 '/settings/issue-tracker', action='settings_issuetracker',
475 '/settings/issue-tracker', action='settings_issuetracker',
463 conditions={'method': ['GET']})
476 conditions={'method': ['GET']})
464 m.connect('admin_settings_issuetracker_save',
477 m.connect('admin_settings_issuetracker_save',
465 '/settings/issue-tracker/save',
478 '/settings/issue-tracker/save',
466 action='settings_issuetracker_save',
479 action='settings_issuetracker_save',
467 conditions={'method': ['POST']})
480 conditions={'method': ['POST']})
468 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
481 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
469 action='settings_issuetracker_test',
482 action='settings_issuetracker_test',
470 conditions={'method': ['POST']})
483 conditions={'method': ['POST']})
471 m.connect('admin_issuetracker_delete',
484 m.connect('admin_issuetracker_delete',
472 '/settings/issue-tracker/delete',
485 '/settings/issue-tracker/delete',
473 action='settings_issuetracker_delete',
486 action='settings_issuetracker_delete',
474 conditions={'method': ['DELETE']})
487 conditions={'method': ['DELETE']})
475
488
476 m.connect('admin_settings_email', '/settings/email',
489 m.connect('admin_settings_email', '/settings/email',
477 action='settings_email_update',
490 action='settings_email_update',
478 conditions={'method': ['POST']})
491 conditions={'method': ['POST']})
479 m.connect('admin_settings_email', '/settings/email',
492 m.connect('admin_settings_email', '/settings/email',
480 action='settings_email', conditions={'method': ['GET']})
493 action='settings_email', conditions={'method': ['GET']})
481
494
482 m.connect('admin_settings_hooks', '/settings/hooks',
495 m.connect('admin_settings_hooks', '/settings/hooks',
483 action='settings_hooks_update',
496 action='settings_hooks_update',
484 conditions={'method': ['POST', 'DELETE']})
497 conditions={'method': ['POST', 'DELETE']})
485 m.connect('admin_settings_hooks', '/settings/hooks',
498 m.connect('admin_settings_hooks', '/settings/hooks',
486 action='settings_hooks', conditions={'method': ['GET']})
499 action='settings_hooks', conditions={'method': ['GET']})
487
500
488 m.connect('admin_settings_search', '/settings/search',
501 m.connect('admin_settings_search', '/settings/search',
489 action='settings_search', conditions={'method': ['GET']})
502 action='settings_search', conditions={'method': ['GET']})
490
503
491 m.connect('admin_settings_system', '/settings/system',
504 m.connect('admin_settings_system', '/settings/system',
492 action='settings_system', conditions={'method': ['GET']})
505 action='settings_system', conditions={'method': ['GET']})
493
506
494 m.connect('admin_settings_system_update', '/settings/system/updates',
507 m.connect('admin_settings_system_update', '/settings/system/updates',
495 action='settings_system_update', conditions={'method': ['GET']})
508 action='settings_system_update', conditions={'method': ['GET']})
496
509
497 m.connect('admin_settings_supervisor', '/settings/supervisor',
510 m.connect('admin_settings_supervisor', '/settings/supervisor',
498 action='settings_supervisor', conditions={'method': ['GET']})
511 action='settings_supervisor', conditions={'method': ['GET']})
499 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
512 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
500 action='settings_supervisor_log', conditions={'method': ['GET']})
513 action='settings_supervisor_log', conditions={'method': ['GET']})
501
514
502 m.connect('admin_settings_labs', '/settings/labs',
515 m.connect('admin_settings_labs', '/settings/labs',
503 action='settings_labs_update',
516 action='settings_labs_update',
504 conditions={'method': ['POST']})
517 conditions={'method': ['POST']})
505 m.connect('admin_settings_labs', '/settings/labs',
518 m.connect('admin_settings_labs', '/settings/labs',
506 action='settings_labs', conditions={'method': ['GET']})
519 action='settings_labs', conditions={'method': ['GET']})
507
520
508 # ADMIN MY ACCOUNT
521 # ADMIN MY ACCOUNT
509 with rmap.submapper(path_prefix=ADMIN_PREFIX,
522 with rmap.submapper(path_prefix=ADMIN_PREFIX,
510 controller='admin/my_account') as m:
523 controller='admin/my_account') as m:
511
524
512 m.connect('my_account', '/my_account',
525 m.connect('my_account', '/my_account',
513 action='my_account', conditions={'method': ['GET']})
526 action='my_account', conditions={'method': ['GET']})
514 m.connect('my_account_edit', '/my_account/edit',
527 m.connect('my_account_edit', '/my_account/edit',
515 action='my_account_edit', conditions={'method': ['GET']})
528 action='my_account_edit', conditions={'method': ['GET']})
516 m.connect('my_account', '/my_account',
529 m.connect('my_account', '/my_account',
517 action='my_account_update', conditions={'method': ['POST']})
530 action='my_account_update', conditions={'method': ['POST']})
518
531
519 m.connect('my_account_password', '/my_account/password',
532 m.connect('my_account_password', '/my_account/password',
520 action='my_account_password', conditions={'method': ['GET']})
533 action='my_account_password', conditions={'method': ['GET']})
521 m.connect('my_account_password', '/my_account/password',
534 m.connect('my_account_password', '/my_account/password',
522 action='my_account_password_update', conditions={'method': ['POST']})
535 action='my_account_password_update', conditions={'method': ['POST']})
523
536
524 m.connect('my_account_repos', '/my_account/repos',
537 m.connect('my_account_repos', '/my_account/repos',
525 action='my_account_repos', conditions={'method': ['GET']})
538 action='my_account_repos', conditions={'method': ['GET']})
526
539
527 m.connect('my_account_watched', '/my_account/watched',
540 m.connect('my_account_watched', '/my_account/watched',
528 action='my_account_watched', conditions={'method': ['GET']})
541 action='my_account_watched', conditions={'method': ['GET']})
529
542
530 m.connect('my_account_pullrequests', '/my_account/pull_requests',
543 m.connect('my_account_pullrequests', '/my_account/pull_requests',
531 action='my_account_pullrequests', conditions={'method': ['GET']})
544 action='my_account_pullrequests', conditions={'method': ['GET']})
532
545
533 m.connect('my_account_perms', '/my_account/perms',
546 m.connect('my_account_perms', '/my_account/perms',
534 action='my_account_perms', conditions={'method': ['GET']})
547 action='my_account_perms', conditions={'method': ['GET']})
535
548
536 m.connect('my_account_emails', '/my_account/emails',
549 m.connect('my_account_emails', '/my_account/emails',
537 action='my_account_emails', conditions={'method': ['GET']})
550 action='my_account_emails', conditions={'method': ['GET']})
538 m.connect('my_account_emails', '/my_account/emails',
551 m.connect('my_account_emails', '/my_account/emails',
539 action='my_account_emails_add', conditions={'method': ['POST']})
552 action='my_account_emails_add', conditions={'method': ['POST']})
540 m.connect('my_account_emails', '/my_account/emails',
553 m.connect('my_account_emails', '/my_account/emails',
541 action='my_account_emails_delete', conditions={'method': ['DELETE']})
554 action='my_account_emails_delete', conditions={'method': ['DELETE']})
542
555
543 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
556 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
544 action='my_account_auth_tokens', conditions={'method': ['GET']})
557 action='my_account_auth_tokens', conditions={'method': ['GET']})
545 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
558 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
546 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
559 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
547 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
560 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
548 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
561 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
549
562
550 # NOTIFICATION REST ROUTES
563 # NOTIFICATION REST ROUTES
551 with rmap.submapper(path_prefix=ADMIN_PREFIX,
564 with rmap.submapper(path_prefix=ADMIN_PREFIX,
552 controller='admin/notifications') as m:
565 controller='admin/notifications') as m:
553 m.connect('notifications', '/notifications',
566 m.connect('notifications', '/notifications',
554 action='index', conditions={'method': ['GET']})
567 action='index', conditions={'method': ['GET']})
555 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
568 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
556 action='mark_all_read', conditions={'method': ['POST']})
569 action='mark_all_read', conditions={'method': ['POST']})
557
570
558 m.connect('/notifications/{notification_id}',
571 m.connect('/notifications/{notification_id}',
559 action='update', conditions={'method': ['PUT']})
572 action='update', conditions={'method': ['PUT']})
560 m.connect('/notifications/{notification_id}',
573 m.connect('/notifications/{notification_id}',
561 action='delete', conditions={'method': ['DELETE']})
574 action='delete', conditions={'method': ['DELETE']})
562 m.connect('notification', '/notifications/{notification_id}',
575 m.connect('notification', '/notifications/{notification_id}',
563 action='show', conditions={'method': ['GET']})
576 action='show', conditions={'method': ['GET']})
564
577
565 # ADMIN GIST
578 # ADMIN GIST
566 with rmap.submapper(path_prefix=ADMIN_PREFIX,
579 with rmap.submapper(path_prefix=ADMIN_PREFIX,
567 controller='admin/gists') as m:
580 controller='admin/gists') as m:
568 m.connect('gists', '/gists',
581 m.connect('gists', '/gists',
569 action='create', conditions={'method': ['POST']})
582 action='create', conditions={'method': ['POST']})
570 m.connect('gists', '/gists', jsroute=True,
583 m.connect('gists', '/gists', jsroute=True,
571 action='index', conditions={'method': ['GET']})
584 action='index', conditions={'method': ['GET']})
572 m.connect('new_gist', '/gists/new', jsroute=True,
585 m.connect('new_gist', '/gists/new', jsroute=True,
573 action='new', conditions={'method': ['GET']})
586 action='new', conditions={'method': ['GET']})
574
587
575 m.connect('/gists/{gist_id}',
588 m.connect('/gists/{gist_id}',
576 action='delete', conditions={'method': ['DELETE']})
589 action='delete', conditions={'method': ['DELETE']})
577 m.connect('edit_gist', '/gists/{gist_id}/edit',
590 m.connect('edit_gist', '/gists/{gist_id}/edit',
578 action='edit_form', conditions={'method': ['GET']})
591 action='edit_form', conditions={'method': ['GET']})
579 m.connect('edit_gist', '/gists/{gist_id}/edit',
592 m.connect('edit_gist', '/gists/{gist_id}/edit',
580 action='edit', conditions={'method': ['POST']})
593 action='edit', conditions={'method': ['POST']})
581 m.connect(
594 m.connect(
582 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
595 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
583 action='check_revision', conditions={'method': ['GET']})
596 action='check_revision', conditions={'method': ['GET']})
584
597
585 m.connect('gist', '/gists/{gist_id}',
598 m.connect('gist', '/gists/{gist_id}',
586 action='show', conditions={'method': ['GET']})
599 action='show', conditions={'method': ['GET']})
587 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
600 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
588 revision='tip',
601 revision='tip',
589 action='show', conditions={'method': ['GET']})
602 action='show', conditions={'method': ['GET']})
590 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
603 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
591 revision='tip',
604 revision='tip',
592 action='show', conditions={'method': ['GET']})
605 action='show', conditions={'method': ['GET']})
593 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
606 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
594 revision='tip',
607 revision='tip',
595 action='show', conditions={'method': ['GET']},
608 action='show', conditions={'method': ['GET']},
596 requirements=URL_NAME_REQUIREMENTS)
609 requirements=URL_NAME_REQUIREMENTS)
597
610
598 # ADMIN MAIN PAGES
611 # ADMIN MAIN PAGES
599 with rmap.submapper(path_prefix=ADMIN_PREFIX,
612 with rmap.submapper(path_prefix=ADMIN_PREFIX,
600 controller='admin/admin') as m:
613 controller='admin/admin') as m:
601 m.connect('admin_home', '', action='index')
614 m.connect('admin_home', '', action='index')
602 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
615 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
603 action='add_repo')
616 action='add_repo')
604 m.connect(
617 m.connect(
605 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
618 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
606 action='pull_requests')
619 action='pull_requests')
607 m.connect(
620 m.connect(
608 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
621 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
609 action='pull_requests')
622 action='pull_requests')
610
623
611
624
612 # USER JOURNAL
625 # USER JOURNAL
613 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
626 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
614 controller='journal', action='index')
627 controller='journal', action='index')
615 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
628 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
616 controller='journal', action='journal_rss')
629 controller='journal', action='journal_rss')
617 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
630 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
618 controller='journal', action='journal_atom')
631 controller='journal', action='journal_atom')
619
632
620 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
633 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
621 controller='journal', action='public_journal')
634 controller='journal', action='public_journal')
622
635
623 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
636 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
624 controller='journal', action='public_journal_rss')
637 controller='journal', action='public_journal_rss')
625
638
626 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
639 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
627 controller='journal', action='public_journal_rss')
640 controller='journal', action='public_journal_rss')
628
641
629 rmap.connect('public_journal_atom',
642 rmap.connect('public_journal_atom',
630 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
643 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
631 action='public_journal_atom')
644 action='public_journal_atom')
632
645
633 rmap.connect('public_journal_atom_old',
646 rmap.connect('public_journal_atom_old',
634 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
647 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
635 action='public_journal_atom')
648 action='public_journal_atom')
636
649
637 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
650 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
638 controller='journal', action='toggle_following', jsroute=True,
651 controller='journal', action='toggle_following', jsroute=True,
639 conditions={'method': ['POST']})
652 conditions={'method': ['POST']})
640
653
641 # FULL TEXT SEARCH
654 # FULL TEXT SEARCH
642 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
655 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
643 controller='search')
656 controller='search')
644 rmap.connect('search_repo_home', '/{repo_name}/search',
657 rmap.connect('search_repo_home', '/{repo_name}/search',
645 controller='search',
658 controller='search',
646 action='index',
659 action='index',
647 conditions={'function': check_repo},
660 conditions={'function': check_repo},
648 requirements=URL_NAME_REQUIREMENTS)
661 requirements=URL_NAME_REQUIREMENTS)
649
662
650 # FEEDS
663 # FEEDS
651 rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
664 rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
652 controller='feed', action='rss',
665 controller='feed', action='rss',
653 conditions={'function': check_repo},
666 conditions={'function': check_repo},
654 requirements=URL_NAME_REQUIREMENTS)
667 requirements=URL_NAME_REQUIREMENTS)
655
668
656 rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
669 rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
657 controller='feed', action='atom',
670 controller='feed', action='atom',
658 conditions={'function': check_repo},
671 conditions={'function': check_repo},
659 requirements=URL_NAME_REQUIREMENTS)
672 requirements=URL_NAME_REQUIREMENTS)
660
673
661 #==========================================================================
674 #==========================================================================
662 # REPOSITORY ROUTES
675 # REPOSITORY ROUTES
663 #==========================================================================
676 #==========================================================================
664
677
665 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
678 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
666 controller='admin/repos', action='repo_creating',
679 controller='admin/repos', action='repo_creating',
667 requirements=URL_NAME_REQUIREMENTS)
680 requirements=URL_NAME_REQUIREMENTS)
668 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
681 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
669 controller='admin/repos', action='repo_check',
682 controller='admin/repos', action='repo_check',
670 requirements=URL_NAME_REQUIREMENTS)
683 requirements=URL_NAME_REQUIREMENTS)
671
684
672 rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
685 rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
673 controller='summary', action='repo_stats',
686 controller='summary', action='repo_stats',
674 conditions={'function': check_repo},
687 conditions={'function': check_repo},
675 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
688 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
676
689
677 rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
690 rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
678 controller='summary', action='repo_refs_data', jsroute=True,
691 controller='summary', action='repo_refs_data', jsroute=True,
679 requirements=URL_NAME_REQUIREMENTS)
692 requirements=URL_NAME_REQUIREMENTS)
680 rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
693 rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
681 controller='summary', action='repo_refs_changelog_data',
694 controller='summary', action='repo_refs_changelog_data',
682 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
695 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
683
696
684 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
697 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
685 controller='changeset', revision='tip', jsroute=True,
698 controller='changeset', revision='tip', jsroute=True,
686 conditions={'function': check_repo},
699 conditions={'function': check_repo},
687 requirements=URL_NAME_REQUIREMENTS)
700 requirements=URL_NAME_REQUIREMENTS)
688 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
701 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
689 controller='changeset', revision='tip', action='changeset_children',
702 controller='changeset', revision='tip', action='changeset_children',
690 conditions={'function': check_repo},
703 conditions={'function': check_repo},
691 requirements=URL_NAME_REQUIREMENTS)
704 requirements=URL_NAME_REQUIREMENTS)
692 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
705 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
693 controller='changeset', revision='tip', action='changeset_parents',
706 controller='changeset', revision='tip', action='changeset_parents',
694 conditions={'function': check_repo},
707 conditions={'function': check_repo},
695 requirements=URL_NAME_REQUIREMENTS)
708 requirements=URL_NAME_REQUIREMENTS)
696
709
697 # repo edit options
710 # repo edit options
698 rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
711 rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
699 controller='admin/repos', action='edit',
712 controller='admin/repos', action='edit',
700 conditions={'method': ['GET'], 'function': check_repo},
713 conditions={'method': ['GET'], 'function': check_repo},
701 requirements=URL_NAME_REQUIREMENTS)
714 requirements=URL_NAME_REQUIREMENTS)
702
715
703 rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
716 rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
704 jsroute=True,
717 jsroute=True,
705 controller='admin/repos', action='edit_permissions',
718 controller='admin/repos', action='edit_permissions',
706 conditions={'method': ['GET'], 'function': check_repo},
719 conditions={'method': ['GET'], 'function': check_repo},
707 requirements=URL_NAME_REQUIREMENTS)
720 requirements=URL_NAME_REQUIREMENTS)
708 rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
721 rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
709 controller='admin/repos', action='edit_permissions_update',
722 controller='admin/repos', action='edit_permissions_update',
710 conditions={'method': ['PUT'], 'function': check_repo},
723 conditions={'method': ['PUT'], 'function': check_repo},
711 requirements=URL_NAME_REQUIREMENTS)
724 requirements=URL_NAME_REQUIREMENTS)
712
725
713 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
726 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
714 controller='admin/repos', action='edit_fields',
727 controller='admin/repos', action='edit_fields',
715 conditions={'method': ['GET'], 'function': check_repo},
728 conditions={'method': ['GET'], 'function': check_repo},
716 requirements=URL_NAME_REQUIREMENTS)
729 requirements=URL_NAME_REQUIREMENTS)
717 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
730 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
718 controller='admin/repos', action='create_repo_field',
731 controller='admin/repos', action='create_repo_field',
719 conditions={'method': ['PUT'], 'function': check_repo},
732 conditions={'method': ['PUT'], 'function': check_repo},
720 requirements=URL_NAME_REQUIREMENTS)
733 requirements=URL_NAME_REQUIREMENTS)
721 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
734 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
722 controller='admin/repos', action='delete_repo_field',
735 controller='admin/repos', action='delete_repo_field',
723 conditions={'method': ['DELETE'], 'function': check_repo},
736 conditions={'method': ['DELETE'], 'function': check_repo},
724 requirements=URL_NAME_REQUIREMENTS)
737 requirements=URL_NAME_REQUIREMENTS)
725
738
726 rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
739 rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
727 controller='admin/repos', action='edit_advanced',
740 controller='admin/repos', action='edit_advanced',
728 conditions={'method': ['GET'], 'function': check_repo},
741 conditions={'method': ['GET'], 'function': check_repo},
729 requirements=URL_NAME_REQUIREMENTS)
742 requirements=URL_NAME_REQUIREMENTS)
730
743
731 rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
744 rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
732 controller='admin/repos', action='edit_advanced_locking',
745 controller='admin/repos', action='edit_advanced_locking',
733 conditions={'method': ['PUT'], 'function': check_repo},
746 conditions={'method': ['PUT'], 'function': check_repo},
734 requirements=URL_NAME_REQUIREMENTS)
747 requirements=URL_NAME_REQUIREMENTS)
735 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
748 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
736 controller='admin/repos', action='toggle_locking',
749 controller='admin/repos', action='toggle_locking',
737 conditions={'method': ['GET'], 'function': check_repo},
750 conditions={'method': ['GET'], 'function': check_repo},
738 requirements=URL_NAME_REQUIREMENTS)
751 requirements=URL_NAME_REQUIREMENTS)
739
752
740 rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
753 rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
741 controller='admin/repos', action='edit_advanced_journal',
754 controller='admin/repos', action='edit_advanced_journal',
742 conditions={'method': ['PUT'], 'function': check_repo},
755 conditions={'method': ['PUT'], 'function': check_repo},
743 requirements=URL_NAME_REQUIREMENTS)
756 requirements=URL_NAME_REQUIREMENTS)
744
757
745 rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
758 rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
746 controller='admin/repos', action='edit_advanced_fork',
759 controller='admin/repos', action='edit_advanced_fork',
747 conditions={'method': ['PUT'], 'function': check_repo},
760 conditions={'method': ['PUT'], 'function': check_repo},
748 requirements=URL_NAME_REQUIREMENTS)
761 requirements=URL_NAME_REQUIREMENTS)
749
762
750 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
763 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
751 controller='admin/repos', action='edit_caches_form',
764 controller='admin/repos', action='edit_caches_form',
752 conditions={'method': ['GET'], 'function': check_repo},
765 conditions={'method': ['GET'], 'function': check_repo},
753 requirements=URL_NAME_REQUIREMENTS)
766 requirements=URL_NAME_REQUIREMENTS)
754 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
767 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
755 controller='admin/repos', action='edit_caches',
768 controller='admin/repos', action='edit_caches',
756 conditions={'method': ['PUT'], 'function': check_repo},
769 conditions={'method': ['PUT'], 'function': check_repo},
757 requirements=URL_NAME_REQUIREMENTS)
770 requirements=URL_NAME_REQUIREMENTS)
758
771
759 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
772 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
760 controller='admin/repos', action='edit_remote_form',
773 controller='admin/repos', action='edit_remote_form',
761 conditions={'method': ['GET'], 'function': check_repo},
774 conditions={'method': ['GET'], 'function': check_repo},
762 requirements=URL_NAME_REQUIREMENTS)
775 requirements=URL_NAME_REQUIREMENTS)
763 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
776 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
764 controller='admin/repos', action='edit_remote',
777 controller='admin/repos', action='edit_remote',
765 conditions={'method': ['PUT'], 'function': check_repo},
778 conditions={'method': ['PUT'], 'function': check_repo},
766 requirements=URL_NAME_REQUIREMENTS)
779 requirements=URL_NAME_REQUIREMENTS)
767
780
768 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
781 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
769 controller='admin/repos', action='edit_statistics_form',
782 controller='admin/repos', action='edit_statistics_form',
770 conditions={'method': ['GET'], 'function': check_repo},
783 conditions={'method': ['GET'], 'function': check_repo},
771 requirements=URL_NAME_REQUIREMENTS)
784 requirements=URL_NAME_REQUIREMENTS)
772 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
785 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
773 controller='admin/repos', action='edit_statistics',
786 controller='admin/repos', action='edit_statistics',
774 conditions={'method': ['PUT'], 'function': check_repo},
787 conditions={'method': ['PUT'], 'function': check_repo},
775 requirements=URL_NAME_REQUIREMENTS)
788 requirements=URL_NAME_REQUIREMENTS)
776 rmap.connect('repo_settings_issuetracker',
789 rmap.connect('repo_settings_issuetracker',
777 '/{repo_name}/settings/issue-tracker',
790 '/{repo_name}/settings/issue-tracker',
778 controller='admin/repos', action='repo_issuetracker',
791 controller='admin/repos', action='repo_issuetracker',
779 conditions={'method': ['GET'], 'function': check_repo},
792 conditions={'method': ['GET'], 'function': check_repo},
780 requirements=URL_NAME_REQUIREMENTS)
793 requirements=URL_NAME_REQUIREMENTS)
781 rmap.connect('repo_issuetracker_test',
794 rmap.connect('repo_issuetracker_test',
782 '/{repo_name}/settings/issue-tracker/test',
795 '/{repo_name}/settings/issue-tracker/test',
783 controller='admin/repos', action='repo_issuetracker_test',
796 controller='admin/repos', action='repo_issuetracker_test',
784 conditions={'method': ['POST'], 'function': check_repo},
797 conditions={'method': ['POST'], 'function': check_repo},
785 requirements=URL_NAME_REQUIREMENTS)
798 requirements=URL_NAME_REQUIREMENTS)
786 rmap.connect('repo_issuetracker_delete',
799 rmap.connect('repo_issuetracker_delete',
787 '/{repo_name}/settings/issue-tracker/delete',
800 '/{repo_name}/settings/issue-tracker/delete',
788 controller='admin/repos', action='repo_issuetracker_delete',
801 controller='admin/repos', action='repo_issuetracker_delete',
789 conditions={'method': ['DELETE'], 'function': check_repo},
802 conditions={'method': ['DELETE'], 'function': check_repo},
790 requirements=URL_NAME_REQUIREMENTS)
803 requirements=URL_NAME_REQUIREMENTS)
791 rmap.connect('repo_issuetracker_save',
804 rmap.connect('repo_issuetracker_save',
792 '/{repo_name}/settings/issue-tracker/save',
805 '/{repo_name}/settings/issue-tracker/save',
793 controller='admin/repos', action='repo_issuetracker_save',
806 controller='admin/repos', action='repo_issuetracker_save',
794 conditions={'method': ['POST'], 'function': check_repo},
807 conditions={'method': ['POST'], 'function': check_repo},
795 requirements=URL_NAME_REQUIREMENTS)
808 requirements=URL_NAME_REQUIREMENTS)
796 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
809 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
797 controller='admin/repos', action='repo_settings_vcs_update',
810 controller='admin/repos', action='repo_settings_vcs_update',
798 conditions={'method': ['POST'], 'function': check_repo},
811 conditions={'method': ['POST'], 'function': check_repo},
799 requirements=URL_NAME_REQUIREMENTS)
812 requirements=URL_NAME_REQUIREMENTS)
800 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
813 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
801 controller='admin/repos', action='repo_settings_vcs',
814 controller='admin/repos', action='repo_settings_vcs',
802 conditions={'method': ['GET'], 'function': check_repo},
815 conditions={'method': ['GET'], 'function': check_repo},
803 requirements=URL_NAME_REQUIREMENTS)
816 requirements=URL_NAME_REQUIREMENTS)
804 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
817 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
805 controller='admin/repos', action='repo_delete_svn_pattern',
818 controller='admin/repos', action='repo_delete_svn_pattern',
806 conditions={'method': ['DELETE'], 'function': check_repo},
819 conditions={'method': ['DELETE'], 'function': check_repo},
807 requirements=URL_NAME_REQUIREMENTS)
820 requirements=URL_NAME_REQUIREMENTS)
808
821
809 # still working url for backward compat.
822 # still working url for backward compat.
810 rmap.connect('raw_changeset_home_depraced',
823 rmap.connect('raw_changeset_home_depraced',
811 '/{repo_name}/raw-changeset/{revision}',
824 '/{repo_name}/raw-changeset/{revision}',
812 controller='changeset', action='changeset_raw',
825 controller='changeset', action='changeset_raw',
813 revision='tip', conditions={'function': check_repo},
826 revision='tip', conditions={'function': check_repo},
814 requirements=URL_NAME_REQUIREMENTS)
827 requirements=URL_NAME_REQUIREMENTS)
815
828
816 # new URLs
829 # new URLs
817 rmap.connect('changeset_raw_home',
830 rmap.connect('changeset_raw_home',
818 '/{repo_name}/changeset-diff/{revision}',
831 '/{repo_name}/changeset-diff/{revision}',
819 controller='changeset', action='changeset_raw',
832 controller='changeset', action='changeset_raw',
820 revision='tip', conditions={'function': check_repo},
833 revision='tip', conditions={'function': check_repo},
821 requirements=URL_NAME_REQUIREMENTS)
834 requirements=URL_NAME_REQUIREMENTS)
822
835
823 rmap.connect('changeset_patch_home',
836 rmap.connect('changeset_patch_home',
824 '/{repo_name}/changeset-patch/{revision}',
837 '/{repo_name}/changeset-patch/{revision}',
825 controller='changeset', action='changeset_patch',
838 controller='changeset', action='changeset_patch',
826 revision='tip', conditions={'function': check_repo},
839 revision='tip', conditions={'function': check_repo},
827 requirements=URL_NAME_REQUIREMENTS)
840 requirements=URL_NAME_REQUIREMENTS)
828
841
829 rmap.connect('changeset_download_home',
842 rmap.connect('changeset_download_home',
830 '/{repo_name}/changeset-download/{revision}',
843 '/{repo_name}/changeset-download/{revision}',
831 controller='changeset', action='changeset_download',
844 controller='changeset', action='changeset_download',
832 revision='tip', conditions={'function': check_repo},
845 revision='tip', conditions={'function': check_repo},
833 requirements=URL_NAME_REQUIREMENTS)
846 requirements=URL_NAME_REQUIREMENTS)
834
847
835 rmap.connect('changeset_comment',
848 rmap.connect('changeset_comment',
836 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
849 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
837 controller='changeset', revision='tip', action='comment',
850 controller='changeset', revision='tip', action='comment',
838 conditions={'function': check_repo},
851 conditions={'function': check_repo},
839 requirements=URL_NAME_REQUIREMENTS)
852 requirements=URL_NAME_REQUIREMENTS)
840
853
841 rmap.connect('changeset_comment_preview',
854 rmap.connect('changeset_comment_preview',
842 '/{repo_name}/changeset/comment/preview', jsroute=True,
855 '/{repo_name}/changeset/comment/preview', jsroute=True,
843 controller='changeset', action='preview_comment',
856 controller='changeset', action='preview_comment',
844 conditions={'function': check_repo, 'method': ['POST']},
857 conditions={'function': check_repo, 'method': ['POST']},
845 requirements=URL_NAME_REQUIREMENTS)
858 requirements=URL_NAME_REQUIREMENTS)
846
859
847 rmap.connect('changeset_comment_delete',
860 rmap.connect('changeset_comment_delete',
848 '/{repo_name}/changeset/comment/{comment_id}/delete',
861 '/{repo_name}/changeset/comment/{comment_id}/delete',
849 controller='changeset', action='delete_comment',
862 controller='changeset', action='delete_comment',
850 conditions={'function': check_repo, 'method': ['DELETE']},
863 conditions={'function': check_repo, 'method': ['DELETE']},
851 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
864 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
852
865
853 rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
866 rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
854 controller='changeset', action='changeset_info',
867 controller='changeset', action='changeset_info',
855 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
868 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
856
869
857 rmap.connect('compare_home',
870 rmap.connect('compare_home',
858 '/{repo_name}/compare',
871 '/{repo_name}/compare',
859 controller='compare', action='index',
872 controller='compare', action='index',
860 conditions={'function': check_repo},
873 conditions={'function': check_repo},
861 requirements=URL_NAME_REQUIREMENTS)
874 requirements=URL_NAME_REQUIREMENTS)
862
875
863 rmap.connect('compare_url',
876 rmap.connect('compare_url',
864 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
877 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
865 controller='compare', action='compare',
878 controller='compare', action='compare',
866 conditions={'function': check_repo},
879 conditions={'function': check_repo},
867 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
880 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
868
881
869 rmap.connect('pullrequest_home',
882 rmap.connect('pullrequest_home',
870 '/{repo_name}/pull-request/new', controller='pullrequests',
883 '/{repo_name}/pull-request/new', controller='pullrequests',
871 action='index', conditions={'function': check_repo,
884 action='index', conditions={'function': check_repo,
872 'method': ['GET']},
885 'method': ['GET']},
873 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
886 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
874
887
875 rmap.connect('pullrequest',
888 rmap.connect('pullrequest',
876 '/{repo_name}/pull-request/new', controller='pullrequests',
889 '/{repo_name}/pull-request/new', controller='pullrequests',
877 action='create', conditions={'function': check_repo,
890 action='create', conditions={'function': check_repo,
878 'method': ['POST']},
891 'method': ['POST']},
879 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
892 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
880
893
881 rmap.connect('pullrequest_repo_refs',
894 rmap.connect('pullrequest_repo_refs',
882 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
895 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
883 controller='pullrequests',
896 controller='pullrequests',
884 action='get_repo_refs',
897 action='get_repo_refs',
885 conditions={'function': check_repo, 'method': ['GET']},
898 conditions={'function': check_repo, 'method': ['GET']},
886 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
899 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
887
900
888 rmap.connect('pullrequest_repo_destinations',
901 rmap.connect('pullrequest_repo_destinations',
889 '/{repo_name}/pull-request/repo-destinations',
902 '/{repo_name}/pull-request/repo-destinations',
890 controller='pullrequests',
903 controller='pullrequests',
891 action='get_repo_destinations',
904 action='get_repo_destinations',
892 conditions={'function': check_repo, 'method': ['GET']},
905 conditions={'function': check_repo, 'method': ['GET']},
893 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
906 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
894
907
895 rmap.connect('pullrequest_show',
908 rmap.connect('pullrequest_show',
896 '/{repo_name}/pull-request/{pull_request_id}',
909 '/{repo_name}/pull-request/{pull_request_id}',
897 controller='pullrequests',
910 controller='pullrequests',
898 action='show', conditions={'function': check_repo,
911 action='show', conditions={'function': check_repo,
899 'method': ['GET']},
912 'method': ['GET']},
900 requirements=URL_NAME_REQUIREMENTS)
913 requirements=URL_NAME_REQUIREMENTS)
901
914
902 rmap.connect('pullrequest_update',
915 rmap.connect('pullrequest_update',
903 '/{repo_name}/pull-request/{pull_request_id}',
916 '/{repo_name}/pull-request/{pull_request_id}',
904 controller='pullrequests',
917 controller='pullrequests',
905 action='update', conditions={'function': check_repo,
918 action='update', conditions={'function': check_repo,
906 'method': ['PUT']},
919 'method': ['PUT']},
907 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
920 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
908
921
909 rmap.connect('pullrequest_merge',
922 rmap.connect('pullrequest_merge',
910 '/{repo_name}/pull-request/{pull_request_id}',
923 '/{repo_name}/pull-request/{pull_request_id}',
911 controller='pullrequests',
924 controller='pullrequests',
912 action='merge', conditions={'function': check_repo,
925 action='merge', conditions={'function': check_repo,
913 'method': ['POST']},
926 'method': ['POST']},
914 requirements=URL_NAME_REQUIREMENTS)
927 requirements=URL_NAME_REQUIREMENTS)
915
928
916 rmap.connect('pullrequest_delete',
929 rmap.connect('pullrequest_delete',
917 '/{repo_name}/pull-request/{pull_request_id}',
930 '/{repo_name}/pull-request/{pull_request_id}',
918 controller='pullrequests',
931 controller='pullrequests',
919 action='delete', conditions={'function': check_repo,
932 action='delete', conditions={'function': check_repo,
920 'method': ['DELETE']},
933 'method': ['DELETE']},
921 requirements=URL_NAME_REQUIREMENTS)
934 requirements=URL_NAME_REQUIREMENTS)
922
935
923 rmap.connect('pullrequest_show_all',
936 rmap.connect('pullrequest_show_all',
924 '/{repo_name}/pull-request',
937 '/{repo_name}/pull-request',
925 controller='pullrequests',
938 controller='pullrequests',
926 action='show_all', conditions={'function': check_repo,
939 action='show_all', conditions={'function': check_repo,
927 'method': ['GET']},
940 'method': ['GET']},
928 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
941 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
929
942
930 rmap.connect('pullrequest_comment',
943 rmap.connect('pullrequest_comment',
931 '/{repo_name}/pull-request-comment/{pull_request_id}',
944 '/{repo_name}/pull-request-comment/{pull_request_id}',
932 controller='pullrequests',
945 controller='pullrequests',
933 action='comment', conditions={'function': check_repo,
946 action='comment', conditions={'function': check_repo,
934 'method': ['POST']},
947 'method': ['POST']},
935 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
948 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
936
949
937 rmap.connect('pullrequest_comment_delete',
950 rmap.connect('pullrequest_comment_delete',
938 '/{repo_name}/pull-request-comment/{comment_id}/delete',
951 '/{repo_name}/pull-request-comment/{comment_id}/delete',
939 controller='pullrequests', action='delete_comment',
952 controller='pullrequests', action='delete_comment',
940 conditions={'function': check_repo, 'method': ['DELETE']},
953 conditions={'function': check_repo, 'method': ['DELETE']},
941 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
954 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
942
955
943 rmap.connect('summary_home_explicit', '/{repo_name}/summary',
956 rmap.connect('summary_home_explicit', '/{repo_name}/summary',
944 controller='summary', conditions={'function': check_repo},
957 controller='summary', conditions={'function': check_repo},
945 requirements=URL_NAME_REQUIREMENTS)
958 requirements=URL_NAME_REQUIREMENTS)
946
959
947 rmap.connect('branches_home', '/{repo_name}/branches',
960 rmap.connect('branches_home', '/{repo_name}/branches',
948 controller='branches', conditions={'function': check_repo},
961 controller='branches', conditions={'function': check_repo},
949 requirements=URL_NAME_REQUIREMENTS)
962 requirements=URL_NAME_REQUIREMENTS)
950
963
951 rmap.connect('tags_home', '/{repo_name}/tags',
964 rmap.connect('tags_home', '/{repo_name}/tags',
952 controller='tags', conditions={'function': check_repo},
965 controller='tags', conditions={'function': check_repo},
953 requirements=URL_NAME_REQUIREMENTS)
966 requirements=URL_NAME_REQUIREMENTS)
954
967
955 rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
968 rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
956 controller='bookmarks', conditions={'function': check_repo},
969 controller='bookmarks', conditions={'function': check_repo},
957 requirements=URL_NAME_REQUIREMENTS)
970 requirements=URL_NAME_REQUIREMENTS)
958
971
959 rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
972 rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
960 controller='changelog', conditions={'function': check_repo},
973 controller='changelog', conditions={'function': check_repo},
961 requirements=URL_NAME_REQUIREMENTS)
974 requirements=URL_NAME_REQUIREMENTS)
962
975
963 rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
976 rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
964 controller='changelog', action='changelog_summary',
977 controller='changelog', action='changelog_summary',
965 conditions={'function': check_repo},
978 conditions={'function': check_repo},
966 requirements=URL_NAME_REQUIREMENTS)
979 requirements=URL_NAME_REQUIREMENTS)
967
980
968 rmap.connect('changelog_file_home',
981 rmap.connect('changelog_file_home',
969 '/{repo_name}/changelog/{revision}/{f_path}',
982 '/{repo_name}/changelog/{revision}/{f_path}',
970 controller='changelog', f_path=None,
983 controller='changelog', f_path=None,
971 conditions={'function': check_repo},
984 conditions={'function': check_repo},
972 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
985 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
973
986
974 rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
987 rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
975 controller='changelog', action='changelog_details',
988 controller='changelog', action='changelog_details',
976 conditions={'function': check_repo},
989 conditions={'function': check_repo},
977 requirements=URL_NAME_REQUIREMENTS)
990 requirements=URL_NAME_REQUIREMENTS)
978
991
979 rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
992 rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
980 controller='files', revision='tip', f_path='',
993 controller='files', revision='tip', f_path='',
981 conditions={'function': check_repo},
994 conditions={'function': check_repo},
982 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
995 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
983
996
984 rmap.connect('files_home_simple_catchrev',
997 rmap.connect('files_home_simple_catchrev',
985 '/{repo_name}/files/{revision}',
998 '/{repo_name}/files/{revision}',
986 controller='files', revision='tip', f_path='',
999 controller='files', revision='tip', f_path='',
987 conditions={'function': check_repo},
1000 conditions={'function': check_repo},
988 requirements=URL_NAME_REQUIREMENTS)
1001 requirements=URL_NAME_REQUIREMENTS)
989
1002
990 rmap.connect('files_home_simple_catchall',
1003 rmap.connect('files_home_simple_catchall',
991 '/{repo_name}/files',
1004 '/{repo_name}/files',
992 controller='files', revision='tip', f_path='',
1005 controller='files', revision='tip', f_path='',
993 conditions={'function': check_repo},
1006 conditions={'function': check_repo},
994 requirements=URL_NAME_REQUIREMENTS)
1007 requirements=URL_NAME_REQUIREMENTS)
995
1008
996 rmap.connect('files_history_home',
1009 rmap.connect('files_history_home',
997 '/{repo_name}/history/{revision}/{f_path}',
1010 '/{repo_name}/history/{revision}/{f_path}',
998 controller='files', action='history', revision='tip', f_path='',
1011 controller='files', action='history', revision='tip', f_path='',
999 conditions={'function': check_repo},
1012 conditions={'function': check_repo},
1000 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1013 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1001
1014
1002 rmap.connect('files_authors_home',
1015 rmap.connect('files_authors_home',
1003 '/{repo_name}/authors/{revision}/{f_path}',
1016 '/{repo_name}/authors/{revision}/{f_path}',
1004 controller='files', action='authors', revision='tip', f_path='',
1017 controller='files', action='authors', revision='tip', f_path='',
1005 conditions={'function': check_repo},
1018 conditions={'function': check_repo},
1006 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1019 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1007
1020
1008 rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
1021 rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
1009 controller='files', action='diff', f_path='',
1022 controller='files', action='diff', f_path='',
1010 conditions={'function': check_repo},
1023 conditions={'function': check_repo},
1011 requirements=URL_NAME_REQUIREMENTS)
1024 requirements=URL_NAME_REQUIREMENTS)
1012
1025
1013 rmap.connect('files_diff_2way_home',
1026 rmap.connect('files_diff_2way_home',
1014 '/{repo_name}/diff-2way/{f_path}',
1027 '/{repo_name}/diff-2way/{f_path}',
1015 controller='files', action='diff_2way', f_path='',
1028 controller='files', action='diff_2way', f_path='',
1016 conditions={'function': check_repo},
1029 conditions={'function': check_repo},
1017 requirements=URL_NAME_REQUIREMENTS)
1030 requirements=URL_NAME_REQUIREMENTS)
1018
1031
1019 rmap.connect('files_rawfile_home',
1032 rmap.connect('files_rawfile_home',
1020 '/{repo_name}/rawfile/{revision}/{f_path}',
1033 '/{repo_name}/rawfile/{revision}/{f_path}',
1021 controller='files', action='rawfile', revision='tip',
1034 controller='files', action='rawfile', revision='tip',
1022 f_path='', conditions={'function': check_repo},
1035 f_path='', conditions={'function': check_repo},
1023 requirements=URL_NAME_REQUIREMENTS)
1036 requirements=URL_NAME_REQUIREMENTS)
1024
1037
1025 rmap.connect('files_raw_home',
1038 rmap.connect('files_raw_home',
1026 '/{repo_name}/raw/{revision}/{f_path}',
1039 '/{repo_name}/raw/{revision}/{f_path}',
1027 controller='files', action='raw', revision='tip', f_path='',
1040 controller='files', action='raw', revision='tip', f_path='',
1028 conditions={'function': check_repo},
1041 conditions={'function': check_repo},
1029 requirements=URL_NAME_REQUIREMENTS)
1042 requirements=URL_NAME_REQUIREMENTS)
1030
1043
1031 rmap.connect('files_render_home',
1044 rmap.connect('files_render_home',
1032 '/{repo_name}/render/{revision}/{f_path}',
1045 '/{repo_name}/render/{revision}/{f_path}',
1033 controller='files', action='index', revision='tip', f_path='',
1046 controller='files', action='index', revision='tip', f_path='',
1034 rendered=True, conditions={'function': check_repo},
1047 rendered=True, conditions={'function': check_repo},
1035 requirements=URL_NAME_REQUIREMENTS)
1048 requirements=URL_NAME_REQUIREMENTS)
1036
1049
1037 rmap.connect('files_annotate_home',
1050 rmap.connect('files_annotate_home',
1038 '/{repo_name}/annotate/{revision}/{f_path}',
1051 '/{repo_name}/annotate/{revision}/{f_path}',
1039 controller='files', action='index', revision='tip',
1052 controller='files', action='index', revision='tip',
1040 f_path='', annotate=True, conditions={'function': check_repo},
1053 f_path='', annotate=True, conditions={'function': check_repo},
1041 requirements=URL_NAME_REQUIREMENTS)
1054 requirements=URL_NAME_REQUIREMENTS)
1042
1055
1043 rmap.connect('files_edit',
1056 rmap.connect('files_edit',
1044 '/{repo_name}/edit/{revision}/{f_path}',
1057 '/{repo_name}/edit/{revision}/{f_path}',
1045 controller='files', action='edit', revision='tip',
1058 controller='files', action='edit', revision='tip',
1046 f_path='',
1059 f_path='',
1047 conditions={'function': check_repo, 'method': ['POST']},
1060 conditions={'function': check_repo, 'method': ['POST']},
1048 requirements=URL_NAME_REQUIREMENTS)
1061 requirements=URL_NAME_REQUIREMENTS)
1049
1062
1050 rmap.connect('files_edit_home',
1063 rmap.connect('files_edit_home',
1051 '/{repo_name}/edit/{revision}/{f_path}',
1064 '/{repo_name}/edit/{revision}/{f_path}',
1052 controller='files', action='edit_home', revision='tip',
1065 controller='files', action='edit_home', revision='tip',
1053 f_path='', conditions={'function': check_repo},
1066 f_path='', conditions={'function': check_repo},
1054 requirements=URL_NAME_REQUIREMENTS)
1067 requirements=URL_NAME_REQUIREMENTS)
1055
1068
1056 rmap.connect('files_add',
1069 rmap.connect('files_add',
1057 '/{repo_name}/add/{revision}/{f_path}',
1070 '/{repo_name}/add/{revision}/{f_path}',
1058 controller='files', action='add', revision='tip',
1071 controller='files', action='add', revision='tip',
1059 f_path='',
1072 f_path='',
1060 conditions={'function': check_repo, 'method': ['POST']},
1073 conditions={'function': check_repo, 'method': ['POST']},
1061 requirements=URL_NAME_REQUIREMENTS)
1074 requirements=URL_NAME_REQUIREMENTS)
1062
1075
1063 rmap.connect('files_add_home',
1076 rmap.connect('files_add_home',
1064 '/{repo_name}/add/{revision}/{f_path}',
1077 '/{repo_name}/add/{revision}/{f_path}',
1065 controller='files', action='add_home', revision='tip',
1078 controller='files', action='add_home', revision='tip',
1066 f_path='', conditions={'function': check_repo},
1079 f_path='', conditions={'function': check_repo},
1067 requirements=URL_NAME_REQUIREMENTS)
1080 requirements=URL_NAME_REQUIREMENTS)
1068
1081
1069 rmap.connect('files_delete',
1082 rmap.connect('files_delete',
1070 '/{repo_name}/delete/{revision}/{f_path}',
1083 '/{repo_name}/delete/{revision}/{f_path}',
1071 controller='files', action='delete', revision='tip',
1084 controller='files', action='delete', revision='tip',
1072 f_path='',
1085 f_path='',
1073 conditions={'function': check_repo, 'method': ['POST']},
1086 conditions={'function': check_repo, 'method': ['POST']},
1074 requirements=URL_NAME_REQUIREMENTS)
1087 requirements=URL_NAME_REQUIREMENTS)
1075
1088
1076 rmap.connect('files_delete_home',
1089 rmap.connect('files_delete_home',
1077 '/{repo_name}/delete/{revision}/{f_path}',
1090 '/{repo_name}/delete/{revision}/{f_path}',
1078 controller='files', action='delete_home', revision='tip',
1091 controller='files', action='delete_home', revision='tip',
1079 f_path='', conditions={'function': check_repo},
1092 f_path='', conditions={'function': check_repo},
1080 requirements=URL_NAME_REQUIREMENTS)
1093 requirements=URL_NAME_REQUIREMENTS)
1081
1094
1082 rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
1095 rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
1083 controller='files', action='archivefile',
1096 controller='files', action='archivefile',
1084 conditions={'function': check_repo},
1097 conditions={'function': check_repo},
1085 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1098 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1086
1099
1087 rmap.connect('files_nodelist_home',
1100 rmap.connect('files_nodelist_home',
1088 '/{repo_name}/nodelist/{revision}/{f_path}',
1101 '/{repo_name}/nodelist/{revision}/{f_path}',
1089 controller='files', action='nodelist',
1102 controller='files', action='nodelist',
1090 conditions={'function': check_repo},
1103 conditions={'function': check_repo},
1091 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1104 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1092
1105
1093 rmap.connect('files_metadata_list_home',
1106 rmap.connect('files_metadata_list_home',
1094 '/{repo_name}/metadata_list/{revision}/{f_path}',
1107 '/{repo_name}/metadata_list/{revision}/{f_path}',
1095 controller='files', action='metadata_list',
1108 controller='files', action='metadata_list',
1096 conditions={'function': check_repo},
1109 conditions={'function': check_repo},
1097 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1110 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1098
1111
1099 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
1112 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
1100 controller='forks', action='fork_create',
1113 controller='forks', action='fork_create',
1101 conditions={'function': check_repo, 'method': ['POST']},
1114 conditions={'function': check_repo, 'method': ['POST']},
1102 requirements=URL_NAME_REQUIREMENTS)
1115 requirements=URL_NAME_REQUIREMENTS)
1103
1116
1104 rmap.connect('repo_fork_home', '/{repo_name}/fork',
1117 rmap.connect('repo_fork_home', '/{repo_name}/fork',
1105 controller='forks', action='fork',
1118 controller='forks', action='fork',
1106 conditions={'function': check_repo},
1119 conditions={'function': check_repo},
1107 requirements=URL_NAME_REQUIREMENTS)
1120 requirements=URL_NAME_REQUIREMENTS)
1108
1121
1109 rmap.connect('repo_forks_home', '/{repo_name}/forks',
1122 rmap.connect('repo_forks_home', '/{repo_name}/forks',
1110 controller='forks', action='forks',
1123 controller='forks', action='forks',
1111 conditions={'function': check_repo},
1124 conditions={'function': check_repo},
1112 requirements=URL_NAME_REQUIREMENTS)
1125 requirements=URL_NAME_REQUIREMENTS)
1113
1126
1114 rmap.connect('repo_followers_home', '/{repo_name}/followers',
1127 rmap.connect('repo_followers_home', '/{repo_name}/followers',
1115 controller='followers', action='followers',
1128 controller='followers', action='followers',
1116 conditions={'function': check_repo},
1129 conditions={'function': check_repo},
1117 requirements=URL_NAME_REQUIREMENTS)
1130 requirements=URL_NAME_REQUIREMENTS)
1118
1131
1119 # must be here for proper group/repo catching pattern
1132 # must be here for proper group/repo catching pattern
1120 _connect_with_slash(
1133 _connect_with_slash(
1121 rmap, 'repo_group_home', '/{group_name}',
1134 rmap, 'repo_group_home', '/{group_name}',
1122 controller='home', action='index_repo_group',
1135 controller='home', action='index_repo_group',
1123 conditions={'function': check_group},
1136 conditions={'function': check_group},
1124 requirements=URL_NAME_REQUIREMENTS)
1137 requirements=URL_NAME_REQUIREMENTS)
1125
1138
1126 # catch all, at the end
1139 # catch all, at the end
1127 _connect_with_slash(
1140 _connect_with_slash(
1128 rmap, 'summary_home', '/{repo_name}', jsroute=True,
1141 rmap, 'summary_home', '/{repo_name}', jsroute=True,
1129 controller='summary', action='index',
1142 controller='summary', action='index',
1130 conditions={'function': check_repo},
1143 conditions={'function': check_repo},
1131 requirements=URL_NAME_REQUIREMENTS)
1144 requirements=URL_NAME_REQUIREMENTS)
1132
1145
1133 return rmap
1146 return rmap
1134
1147
1135
1148
1136 def _connect_with_slash(mapper, name, path, *args, **kwargs):
1149 def _connect_with_slash(mapper, name, path, *args, **kwargs):
1137 """
1150 """
1138 Connect a route with an optional trailing slash in `path`.
1151 Connect a route with an optional trailing slash in `path`.
1139 """
1152 """
1140 mapper.connect(name + '_slash', path + '/', *args, **kwargs)
1153 mapper.connect(name + '_slash', path + '/', *args, **kwargs)
1141 mapper.connect(name, path, *args, **kwargs)
1154 mapper.connect(name, path, *args, **kwargs)
@@ -1,57 +1,66 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from pyramid.threadlocal import get_current_registry
19 from pyramid.threadlocal import get_current_registry
20
20
21
21
22 def trigger(event):
22 def trigger(event, registry=None):
23 """
23 """
24 Helper method to send an event. This wraps the pyramid logic to send an
24 Helper method to send an event. This wraps the pyramid logic to send an
25 event.
25 event.
26 """
26 """
27 # For the first step we are using pyramids thread locals here. If the
27 # For the first step we are using pyramids thread locals here. If the
28 # event mechanism works out as a good solution we should think about
28 # event mechanism works out as a good solution we should think about
29 # passing the registry as an argument to get rid of it.
29 # passing the registry as an argument to get rid of it.
30 registry = get_current_registry()
30 registry = registry or get_current_registry()
31 registry.notify(event)
31 registry.notify(event)
32
32
33 # Until we can work around the problem that VCS operations do not have a
34 # pyramid context to work with, we send the events to integrations directly
35
36 # Later it will be possible to use regular pyramid subscribers ie:
37 # config.add_subscriber(integrations_event_handler, RhodecodeEvent)
38 from rhodecode.integrations import integrations_event_handler
39 if isinstance(event, RhodecodeEvent):
40 integrations_event_handler(event)
41
33
42
34 from rhodecode.events.base import RhodecodeEvent
43 from rhodecode.events.base import RhodecodeEvent
35
44
36 from rhodecode.events.user import (
45 from rhodecode.events.user import (
37 UserPreCreate,
46 UserPreCreate,
38 UserPreUpdate,
47 UserPreUpdate,
39 UserRegistered
48 UserRegistered
40 )
49 )
41
50
42 from rhodecode.events.repo import (
51 from rhodecode.events.repo import (
43 RepoEvent,
52 RepoEvent,
44 RepoPreCreateEvent, RepoCreatedEvent,
53 RepoPreCreateEvent, RepoCreateEvent,
45 RepoPreDeleteEvent, RepoDeletedEvent,
54 RepoPreDeleteEvent, RepoDeleteEvent,
46 RepoPrePushEvent, RepoPushEvent,
55 RepoPrePushEvent, RepoPushEvent,
47 RepoPrePullEvent, RepoPullEvent,
56 RepoPrePullEvent, RepoPullEvent,
48 )
57 )
49
58
50 from rhodecode.events.pullrequest import (
59 from rhodecode.events.pullrequest import (
51 PullRequestEvent,
60 PullRequestEvent,
52 PullRequestCreateEvent,
61 PullRequestCreateEvent,
53 PullRequestUpdateEvent,
62 PullRequestUpdateEvent,
54 PullRequestReviewEvent,
63 PullRequestReviewEvent,
55 PullRequestMergeEvent,
64 PullRequestMergeEvent,
56 PullRequestCloseEvent,
65 PullRequestCloseEvent,
57 ) No newline at end of file
66 )
@@ -1,71 +1,59 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from datetime import datetime
19 from datetime import datetime
20 from marshmallow import Schema, fields
21 from pyramid.threadlocal import get_current_request
20 from pyramid.threadlocal import get_current_request
22 from rhodecode.lib.utils2 import AttributeDict
21 from rhodecode.lib.utils2 import AttributeDict
23
22
24
23
25 # this is a user object to be used for events caused by the system (eg. shell)
24 # this is a user object to be used for events caused by the system (eg. shell)
26 SYSTEM_USER = AttributeDict(dict(
25 SYSTEM_USER = AttributeDict(dict(
27 username='__SYSTEM__'
26 username='__SYSTEM__'
28 ))
27 ))
29
28
30
29
31 class UserSchema(Schema):
32 """
33 Marshmallow schema for a user
34 """
35 username = fields.Str()
36
37
38 class RhodecodeEventSchema(Schema):
39 """
40 Marshmallow schema for a rhodecode event
41 """
42 utc_timestamp = fields.DateTime()
43 actor = fields.Nested(UserSchema)
44 actor_ip = fields.Str()
45 name = fields.Str()
46
47
48 class RhodecodeEvent(object):
30 class RhodecodeEvent(object):
49 """
31 """
50 Base event class for all Rhodecode events
32 Base event class for all Rhodecode events
51 """
33 """
52 MarshmallowSchema = RhodecodeEventSchema
53
54 def __init__(self):
34 def __init__(self):
55 self.request = get_current_request()
35 self.request = get_current_request()
56 self.utc_timestamp = datetime.utcnow()
36 self.utc_timestamp = datetime.utcnow()
57
37
58 @property
38 @property
59 def actor(self):
39 def actor(self):
60 if self.request:
40 if self.request:
61 return self.request.user.get_instance()
41 return self.request.user.get_instance()
62 return SYSTEM_USER
42 return SYSTEM_USER
63
43
64 @property
44 @property
65 def actor_ip(self):
45 def actor_ip(self):
66 if self.request:
46 if self.request:
67 return self.request.user.ip_addr
47 return self.request.user.ip_addr
68 return '<no ip available>'
48 return '<no ip available>'
69
49
70 def as_dict(self):
50 def as_dict(self):
71 return self.MarshmallowSchema().dump(self).data
51 data = {
52 'name': self.name,
53 'utc_timestamp': self.utc_timestamp,
54 'actor_ip': self.actor_ip,
55 'actor': {
56 'username': self.actor.username
57 }
58 }
59 return data No newline at end of file
@@ -1,97 +1,97 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
20
19
20 from rhodecode.translation import lazy_ugettext
21 from rhodecode.events.repo import RepoEvent
21 from rhodecode.events.repo import RepoEvent
22
22
23
23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
45 class PullRequestEvent(RepoEvent):
24 class PullRequestEvent(RepoEvent):
46 """
25 """
47 Base class for pull request events.
26 Base class for pull request events.
48
27
49 :param pullrequest: a :class:`PullRequest` instance
28 :param pullrequest: a :class:`PullRequest` instance
50 """
29 """
51 MarshmallowSchema = PullRequestEventSchema
52
30
53 def __init__(self, pullrequest):
31 def __init__(self, pullrequest):
54 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
32 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
55 self.pullrequest = pullrequest
33 self.pullrequest = pullrequest
56
34
35 def as_dict(self):
36 from rhodecode.model.pull_request import PullRequestModel
37 data = super(PullRequestEvent, self).as_dict()
38
39 commits = self._commits_as_dict(self.pullrequest.revisions)
40 issues = self._issues_as_dict(commits)
41
42 data.update({
43 'pullrequest': {
44 'title': self.pullrequest.title,
45 'issues': issues,
46 'pull_request_id': self.pullrequest.pull_request_id,
47 'url': PullRequestModel().get_url(self.pullrequest)
48 }
49 })
50 return data
51
57
52
58 class PullRequestCreateEvent(PullRequestEvent):
53 class PullRequestCreateEvent(PullRequestEvent):
59 """
54 """
60 An instance of this class is emitted as an :term:`event` after a pull
55 An instance of this class is emitted as an :term:`event` after a pull
61 request is created.
56 request is created.
62 """
57 """
63 name = 'pullrequest-create'
58 name = 'pullrequest-create'
59 display_name = lazy_ugettext('pullrequest created')
64
60
65
61
66 class PullRequestCloseEvent(PullRequestEvent):
62 class PullRequestCloseEvent(PullRequestEvent):
67 """
63 """
68 An instance of this class is emitted as an :term:`event` after a pull
64 An instance of this class is emitted as an :term:`event` after a pull
69 request is closed.
65 request is closed.
70 """
66 """
71 name = 'pullrequest-close'
67 name = 'pullrequest-close'
68 display_name = lazy_ugettext('pullrequest closed')
72
69
73
70
74 class PullRequestUpdateEvent(PullRequestEvent):
71 class PullRequestUpdateEvent(PullRequestEvent):
75 """
72 """
76 An instance of this class is emitted as an :term:`event` after a pull
73 An instance of this class is emitted as an :term:`event` after a pull
77 request is updated.
74 request is updated.
78 """
75 """
79 name = 'pullrequest-update'
76 name = 'pullrequest-update'
77 display_name = lazy_ugettext('pullrequest updated')
80
78
81
79
82 class PullRequestMergeEvent(PullRequestEvent):
80 class PullRequestMergeEvent(PullRequestEvent):
83 """
81 """
84 An instance of this class is emitted as an :term:`event` after a pull
82 An instance of this class is emitted as an :term:`event` after a pull
85 request is merged.
83 request is merged.
86 """
84 """
87 name = 'pullrequest-merge'
85 name = 'pullrequest-merge'
86 display_name = lazy_ugettext('pullrequest merged')
88
87
89
88
90 class PullRequestReviewEvent(PullRequestEvent):
89 class PullRequestReviewEvent(PullRequestEvent):
91 """
90 """
92 An instance of this class is emitted as an :term:`event` after a pull
91 An instance of this class is emitted as an :term:`event` after a pull
93 request is reviewed.
92 request is reviewed.
94 """
93 """
95 name = 'pullrequest-review'
94 name = 'pullrequest-review'
95 display_name = lazy_ugettext('pullrequest reviewed')
96
96
97
97
@@ -1,149 +1,219 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
19 import logging
20
20
21 from rhodecode.translation import lazy_ugettext
21 from rhodecode.model.db import User, Repository, Session
22 from rhodecode.model.db import User, Repository, Session
22 from rhodecode.events.base import RhodecodeEvent
23 from rhodecode.events.base import RhodecodeEvent
23
24
24
25 log = logging.getLogger()
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
28
29
30 class RepositorySchema(Schema):
31 """
32 Marshmallow schema for a repository
33 """
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
36 url = fields.Function(get_repo_url)
37
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
41 Marshmallow schema for a repository event
42 """
43 repo = fields.Nested(RepositorySchema)
44
26
45
27
46 class RepoEvent(RhodecodeEvent):
28 class RepoEvent(RhodecodeEvent):
47 """
29 """
48 Base class for events acting on a repository.
30 Base class for events acting on a repository.
49
31
50 :param repo: a :class:`Repository` instance
32 :param repo: a :class:`Repository` instance
51 """
33 """
52 MarshmallowSchema = RepoEventSchema
53
34
54 def __init__(self, repo):
35 def __init__(self, repo):
55 super(RepoEvent, self).__init__()
36 super(RepoEvent, self).__init__()
56 self.repo = repo
37 self.repo = repo
57
38
39 def as_dict(self):
40 from rhodecode.model.repo import RepoModel
41 data = super(RepoEvent, self).as_dict()
42 data.update({
43 'repo': {
44 'repo_id': self.repo.repo_id,
45 'repo_name': self.repo.repo_name,
46 'url': RepoModel().get_url(self.repo)
47 }
48 })
49 return data
50
51 def _commits_as_dict(self, commit_ids):
52 """ Helper function to serialize commit_ids """
53
54 from rhodecode.lib.utils2 import extract_mentioned_users
55 from rhodecode.model.db import Repository
56 from rhodecode.lib import helpers as h
57 from rhodecode.lib.helpers import process_patterns
58 from rhodecode.lib.helpers import urlify_commit_message
59 if not commit_ids:
60 return []
61 commits = []
62 reviewers = []
63 vcs_repo = self.repo.scm_instance(cache=False)
64 try:
65 for commit_id in commit_ids:
66 cs = vcs_repo.get_changeset(commit_id)
67 cs_data = cs.__json__()
68 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
69 cs_data['reviewers'] = reviewers
70 cs_data['url'] = h.url('changeset_home',
71 repo_name=self.repo.repo_name,
72 revision=cs_data['raw_id'],
73 qualified=True
74 )
75 urlified_message, issues_data = process_patterns(
76 cs_data['message'], self.repo.repo_name)
77 cs_data['issues'] = issues_data
78 cs_data['message_html'] = urlify_commit_message(cs_data['message'],
79 self.repo.repo_name)
80 commits.append(cs_data)
81 except Exception as e:
82 log.exception(e)
83 # we don't send any commits when crash happens, only full list matters
84 # we short circuit then.
85 return []
86 return commits
87
88 def _issues_as_dict(self, commits):
89 """ Helper function to serialize issues from commits """
90 issues = {}
91 for commit in commits:
92 for issue in commit['issues']:
93 issues[issue['id']] = issue
94 return issues
95
58
96
59 class RepoPreCreateEvent(RepoEvent):
97 class RepoPreCreateEvent(RepoEvent):
60 """
98 """
61 An instance of this class is emitted as an :term:`event` before a repo is
99 An instance of this class is emitted as an :term:`event` before a repo is
62 created.
100 created.
63 """
101 """
64 name = 'repo-pre-create'
102 name = 'repo-pre-create'
103 display_name = lazy_ugettext('repository pre create')
65
104
66
105
67 class RepoCreatedEvent(RepoEvent):
106 class RepoCreateEvent(RepoEvent):
68 """
107 """
69 An instance of this class is emitted as an :term:`event` whenever a repo is
108 An instance of this class is emitted as an :term:`event` whenever a repo is
70 created.
109 created.
71 """
110 """
72 name = 'repo-created'
111 name = 'repo-create'
112 display_name = lazy_ugettext('repository created')
73
113
74
114
75 class RepoPreDeleteEvent(RepoEvent):
115 class RepoPreDeleteEvent(RepoEvent):
76 """
116 """
77 An instance of this class is emitted as an :term:`event` whenever a repo is
117 An instance of this class is emitted as an :term:`event` whenever a repo is
78 created.
118 created.
79 """
119 """
80 name = 'repo-pre-delete'
120 name = 'repo-pre-delete'
121 display_name = lazy_ugettext('repository pre delete')
81
122
82
123
83 class RepoDeletedEvent(RepoEvent):
124 class RepoDeleteEvent(RepoEvent):
84 """
125 """
85 An instance of this class is emitted as an :term:`event` whenever a repo is
126 An instance of this class is emitted as an :term:`event` whenever a repo is
86 created.
127 created.
87 """
128 """
88 name = 'repo-deleted'
129 name = 'repo-delete'
130 display_name = lazy_ugettext('repository deleted')
89
131
90
132
91 class RepoVCSEvent(RepoEvent):
133 class RepoVCSEvent(RepoEvent):
92 """
134 """
93 Base class for events triggered by the VCS
135 Base class for events triggered by the VCS
94 """
136 """
95 def __init__(self, repo_name, extras):
137 def __init__(self, repo_name, extras):
96 self.repo = Repository.get_by_repo_name(repo_name)
138 self.repo = Repository.get_by_repo_name(repo_name)
97 if not self.repo:
139 if not self.repo:
98 raise Exception('repo by this name %s does not exist' % repo_name)
140 raise Exception('repo by this name %s does not exist' % repo_name)
99 self.extras = extras
141 self.extras = extras
100 super(RepoVCSEvent, self).__init__(self.repo)
142 super(RepoVCSEvent, self).__init__(self.repo)
101
143
102 @property
144 @property
103 def actor(self):
145 def actor(self):
104 if self.extras.get('username'):
146 if self.extras.get('username'):
105 return User.get_by_username(self.extras['username'])
147 return User.get_by_username(self.extras['username'])
106
148
107 @property
149 @property
108 def actor_ip(self):
150 def actor_ip(self):
109 if self.extras.get('ip'):
151 if self.extras.get('ip'):
110 return self.extras['ip']
152 return self.extras['ip']
111
153
112
154
113 class RepoPrePullEvent(RepoVCSEvent):
155 class RepoPrePullEvent(RepoVCSEvent):
114 """
156 """
115 An instance of this class is emitted as an :term:`event` before commits
157 An instance of this class is emitted as an :term:`event` before commits
116 are pulled from a repo.
158 are pulled from a repo.
117 """
159 """
118 name = 'repo-pre-pull'
160 name = 'repo-pre-pull'
161 display_name = lazy_ugettext('repository pre pull')
119
162
120
163
121 class RepoPullEvent(RepoVCSEvent):
164 class RepoPullEvent(RepoVCSEvent):
122 """
165 """
123 An instance of this class is emitted as an :term:`event` after commits
166 An instance of this class is emitted as an :term:`event` after commits
124 are pulled from a repo.
167 are pulled from a repo.
125 """
168 """
126 name = 'repo-pull'
169 name = 'repo-pull'
170 display_name = lazy_ugettext('repository pull')
127
171
128
172
129 class RepoPrePushEvent(RepoVCSEvent):
173 class RepoPrePushEvent(RepoVCSEvent):
130 """
174 """
131 An instance of this class is emitted as an :term:`event` before commits
175 An instance of this class is emitted as an :term:`event` before commits
132 are pushed to a repo.
176 are pushed to a repo.
133 """
177 """
134 name = 'repo-pre-push'
178 name = 'repo-pre-push'
179 display_name = lazy_ugettext('repository pre push')
135
180
136
181
137 class RepoPushEvent(RepoVCSEvent):
182 class RepoPushEvent(RepoVCSEvent):
138 """
183 """
139 An instance of this class is emitted as an :term:`event` after commits
184 An instance of this class is emitted as an :term:`event` after commits
140 are pushed to a repo.
185 are pushed to a repo.
141
186
142 :param extras: (optional) dict of data from proxied VCS actions
187 :param extras: (optional) dict of data from proxied VCS actions
143 """
188 """
144 name = 'repo-push'
189 name = 'repo-push'
190 display_name = lazy_ugettext('repository push')
145
191
146 def __init__(self, repo_name, pushed_commit_ids, extras):
192 def __init__(self, repo_name, pushed_commit_ids, extras):
147 super(RepoPushEvent, self).__init__(repo_name, extras)
193 super(RepoPushEvent, self).__init__(repo_name, extras)
148 self.pushed_commit_ids = pushed_commit_ids
194 self.pushed_commit_ids = pushed_commit_ids
149
195
196 def as_dict(self):
197 data = super(RepoPushEvent, self).as_dict()
198 branch_url = repo_url = data['repo']['url']
199
200 commits = self._commits_as_dict(self.pushed_commit_ids)
201 issues = self._issues_as_dict(commits)
202
203 branches = set(
204 commit['branch'] for commit in commits if commit['branch'])
205 branches = [
206 {
207 'name': branch,
208 'url': '{}/changelog?branch={}'.format(
209 data['repo']['url'], branch)
210 }
211 for branch in branches
212 ]
213
214 data['push'] = {
215 'commits': commits,
216 'issues': issues,
217 'branches': branches,
218 }
219 return data No newline at end of file
@@ -1,55 +1,65 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from zope.interface import implementer
19 from zope.interface import implementer
20
20
21 from rhodecode.translation import lazy_ugettext
21 from rhodecode.events.base import RhodecodeEvent
22 from rhodecode.events.base import RhodecodeEvent
22 from rhodecode.events.interfaces import (
23 from rhodecode.events.interfaces import (
23 IUserRegistered, IUserPreCreate, IUserPreUpdate)
24 IUserRegistered, IUserPreCreate, IUserPreUpdate)
24
25
25
26
26 @implementer(IUserRegistered)
27 @implementer(IUserRegistered)
27 class UserRegistered(RhodecodeEvent):
28 class UserRegistered(RhodecodeEvent):
28 """
29 """
29 An instance of this class is emitted as an :term:`event` whenever a user
30 An instance of this class is emitted as an :term:`event` whenever a user
30 account is registered.
31 account is registered.
31 """
32 """
33 name = 'user-register'
34 display_name = lazy_ugettext('user registered')
35
32 def __init__(self, user, session):
36 def __init__(self, user, session):
33 self.user = user
37 self.user = user
34 self.session = session
38 self.session = session
35
39
36
40
37 @implementer(IUserPreCreate)
41 @implementer(IUserPreCreate)
38 class UserPreCreate(RhodecodeEvent):
42 class UserPreCreate(RhodecodeEvent):
39 """
43 """
40 An instance of this class is emitted as an :term:`event` before a new user
44 An instance of this class is emitted as an :term:`event` before a new user
41 object is created.
45 object is created.
42 """
46 """
47 name = 'user-pre-create'
48 display_name = lazy_ugettext('user pre create')
49
43 def __init__(self, user_data):
50 def __init__(self, user_data):
44 self.user_data = user_data
51 self.user_data = user_data
45
52
46
53
47 @implementer(IUserPreUpdate)
54 @implementer(IUserPreUpdate)
48 class UserPreUpdate(RhodecodeEvent):
55 class UserPreUpdate(RhodecodeEvent):
49 """
56 """
50 An instance of this class is emitted as an :term:`event` before a user
57 An instance of this class is emitted as an :term:`event` before a user
51 object is updated.
58 object is updated.
52 """
59 """
60 name = 'user-pre-update'
61 display_name = lazy_ugettext('user pre update')
62
53 def __init__(self, user, user_data):
63 def __init__(self, user, user_data):
54 self.user = user
64 self.user = user
55 self.user_data = user_data
65 self.user_data = user_data
@@ -1,1900 +1,1931 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions to typically be used within templates, but also
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27
27
28 import random
28 import random
29 import hashlib
29 import hashlib
30 import StringIO
30 import StringIO
31 import urllib
31 import urllib
32 import math
32 import math
33 import logging
33 import logging
34 import re
34 import re
35 import urlparse
35 import urlparse
36 import time
36 import time
37 import string
37 import string
38 import hashlib
38 import hashlib
39 import pygments
39 import pygments
40
40
41 from datetime import datetime
41 from datetime import datetime
42 from functools import partial
42 from functools import partial
43 from pygments.formatters.html import HtmlFormatter
43 from pygments.formatters.html import HtmlFormatter
44 from pygments import highlight as code_highlight
44 from pygments import highlight as code_highlight
45 from pygments.lexers import (
45 from pygments.lexers import (
46 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
46 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
47 from pylons import url
47 from pylons import url as pylons_url
48 from pylons.i18n.translation import _, ungettext
48 from pylons.i18n.translation import _, ungettext
49 from pyramid.threadlocal import get_current_request
49 from pyramid.threadlocal import get_current_request
50
50
51 from webhelpers.html import literal, HTML, escape
51 from webhelpers.html import literal, HTML, escape
52 from webhelpers.html.tools import *
52 from webhelpers.html.tools import *
53 from webhelpers.html.builder import make_tag
53 from webhelpers.html.builder import make_tag
54 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
54 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
55 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
55 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
56 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
56 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
57 submit, text, password, textarea, title, ul, xml_declaration, radio
57 submit, text, password, textarea, title, ul, xml_declaration, radio
58 from webhelpers.html.tools import auto_link, button_to, highlight, \
58 from webhelpers.html.tools import auto_link, button_to, highlight, \
59 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
59 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
60 from webhelpers.pylonslib import Flash as _Flash
60 from webhelpers.pylonslib import Flash as _Flash
61 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
61 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
62 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
62 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
63 replace_whitespace, urlify, truncate, wrap_paragraphs
63 replace_whitespace, urlify, truncate, wrap_paragraphs
64 from webhelpers.date import time_ago_in_words
64 from webhelpers.date import time_ago_in_words
65 from webhelpers.paginate import Page as _Page
65 from webhelpers.paginate import Page as _Page
66 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
66 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
67 convert_boolean_attrs, NotGiven, _make_safe_id_component
67 convert_boolean_attrs, NotGiven, _make_safe_id_component
68 from webhelpers2.number import format_byte_size
68 from webhelpers2.number import format_byte_size
69
69
70 from rhodecode.lib.annotate import annotate_highlight
70 from rhodecode.lib.annotate import annotate_highlight
71 from rhodecode.lib.action_parser import action_parser
71 from rhodecode.lib.action_parser import action_parser
72 from rhodecode.lib.ext_json import json
72 from rhodecode.lib.ext_json import json
73 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
73 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
74 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
74 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
75 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
75 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
76 AttributeDict, safe_int, md5, md5_safe
76 AttributeDict, safe_int, md5, md5_safe
77 from rhodecode.lib.markup_renderer import MarkupRenderer
77 from rhodecode.lib.markup_renderer import MarkupRenderer
78 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
78 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
79 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
79 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
80 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
80 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
81 from rhodecode.model.changeset_status import ChangesetStatusModel
81 from rhodecode.model.changeset_status import ChangesetStatusModel
82 from rhodecode.model.db import Permission, User, Repository
82 from rhodecode.model.db import Permission, User, Repository
83 from rhodecode.model.repo_group import RepoGroupModel
83 from rhodecode.model.repo_group import RepoGroupModel
84 from rhodecode.model.settings import IssueTrackerSettingsModel
84 from rhodecode.model.settings import IssueTrackerSettingsModel
85
85
86 log = logging.getLogger(__name__)
86 log = logging.getLogger(__name__)
87
87
88 DEFAULT_USER = User.DEFAULT_USER
88 DEFAULT_USER = User.DEFAULT_USER
89 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
89 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
90
90
91 def url(*args, **kw):
92 return pylons_url(*args, **kw)
93
94 def pylons_url_current(*args, **kw):
95 """
96 This function overrides pylons.url.current() which returns the current
97 path so that it will also work from a pyramid only context. This
98 should be removed once port to pyramid is complete.
99 """
100 if not args and not kw:
101 request = get_current_request()
102 return request.path
103 return pylons_url.current(*args, **kw)
104
105 url.current = pylons_url_current
106
91
107
92 def html_escape(text, html_escape_table=None):
108 def html_escape(text, html_escape_table=None):
93 """Produce entities within text."""
109 """Produce entities within text."""
94 if not html_escape_table:
110 if not html_escape_table:
95 html_escape_table = {
111 html_escape_table = {
96 "&": "&amp;",
112 "&": "&amp;",
97 '"': "&quot;",
113 '"': "&quot;",
98 "'": "&apos;",
114 "'": "&apos;",
99 ">": "&gt;",
115 ">": "&gt;",
100 "<": "&lt;",
116 "<": "&lt;",
101 }
117 }
102 return "".join(html_escape_table.get(c, c) for c in text)
118 return "".join(html_escape_table.get(c, c) for c in text)
103
119
104
120
105 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
121 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
106 """
122 """
107 Truncate string ``s`` at the first occurrence of ``sub``.
123 Truncate string ``s`` at the first occurrence of ``sub``.
108
124
109 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
125 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
110 """
126 """
111 suffix_if_chopped = suffix_if_chopped or ''
127 suffix_if_chopped = suffix_if_chopped or ''
112 pos = s.find(sub)
128 pos = s.find(sub)
113 if pos == -1:
129 if pos == -1:
114 return s
130 return s
115
131
116 if inclusive:
132 if inclusive:
117 pos += len(sub)
133 pos += len(sub)
118
134
119 chopped = s[:pos]
135 chopped = s[:pos]
120 left = s[pos:].strip()
136 left = s[pos:].strip()
121
137
122 if left and suffix_if_chopped:
138 if left and suffix_if_chopped:
123 chopped += suffix_if_chopped
139 chopped += suffix_if_chopped
124
140
125 return chopped
141 return chopped
126
142
127
143
128 def shorter(text, size=20):
144 def shorter(text, size=20):
129 postfix = '...'
145 postfix = '...'
130 if len(text) > size:
146 if len(text) > size:
131 return text[:size - len(postfix)] + postfix
147 return text[:size - len(postfix)] + postfix
132 return text
148 return text
133
149
134
150
135 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
151 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
136 """
152 """
137 Reset button
153 Reset button
138 """
154 """
139 _set_input_attrs(attrs, type, name, value)
155 _set_input_attrs(attrs, type, name, value)
140 _set_id_attr(attrs, id, name)
156 _set_id_attr(attrs, id, name)
141 convert_boolean_attrs(attrs, ["disabled"])
157 convert_boolean_attrs(attrs, ["disabled"])
142 return HTML.input(**attrs)
158 return HTML.input(**attrs)
143
159
144 reset = _reset
160 reset = _reset
145 safeid = _make_safe_id_component
161 safeid = _make_safe_id_component
146
162
147
163
148 def branding(name, length=40):
164 def branding(name, length=40):
149 return truncate(name, length, indicator="")
165 return truncate(name, length, indicator="")
150
166
151
167
152 def FID(raw_id, path):
168 def FID(raw_id, path):
153 """
169 """
154 Creates a unique ID for filenode based on it's hash of path and commit
170 Creates a unique ID for filenode based on it's hash of path and commit
155 it's safe to use in urls
171 it's safe to use in urls
156
172
157 :param raw_id:
173 :param raw_id:
158 :param path:
174 :param path:
159 """
175 """
160
176
161 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
177 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
162
178
163
179
164 class _GetError(object):
180 class _GetError(object):
165 """Get error from form_errors, and represent it as span wrapped error
181 """Get error from form_errors, and represent it as span wrapped error
166 message
182 message
167
183
168 :param field_name: field to fetch errors for
184 :param field_name: field to fetch errors for
169 :param form_errors: form errors dict
185 :param form_errors: form errors dict
170 """
186 """
171
187
172 def __call__(self, field_name, form_errors):
188 def __call__(self, field_name, form_errors):
173 tmpl = """<span class="error_msg">%s</span>"""
189 tmpl = """<span class="error_msg">%s</span>"""
174 if form_errors and field_name in form_errors:
190 if form_errors and field_name in form_errors:
175 return literal(tmpl % form_errors.get(field_name))
191 return literal(tmpl % form_errors.get(field_name))
176
192
177 get_error = _GetError()
193 get_error = _GetError()
178
194
179
195
180 class _ToolTip(object):
196 class _ToolTip(object):
181
197
182 def __call__(self, tooltip_title, trim_at=50):
198 def __call__(self, tooltip_title, trim_at=50):
183 """
199 """
184 Special function just to wrap our text into nice formatted
200 Special function just to wrap our text into nice formatted
185 autowrapped text
201 autowrapped text
186
202
187 :param tooltip_title:
203 :param tooltip_title:
188 """
204 """
189 tooltip_title = escape(tooltip_title)
205 tooltip_title = escape(tooltip_title)
190 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
206 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
191 return tooltip_title
207 return tooltip_title
192 tooltip = _ToolTip()
208 tooltip = _ToolTip()
193
209
194
210
195 def files_breadcrumbs(repo_name, commit_id, file_path):
211 def files_breadcrumbs(repo_name, commit_id, file_path):
196 if isinstance(file_path, str):
212 if isinstance(file_path, str):
197 file_path = safe_unicode(file_path)
213 file_path = safe_unicode(file_path)
198
214
199 # TODO: johbo: Is this always a url like path, or is this operating
215 # TODO: johbo: Is this always a url like path, or is this operating
200 # system dependent?
216 # system dependent?
201 path_segments = file_path.split('/')
217 path_segments = file_path.split('/')
202
218
203 repo_name_html = escape(repo_name)
219 repo_name_html = escape(repo_name)
204 if len(path_segments) == 1 and path_segments[0] == '':
220 if len(path_segments) == 1 and path_segments[0] == '':
205 url_segments = [repo_name_html]
221 url_segments = [repo_name_html]
206 else:
222 else:
207 url_segments = [
223 url_segments = [
208 link_to(
224 link_to(
209 repo_name_html,
225 repo_name_html,
210 url('files_home',
226 url('files_home',
211 repo_name=repo_name,
227 repo_name=repo_name,
212 revision=commit_id,
228 revision=commit_id,
213 f_path=''),
229 f_path=''),
214 class_='pjax-link')]
230 class_='pjax-link')]
215
231
216 last_cnt = len(path_segments) - 1
232 last_cnt = len(path_segments) - 1
217 for cnt, segment in enumerate(path_segments):
233 for cnt, segment in enumerate(path_segments):
218 if not segment:
234 if not segment:
219 continue
235 continue
220 segment_html = escape(segment)
236 segment_html = escape(segment)
221
237
222 if cnt != last_cnt:
238 if cnt != last_cnt:
223 url_segments.append(
239 url_segments.append(
224 link_to(
240 link_to(
225 segment_html,
241 segment_html,
226 url('files_home',
242 url('files_home',
227 repo_name=repo_name,
243 repo_name=repo_name,
228 revision=commit_id,
244 revision=commit_id,
229 f_path='/'.join(path_segments[:cnt + 1])),
245 f_path='/'.join(path_segments[:cnt + 1])),
230 class_='pjax-link'))
246 class_='pjax-link'))
231 else:
247 else:
232 url_segments.append(segment_html)
248 url_segments.append(segment_html)
233
249
234 return literal('/'.join(url_segments))
250 return literal('/'.join(url_segments))
235
251
236
252
237 class CodeHtmlFormatter(HtmlFormatter):
253 class CodeHtmlFormatter(HtmlFormatter):
238 """
254 """
239 My code Html Formatter for source codes
255 My code Html Formatter for source codes
240 """
256 """
241
257
242 def wrap(self, source, outfile):
258 def wrap(self, source, outfile):
243 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
259 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
244
260
245 def _wrap_code(self, source):
261 def _wrap_code(self, source):
246 for cnt, it in enumerate(source):
262 for cnt, it in enumerate(source):
247 i, t = it
263 i, t = it
248 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
264 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
249 yield i, t
265 yield i, t
250
266
251 def _wrap_tablelinenos(self, inner):
267 def _wrap_tablelinenos(self, inner):
252 dummyoutfile = StringIO.StringIO()
268 dummyoutfile = StringIO.StringIO()
253 lncount = 0
269 lncount = 0
254 for t, line in inner:
270 for t, line in inner:
255 if t:
271 if t:
256 lncount += 1
272 lncount += 1
257 dummyoutfile.write(line)
273 dummyoutfile.write(line)
258
274
259 fl = self.linenostart
275 fl = self.linenostart
260 mw = len(str(lncount + fl - 1))
276 mw = len(str(lncount + fl - 1))
261 sp = self.linenospecial
277 sp = self.linenospecial
262 st = self.linenostep
278 st = self.linenostep
263 la = self.lineanchors
279 la = self.lineanchors
264 aln = self.anchorlinenos
280 aln = self.anchorlinenos
265 nocls = self.noclasses
281 nocls = self.noclasses
266 if sp:
282 if sp:
267 lines = []
283 lines = []
268
284
269 for i in range(fl, fl + lncount):
285 for i in range(fl, fl + lncount):
270 if i % st == 0:
286 if i % st == 0:
271 if i % sp == 0:
287 if i % sp == 0:
272 if aln:
288 if aln:
273 lines.append('<a href="#%s%d" class="special">%*d</a>' %
289 lines.append('<a href="#%s%d" class="special">%*d</a>' %
274 (la, i, mw, i))
290 (la, i, mw, i))
275 else:
291 else:
276 lines.append('<span class="special">%*d</span>' % (mw, i))
292 lines.append('<span class="special">%*d</span>' % (mw, i))
277 else:
293 else:
278 if aln:
294 if aln:
279 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
295 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
280 else:
296 else:
281 lines.append('%*d' % (mw, i))
297 lines.append('%*d' % (mw, i))
282 else:
298 else:
283 lines.append('')
299 lines.append('')
284 ls = '\n'.join(lines)
300 ls = '\n'.join(lines)
285 else:
301 else:
286 lines = []
302 lines = []
287 for i in range(fl, fl + lncount):
303 for i in range(fl, fl + lncount):
288 if i % st == 0:
304 if i % st == 0:
289 if aln:
305 if aln:
290 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
306 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
291 else:
307 else:
292 lines.append('%*d' % (mw, i))
308 lines.append('%*d' % (mw, i))
293 else:
309 else:
294 lines.append('')
310 lines.append('')
295 ls = '\n'.join(lines)
311 ls = '\n'.join(lines)
296
312
297 # in case you wonder about the seemingly redundant <div> here: since the
313 # in case you wonder about the seemingly redundant <div> here: since the
298 # content in the other cell also is wrapped in a div, some browsers in
314 # content in the other cell also is wrapped in a div, some browsers in
299 # some configurations seem to mess up the formatting...
315 # some configurations seem to mess up the formatting...
300 if nocls:
316 if nocls:
301 yield 0, ('<table class="%stable">' % self.cssclass +
317 yield 0, ('<table class="%stable">' % self.cssclass +
302 '<tr><td><div class="linenodiv" '
318 '<tr><td><div class="linenodiv" '
303 'style="background-color: #f0f0f0; padding-right: 10px">'
319 'style="background-color: #f0f0f0; padding-right: 10px">'
304 '<pre style="line-height: 125%">' +
320 '<pre style="line-height: 125%">' +
305 ls + '</pre></div></td><td id="hlcode" class="code">')
321 ls + '</pre></div></td><td id="hlcode" class="code">')
306 else:
322 else:
307 yield 0, ('<table class="%stable">' % self.cssclass +
323 yield 0, ('<table class="%stable">' % self.cssclass +
308 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
324 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
309 ls + '</pre></div></td><td id="hlcode" class="code">')
325 ls + '</pre></div></td><td id="hlcode" class="code">')
310 yield 0, dummyoutfile.getvalue()
326 yield 0, dummyoutfile.getvalue()
311 yield 0, '</td></tr></table>'
327 yield 0, '</td></tr></table>'
312
328
313
329
314 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
330 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
315 def __init__(self, **kw):
331 def __init__(self, **kw):
316 # only show these line numbers if set
332 # only show these line numbers if set
317 self.only_lines = kw.pop('only_line_numbers', [])
333 self.only_lines = kw.pop('only_line_numbers', [])
318 self.query_terms = kw.pop('query_terms', [])
334 self.query_terms = kw.pop('query_terms', [])
319 self.max_lines = kw.pop('max_lines', 5)
335 self.max_lines = kw.pop('max_lines', 5)
320 self.line_context = kw.pop('line_context', 3)
336 self.line_context = kw.pop('line_context', 3)
321 self.url = kw.pop('url', None)
337 self.url = kw.pop('url', None)
322
338
323 super(CodeHtmlFormatter, self).__init__(**kw)
339 super(CodeHtmlFormatter, self).__init__(**kw)
324
340
325 def _wrap_code(self, source):
341 def _wrap_code(self, source):
326 for cnt, it in enumerate(source):
342 for cnt, it in enumerate(source):
327 i, t = it
343 i, t = it
328 t = '<pre>%s</pre>' % t
344 t = '<pre>%s</pre>' % t
329 yield i, t
345 yield i, t
330
346
331 def _wrap_tablelinenos(self, inner):
347 def _wrap_tablelinenos(self, inner):
332 yield 0, '<table class="code-highlight %stable">' % self.cssclass
348 yield 0, '<table class="code-highlight %stable">' % self.cssclass
333
349
334 last_shown_line_number = 0
350 last_shown_line_number = 0
335 current_line_number = 1
351 current_line_number = 1
336
352
337 for t, line in inner:
353 for t, line in inner:
338 if not t:
354 if not t:
339 yield t, line
355 yield t, line
340 continue
356 continue
341
357
342 if current_line_number in self.only_lines:
358 if current_line_number in self.only_lines:
343 if last_shown_line_number + 1 != current_line_number:
359 if last_shown_line_number + 1 != current_line_number:
344 yield 0, '<tr>'
360 yield 0, '<tr>'
345 yield 0, '<td class="line">...</td>'
361 yield 0, '<td class="line">...</td>'
346 yield 0, '<td id="hlcode" class="code"></td>'
362 yield 0, '<td id="hlcode" class="code"></td>'
347 yield 0, '</tr>'
363 yield 0, '</tr>'
348
364
349 yield 0, '<tr>'
365 yield 0, '<tr>'
350 if self.url:
366 if self.url:
351 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
367 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
352 self.url, current_line_number, current_line_number)
368 self.url, current_line_number, current_line_number)
353 else:
369 else:
354 yield 0, '<td class="line"><a href="">%i</a></td>' % (
370 yield 0, '<td class="line"><a href="">%i</a></td>' % (
355 current_line_number)
371 current_line_number)
356 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
372 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
357 yield 0, '</tr>'
373 yield 0, '</tr>'
358
374
359 last_shown_line_number = current_line_number
375 last_shown_line_number = current_line_number
360
376
361 current_line_number += 1
377 current_line_number += 1
362
378
363
379
364 yield 0, '</table>'
380 yield 0, '</table>'
365
381
366
382
367 def extract_phrases(text_query):
383 def extract_phrases(text_query):
368 """
384 """
369 Extracts phrases from search term string making sure phrases
385 Extracts phrases from search term string making sure phrases
370 contained in double quotes are kept together - and discarding empty values
386 contained in double quotes are kept together - and discarding empty values
371 or fully whitespace values eg.
387 or fully whitespace values eg.
372
388
373 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
389 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
374
390
375 """
391 """
376
392
377 in_phrase = False
393 in_phrase = False
378 buf = ''
394 buf = ''
379 phrases = []
395 phrases = []
380 for char in text_query:
396 for char in text_query:
381 if in_phrase:
397 if in_phrase:
382 if char == '"': # end phrase
398 if char == '"': # end phrase
383 phrases.append(buf)
399 phrases.append(buf)
384 buf = ''
400 buf = ''
385 in_phrase = False
401 in_phrase = False
386 continue
402 continue
387 else:
403 else:
388 buf += char
404 buf += char
389 continue
405 continue
390 else:
406 else:
391 if char == '"': # start phrase
407 if char == '"': # start phrase
392 in_phrase = True
408 in_phrase = True
393 phrases.append(buf)
409 phrases.append(buf)
394 buf = ''
410 buf = ''
395 continue
411 continue
396 elif char == ' ':
412 elif char == ' ':
397 phrases.append(buf)
413 phrases.append(buf)
398 buf = ''
414 buf = ''
399 continue
415 continue
400 else:
416 else:
401 buf += char
417 buf += char
402
418
403 phrases.append(buf)
419 phrases.append(buf)
404 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
420 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
405 return phrases
421 return phrases
406
422
407
423
408 def get_matching_offsets(text, phrases):
424 def get_matching_offsets(text, phrases):
409 """
425 """
410 Returns a list of string offsets in `text` that the list of `terms` match
426 Returns a list of string offsets in `text` that the list of `terms` match
411
427
412 >>> get_matching_offsets('some text here', ['some', 'here'])
428 >>> get_matching_offsets('some text here', ['some', 'here'])
413 [(0, 4), (10, 14)]
429 [(0, 4), (10, 14)]
414
430
415 """
431 """
416 offsets = []
432 offsets = []
417 for phrase in phrases:
433 for phrase in phrases:
418 for match in re.finditer(phrase, text):
434 for match in re.finditer(phrase, text):
419 offsets.append((match.start(), match.end()))
435 offsets.append((match.start(), match.end()))
420
436
421 return offsets
437 return offsets
422
438
423
439
424 def normalize_text_for_matching(x):
440 def normalize_text_for_matching(x):
425 """
441 """
426 Replaces all non alnum characters to spaces and lower cases the string,
442 Replaces all non alnum characters to spaces and lower cases the string,
427 useful for comparing two text strings without punctuation
443 useful for comparing two text strings without punctuation
428 """
444 """
429 return re.sub(r'[^\w]', ' ', x.lower())
445 return re.sub(r'[^\w]', ' ', x.lower())
430
446
431
447
def get_matching_line_offsets(lines, terms):
    """ Return a dict of `lines` indices (starting from 1) matching a
    text search query, mapped to a list of (start, end) term offsets
    inside each matching line.

    :param lines: list of strings representing lines
    :param terms: search term string to match in lines eg. 'some text'

    eg.

    text = '''
    words words words
    words words words
    some text some
    words words words
    words words words
    text here what
    '''
    get_matching_line_offsets(text, 'text')
    {3: [(5, 9)], 6: [(0, 4)]}

    """
    matching_lines = {}
    # normalize the search phrases once, outside of the per-line loop
    phrases = [normalize_text_for_matching(phrase)
               for phrase in extract_phrases(terms)]

    for line_index, line in enumerate(lines, start=1):
        match_offsets = get_matching_offsets(
            normalize_text_for_matching(line), phrases)
        if match_offsets:
            matching_lines[line_index] = match_offsets

    return matching_lines
465
481
466
482
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found

    :param mimetype: optional mimetype to look a lexer up by
    :param filepath: optional file name to look a lexer up by
    """
    lexer = None
    if mimetype:
        try:
            lexer = get_lexer_for_mimetype(mimetype)
        except pygments.util.ClassNotFound:
            # fall through to filename based detection; previously a
            # failed mimetype lookup aborted the whole try block and the
            # filename fallback was never attempted
            pass
    if not lexer and filepath:
        try:
            lexer = get_lexer_for_filename(filepath)
        except pygments.util.ClassNotFound:
            pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer
485
501
486
502
def pygmentize(filenode, **kwargs):
    """
    Render `filenode` content as syntax-highlighted HTML using pygments.

    :param filenode: file node whose content gets highlighted
    """
    # prefer an extension-based custom lexer, fall back to the node's own
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
    formatter = CodeHtmlFormatter(**kwargs)
    return literal(code_highlight(filenode.content, lexer, formatter))
496
512
497
513
def pygmentize_annotation(repo_name, filenode, **kwargs):
    """
    pygmentize function for annotation

    Renders an annotated (blame) view of `filenode`; each commit gets a
    distinct, stable color and every line links to its changeset.

    :param repo_name: name of the repository the file belongs to
    :param filenode: file node to annotate
    """

    # maps commit_id -> assigned color, filled lazily in get_color_string()
    color_dict = {}

    def gen_color(n=10000):
        """generator for getting n of evenly distributed colors using
        hsv color and golden ratio. It always return same order of colors

        :returns: RGB tuple
        """

        def hsv_to_rgb(h, s, v):
            # standard HSV -> RGB conversion
            if s == 0.0:
                return v, v, v
            i = int(h * 6.0)  # XXX assume int() truncates!
            f = (h * 6.0) - i
            p = v * (1.0 - s)
            q = v * (1.0 - s * f)
            t = v * (1.0 - s * (1.0 - f))
            i = i % 6
            if i == 0:
                return v, t, p
            if i == 1:
                return q, v, p
            if i == 2:
                return p, v, t
            if i == 3:
                return p, q, v
            if i == 4:
                return t, p, v
            if i == 5:
                return v, p, q

        # stepping the hue by the golden ratio (mod 1) spreads successive
        # colors maximally far apart while keeping the sequence deterministic
        golden_ratio = 0.618033988749895
        h = 0.22717784590367374

        # NOTE(review): xrange/.next()/map returning a list are Python 2
        # specific; this module targets Python 2
        for _ in xrange(n):
            h += golden_ratio
            h %= 1
            HSV_tuple = [h, 0.95, 0.95]
            RGB_tuple = hsv_to_rgb(*HSV_tuple)
            yield map(lambda x: str(int(x * 256)), RGB_tuple)

    cgenerator = gen_color()

    def get_color_string(commit_id):
        # hand out the next generator color the first time a commit is seen,
        # re-use the cached one afterwards so a commit always keeps its color
        if commit_id in color_dict:
            col = color_dict[commit_id]
        else:
            col = color_dict[commit_id] = cgenerator.next()
        return "color: rgb(%s)! important;" % (', '.join(col))

    def url_func(repo_name):
        # factory producing the per-commit link renderer used by
        # annotate_highlight below

        def _url_func(commit):
            author = commit.author
            date = commit.date
            message = tooltip(commit.message)

            tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
                            " %s<br/><b>Date:</b> %s</b><br/><b>Message:"
                            "</b> %s<br/></div>")

            tooltip_html = tooltip_html % (author, date, message)
            # e.g. ' r123:abcdef123456' - padded revision index + short hash
            lnk_format = '%5s:%s' % ('r%s' % commit.idx, commit.short_id)
            uri = link_to(
                lnk_format,
                url('changeset_home', repo_name=repo_name,
                    revision=commit.raw_id),
                style=get_color_string(commit.raw_id),
                class_='tooltip',
                title=tooltip_html
            )

            uri += '\n'
            return uri
        return _url_func

    return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs))
582
598
583
599
def is_following_repo(repo_name, user_id):
    """Return whether user `user_id` follows repository `repo_name`."""
    # local import to avoid import cycles at module load time
    from rhodecode.model.scm import ScmModel
    scm_model = ScmModel()
    return scm_model.is_following_repo(repo_name, user_id)
587
603
588
604
589 class _Message(object):
605 class _Message(object):
590 """A message returned by ``Flash.pop_messages()``.
606 """A message returned by ``Flash.pop_messages()``.
591
607
592 Converting the message to a string returns the message text. Instances
608 Converting the message to a string returns the message text. Instances
593 also have the following attributes:
609 also have the following attributes:
594
610
595 * ``message``: the message text.
611 * ``message``: the message text.
596 * ``category``: the category specified when the message was created.
612 * ``category``: the category specified when the message was created.
597 """
613 """
598
614
599 def __init__(self, category, message):
615 def __init__(self, category, message):
600 self.category = category
616 self.category = category
601 self.message = message
617 self.message = message
602
618
603 def __str__(self):
619 def __str__(self):
604 return self.message
620 return self.message
605
621
606 __unicode__ = __str__
622 __unicode__ = __str__
607
623
608 def __html__(self):
624 def __html__(self):
609 return escape(safe_unicode(self.message))
625 return escape(safe_unicode(self.message))
610
626
611
627
class Flash(_Flash):

    def pop_messages(self):
        """Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        from pylons import session

        messages = []

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                messages.append(_Message(cat, msg))
        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        # persist the removal of the popped messages
        session.save()
        return messages

# module level singleton used throughout templates/controllers as h.flash
flash = Flash()
641
657
642 #==============================================================================
658 #==============================================================================
643 # SCM FILTERS available via h.
659 # SCM FILTERS available via h.
644 #==============================================================================
660 #==============================================================================
645 from rhodecode.lib.vcs.utils import author_name, author_email
661 from rhodecode.lib.vcs.utils import author_name, author_email
646 from rhodecode.lib.utils2 import credentials_filter, age as _age
662 from rhodecode.lib.utils2 import credentials_filter, age as _age
647 from rhodecode.model.db import User, ChangesetStatus
663 from rhodecode.model.db import User, ChangesetStatus
648
664
# commonly used SCM filters, exposed to templates via ``h.``
age = _age
email = author_email


def capitalize(x):
    """Capitalize the first character of `x`."""
    return x.capitalize()


def short_id(x):
    """Shorten a commit hash `x` to its first 12 characters."""
    return x[:12]


def hide_credentials(x):
    """Strip credentials (user:password) from a url-like string `x`."""
    return ''.join(credentials_filter(x))
654
670
655
671
def age_component(datetime_iso, value=None, time_is_local=False):
    """
    Render a ``<time>`` html element with a human readable title for
    `datetime_iso`, consumed by the client side "timeago" widget.

    :param datetime_iso: datetime (or iso datetime string) to render
    :param value: optional pre-formatted title, defaults to format_date()
    :param time_is_local: when True, naive datetimes get the local UTC
        offset appended instead of '+00:00'
    """
    title = value or format_date(datetime_iso)

    # default to no explicit offset; previously `tzinfo` was only bound
    # inside the isinstance() branch below, raising UnboundLocalError for
    # timezone-aware datetimes or plain iso strings
    tzinfo = ''

    # detect if we have a timezone info, otherwise, add it
    if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        tzinfo = '+00:00'

        if time_is_local:
            tzinfo = time.strftime("+%H:%M",
                time.gmtime(
                    (datetime.now() - datetime.utcnow()).seconds + 1
                )
            )

    return literal(
        '<time class="timeago tooltip" '
        'title="{1}" datetime="{0}{2}">{1}</time>'.format(
            datetime_iso, title, tzinfo))
674
690
675
691
def _shorten_commit_id(commit_id):
    """Trim `commit_id` to the configured sha display length (default 12)."""
    from rhodecode import CONFIG
    sha_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
    return commit_id[:sha_len]
680
696
681
697
def show_id(commit):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    from rhodecode import CONFIG
    show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))

    raw_id = _shorten_commit_id(commit.raw_id)
    if not show_idx:
        # hash only, revision index display is disabled
        return '%s' % (raw_id, )
    return 'r%s:%s' % (commit.idx, raw_id)
697
713
698
714
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """
    if not date:
        return u""
    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
712
728
713
729
714 class _RepoChecker(object):
730 class _RepoChecker(object):
715
731
716 def __init__(self, backend_alias):
732 def __init__(self, backend_alias):
717 self._backend_alias = backend_alias
733 self._backend_alias = backend_alias
718
734
719 def __call__(self, repository):
735 def __call__(self, repository):
720 if hasattr(repository, 'alias'):
736 if hasattr(repository, 'alias'):
721 _type = repository.alias
737 _type = repository.alias
722 elif hasattr(repository, 'repo_type'):
738 elif hasattr(repository, 'repo_type'):
723 _type = repository.repo_type
739 _type = repository.repo_type
724 else:
740 else:
725 _type = repository
741 _type = repository
726 return _type == self._backend_alias
742 return _type == self._backend_alias
727
743
728 is_git = _RepoChecker('git')
744 is_git = _RepoChecker('git')
729 is_hg = _RepoChecker('hg')
745 is_hg = _RepoChecker('hg')
730 is_svn = _RepoChecker('svn')
746 is_svn = _RepoChecker('svn')
731
747
732
748
def get_repo_type_by_name(repo_name):
    """Return the backend type (hg/git/svn) of repository `repo_name`."""
    return Repository.get_by_repo_name(repo_name).repo_type
736
752
737
753
def is_svn_without_proxy(repository):
    """True when `repository` is svn and the http proxy is not enabled."""
    from rhodecode import CONFIG
    if not is_svn(repository):
        return False
    return not CONFIG.get('rhodecode_proxy_subversion_http_requests', False)
744
760
745
761
def discover_user(author):
    """
    Tries to discover a RhodeCode User based on the author string. Author
    string is typically `FirstName LastName <email@address.com>`

    :return: matched ``User`` or None
    """
    # author may already be a User instance; nothing to discover then
    if isinstance(author, User):
        return author

    # first attempt: match by the email part of the author string
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # second attempt: treat the name part as a username
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True, cache=True)
    if user is not None:
        return user

    return None
770
786
771
787
def email_or_none(author):
    """Return an email address for `author`, or None when none resolves."""
    # extract email from the commit string
    _email = author_email(author)
    if _email != '':
        return _email

    # no inline email; see if the name resolves to a known user with one
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
789
805
790
806
def link_to_user(author, length=0, **kwargs):
    """Render a link to `author`'s profile, or plain text when unknown."""
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save 1 intensive-query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if not user:
        # unknown author - render escaped text without a profile link
        return escape(display_person)
    return link_to(
        escape(display_person),
        url('user_profile', username=user.username),
        **kwargs)
809
825
810
826
def person(author, show_attr="username_and_name"):
    """Return `show_attr` of the matched user, else raw author name/email."""
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    # fall back to whatever the author string itself provides
    _author = author_name(author)
    _email = email(author)
    return _author or _email
819
835
820
836
def author_string(email):
    """Format `email` as 'First Last &lt;email&gt;' when a named user matches,
    otherwise return `email` unchanged (or None for empty input)."""
    if not email:
        return None
    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.firstname or user.lastname):
        return '%s %s &lt;%s&gt;' % (user.firstname, user.lastname, email)
    return email
833
849
834
850
def person_by_id(id_, show_attr="username_and_name"):
    """Resolve a user id to `show_attr`; on failure return the id itself
    (coerced to int when it looked numeric)."""
    #maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            # attr to return from fetched user
            return getattr(user, show_attr)
    return id_
846
862
847
863
def gravatar_with_user(author, show_disabled=False):
    """Render the 'gravatar_with_user' template snippet for `author`."""
    from rhodecode.lib.utils import PartialRenderer
    renderer = PartialRenderer('base/base.html')
    return renderer('gravatar_with_user', author, show_disabled=show_disabled)
852
868
853
869
def desc_stylize(value):
    """
    converts tags from value into html equivalent

    :param value: raw description text, may contain ``[tag]`` markers
    :return: value with recognized markers replaced by metatag divs
    """
    if not value:
        return ''

    value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    # NOTE: the license replacement used to contain 'http:\/\/' which is not
    # a valid re.sub replacement escape (literal backslashes in the output
    # on py2, `bad escape` error on modern pythons)
    value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)

    return value
875
891
876
892
def escaped_stylize(value):
    """
    converts tags from value into html equivalent, but escaping its value first
    """
    if not value:
        return ''

    # Using default webhelper escape method, but has to force it as a
    # plain unicode instead of a markup tag to be used in regex expressions
    value = unicode(escape(safe_unicode(value)))

    value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    # NOTE: the license replacement used to contain 'http:\/\/' which is not
    # a valid re.sub replacement escape (literal backslashes in the output
    # on py2, `bad escape` error on modern pythons)
    value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)

    return value
900
916
901
917
def bool2icon(value):
    """
    Returns boolean value of a given value, represented as html element with
    classes that will represent icons

    :param value: given value to convert to html node
    """
    # truthiness of `value` decides which icon class is rendered
    icon_class = "icon-true" if value else "icon-false"
    return HTML.tag('i', class_=icon_class)
914
930
915
931
916 #==============================================================================
932 #==============================================================================
917 # PERMS
933 # PERMS
918 #==============================================================================
934 #==============================================================================
919 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
935 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
920 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
936 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
921 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token
937 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token
922
938
923
939
924 #==============================================================================
940 #==============================================================================
925 # GRAVATAR URL
941 # GRAVATAR URL
926 #==============================================================================
942 #==============================================================================
927 class InitialsGravatar(object):
943 class InitialsGravatar(object):
928 def __init__(self, email_address, first_name, last_name, size=30,
944 def __init__(self, email_address, first_name, last_name, size=30,
929 background=None, text_color='#fff'):
945 background=None, text_color='#fff'):
930 self.size = size
946 self.size = size
931 self.first_name = first_name
947 self.first_name = first_name
932 self.last_name = last_name
948 self.last_name = last_name
933 self.email_address = email_address
949 self.email_address = email_address
934 self.background = background or self.str2color(email_address)
950 self.background = background or self.str2color(email_address)
935 self.text_color = text_color
951 self.text_color = text_color
936
952
937 def get_color_bank(self):
953 def get_color_bank(self):
938 """
954 """
939 returns a predefined list of colors that gravatars can use.
955 returns a predefined list of colors that gravatars can use.
940 Those are randomized distinct colors that guarantee readability and
956 Those are randomized distinct colors that guarantee readability and
941 uniqueness.
957 uniqueness.
942
958
943 generated with: http://phrogz.net/css/distinct-colors.html
959 generated with: http://phrogz.net/css/distinct-colors.html
944 """
960 """
945 return [
961 return [
946 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
962 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
947 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
963 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
948 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
964 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
949 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
965 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
950 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
966 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
951 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
967 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
952 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
968 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
953 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
969 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
954 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
970 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
955 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
971 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
956 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
972 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
957 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
973 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
958 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
974 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
959 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
975 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
960 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
976 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
961 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
977 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
962 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
978 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
963 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
979 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
964 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
980 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
965 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
981 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
966 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
982 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
967 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
983 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
968 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
984 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
969 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
985 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
970 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
986 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
971 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
987 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
972 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
988 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
973 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
989 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
974 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
990 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
975 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
991 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
976 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
992 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
977 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
993 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
978 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
994 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
979 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
995 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
980 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
996 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
981 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
997 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
982 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
998 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
983 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
999 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
984 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1000 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
985 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1001 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
986 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1002 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
987 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1003 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
988 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1004 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
989 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1005 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
990 '#4f8c46', '#368dd9', '#5c0073'
1006 '#4f8c46', '#368dd9', '#5c0073'
991 ]
1007 ]
992
1008
993 def rgb_to_hex_color(self, rgb_tuple):
1009 def rgb_to_hex_color(self, rgb_tuple):
994 """
1010 """
995 Converts an rgb_tuple passed to an hex color.
1011 Converts an rgb_tuple passed to an hex color.
996
1012
997 :param rgb_tuple: tuple with 3 ints represents rgb color space
1013 :param rgb_tuple: tuple with 3 ints represents rgb color space
998 """
1014 """
999 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1015 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1000
1016
1001 def email_to_int_list(self, email_str):
1017 def email_to_int_list(self, email_str):
1002 """
1018 """
1003 Get every byte of the hex digest value of email and turn it to integer.
1019 Get every byte of the hex digest value of email and turn it to integer.
1004 It's going to be always between 0-255
1020 It's going to be always between 0-255
1005 """
1021 """
1006 digest = md5_safe(email_str.lower())
1022 digest = md5_safe(email_str.lower())
1007 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1023 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1008
1024
1009 def pick_color_bank_index(self, email_str, color_bank):
1025 def pick_color_bank_index(self, email_str, color_bank):
1010 return self.email_to_int_list(email_str)[0] % len(color_bank)
1026 return self.email_to_int_list(email_str)[0] % len(color_bank)
1011
1027
1012 def str2color(self, email_str):
1028 def str2color(self, email_str):
1013 """
1029 """
1014 Tries to map in a stable algorithm an email to color
1030 Tries to map in a stable algorithm an email to color
1015
1031
1016 :param email_str:
1032 :param email_str:
1017 """
1033 """
1018 color_bank = self.get_color_bank()
1034 color_bank = self.get_color_bank()
1019 # pick position (module it's length so we always find it in the
1035 # pick position (module it's length so we always find it in the
1020 # bank even if it's smaller than 256 values
1036 # bank even if it's smaller than 256 values
1021 pos = self.pick_color_bank_index(email_str, color_bank)
1037 pos = self.pick_color_bank_index(email_str, color_bank)
1022 return color_bank[pos]
1038 return color_bank[pos]
1023
1039
1024 def normalize_email(self, email_address):
1040 def normalize_email(self, email_address):
1025 import unicodedata
1041 import unicodedata
1026 # default host used to fill in the fake/missing email
1042 # default host used to fill in the fake/missing email
1027 default_host = u'localhost'
1043 default_host = u'localhost'
1028
1044
1029 if not email_address:
1045 if not email_address:
1030 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1046 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1031
1047
1032 email_address = safe_unicode(email_address)
1048 email_address = safe_unicode(email_address)
1033
1049
1034 if u'@' not in email_address:
1050 if u'@' not in email_address:
1035 email_address = u'%s@%s' % (email_address, default_host)
1051 email_address = u'%s@%s' % (email_address, default_host)
1036
1052
1037 if email_address.endswith(u'@'):
1053 if email_address.endswith(u'@'):
1038 email_address = u'%s%s' % (email_address, default_host)
1054 email_address = u'%s%s' % (email_address, default_host)
1039
1055
1040 email_address = unicodedata.normalize('NFKD', email_address)\
1056 email_address = unicodedata.normalize('NFKD', email_address)\
1041 .encode('ascii', 'ignore')
1057 .encode('ascii', 'ignore')
1042 return email_address
1058 return email_address
1043
1059
    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to their ascii
        representation, eg Ą => A
        """
        import unicodedata
        # fold non-ascii characters to their ascii base form; the results
        # are byte strings (Python 2 ``.encode`` semantics)
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')

        # do NFKD encoding, and also make sure email has proper format;
        # normalize_email guarantees an '@' is present, so the split below
        # always yields two parts
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'firstname.lastname' syntax
        # NOTE(review): an address starting with '@' would leave ``prefix``
        # empty and raise IndexError below — assumed not to occur upstream
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either firstname or lastname letter;
        # for compound last names ("Von Rossum") use the last part's letter
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()
1092
1108
    def get_img_data_by_type(self, font_family, img_type):
        """
        Return a predefined SVG template rendered with this avatar's
        size and text color.

        :param font_family: css font-family string; note the
            ``default_user`` template below contains no ``{font_family}``
            placeholder, so it is currently passed only for symmetry with
            :meth:`get_img_data`
        :param img_type: key of the template to use; only ``default_user``
            (a generic user silhouette) exists — any other key raises
            ``KeyError`` from the dict lookup at the end
        """
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        # dict-based dispatch: unknown img_type keys fail loudly (KeyError)
        return {
            "default_user": default_user
        }[img_type]
1127
1143
    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image

        :param svg_type: optional predefined template key (e.g.
            ``default_user``); when given, rendering is delegated to
            :meth:`get_img_data_by_type` instead of drawing initials
        :return: SVG markup as a string
        """

        # css font stack shared by both rendering paths
        font_family = ','.join([
            'proximanovaregular',
            'Proxima Nova Regular',
            'Proxima Nova',
            'Arial',
            'Lucida Grande',
            'sans-serif'
        ])
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        # default path: render the computed initials centered in a box
        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/1.85,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data
1164
1180
1165 def generate_svg(self, svg_type=None):
1181 def generate_svg(self, svg_type=None):
1166 img_data = self.get_img_data(svg_type)
1182 img_data = self.get_img_data(svg_type)
1167 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1183 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1168
1184
1169
1185
def initials_gravatar(email_address, first_name, last_name, size=30):
    """
    Build an SVG data-URI avatar from a user's initials.

    The anonymous/default account gets the generic user silhouette
    instead of initials.
    """
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'
    else:
        svg_type = None
    generator = InitialsGravatar(email_address, first_name, last_name, size)
    return generator.generate_svg(svg_type=svg_type)
1176
1192
1177
1193
def gravatar_url(email_address, size=30):
    """
    Return an avatar URL for ``email_address``.

    Uses the configured gravatar URL template when gravatars are enabled;
    otherwise (or for empty/default-user addresses) falls back to a locally
    generated initials SVG data URI.
    """
    # doh, we need to re-import those to mock it later
    from pylons import tmpl_context as c

    _use_gravatar = c.visual.use_gravatar
    _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, unicode):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user -> generic initials avatar
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)

    if not _use_gravatar:
        return initials_gravatar(email_address, '', '', size=size)

    # TODO: Disuse pyramid thread locals. Think about another solution to
    # get the host and schema here.
    request = get_current_request()
    url = safe_str(_gravatar_url)
    # expand every supported placeholder in the configured template
    replacements = [
        ('{email}', email_address),
        ('{md5email}', md5_safe(email_address.lower())),
        ('{netloc}', request.host),
        ('{scheme}', request.scheme),
        ('{size}', safe_str(size)),
    ]
    for token, value in replacements:
        url = url.replace(token, value)
    return url
1207
1223
1208
1224
class Page(_Page):
    """
    Custom pager to match rendering style with paginator

    NOTE(review): subclasses an external ``_Page`` base (presumably
    webhelpers' paginate.Page — confirm against the imports at file top);
    only the HTML navigation rendering is overridden here.
    """

    def _get_pos(self, cur_page, max_page, items):
        """
        Compute the ``(leftmost, current, rightmost)`` page numbers for a
        navigation window of ``items`` slots around ``cur_page``, clamped
        to the valid ``1..max_page`` range.

        NOTE: ``items / 2`` relies on Python 2 integer (floor) division —
        this file uses ``xrange`` elsewhere, so that is intentional.
        """
        edge = (items / 2) + 1
        if (cur_page <= edge):
            # near the left boundary: widen the radius so the window keeps
            # its full width instead of shrinking
            radius = max(items / 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            # near the right boundary: widen towards the left analogously
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items / 2

        left = max(1, (cur_page - (radius)))
        right = min(max_page, cur_page + (radius))
        return left, cur_page, right

    def _range(self, regexp_match):
        """
        Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').

        Arguments:

        regexp_match
            A "re" (regular expressions) match object containing the
            radius of linked pages around the current page in
            regexp_match.group(1) as a string

        This function is supposed to be called as a callable in
        re.sub.

        """
        radius = int(regexp_match.group(1))

        # Compute the first and last page number within the radius
        # e.g. '1 .. 5 6 [7] 8 9 .. 12'
        #  -> leftmost_page  = 5
        #  -> rightmost_page = 9
        leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
                                                            self.last_page,
                                                            (radius * 2) + 1)
        nav_items = []

        # Create a link to the first page (unless we are on the first page
        # or there would be no need to insert '..' spacers)
        if self.page != self.first_page and self.first_page < leftmost_page:
            nav_items.append(self._pagerlink(self.first_page, self.first_page))

        # Insert dots if there are pages between the first page
        # and the currently displayed page range
        if leftmost_page - self.first_page > 1:
            # Wrap in a SPAN tag if nolink_attr is set
            text = '..'
            if self.dotdot_attr:
                text = HTML.span(c=text, **self.dotdot_attr)
            nav_items.append(text)

        for thispage in xrange(leftmost_page, rightmost_page + 1):
            # Highlight the current page number and do not use a link
            if thispage == self.page:
                text = '%s' % (thispage,)
                # Wrap in a SPAN tag if nolink_attr is set
                if self.curpage_attr:
                    text = HTML.span(c=text, **self.curpage_attr)
                nav_items.append(text)
            # Otherwise create just a link to that page
            else:
                text = '%s' % (thispage,)
                nav_items.append(self._pagerlink(thispage, text))

        # Insert dots if there are pages between the displayed
        # page numbers and the end of the page range
        if self.last_page - rightmost_page > 1:
            text = '..'
            # Wrap in a SPAN tag if nolink_attr is set
            if self.dotdot_attr:
                text = HTML.span(c=text, **self.dotdot_attr)
            nav_items.append(text)

        # Create a link to the very last page (unless we are on the last
        # page or there would be no need to insert '..' spacers)
        if self.page != self.last_page and rightmost_page < self.last_page:
            nav_items.append(self._pagerlink(self.last_page, self.last_page))

        ## prerender links
        #_page_link = url.current()
        #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
        #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
        return self.separator.join(nav_items)

    def pager(self, format='~2~', page_param='page', partial_param='partial',
              show_if_single_page=False, separator=' ', onclick=None,
              symbol_first='<<', symbol_last='>>',
              symbol_previous='<', symbol_next='>',
              link_attr={'class': 'pager_link', 'rel': 'prerender'},
              curpage_attr={'class': 'pager_curpage'},
              dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
        # Render the pager HTML from the ``format`` template: '~N~' expands
        # to the linked page range (see _range), '$var' tokens are filled in
        # via string.Template below.
        # NOTE(review): the dict defaults above are mutable default
        # arguments — safe only as long as callers never mutate them.

        self.curpage_attr = curpage_attr
        self.separator = separator
        self.pager_kwargs = kwargs
        self.page_param = page_param
        self.partial_param = partial_param
        self.onclick = onclick
        self.link_attr = link_attr
        self.dotdot_attr = dotdot_attr

        # Don't show navigator if there is no more than one page
        if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
            return ''

        from string import Template
        # Replace ~...~ in token format by range of pages
        result = re.sub(r'~(\d+)~', self._range, format)

        # Interpolate '%' variables
        result = Template(result).safe_substitute({
            'first_page': self.first_page,
            'last_page': self.last_page,
            'page': self.page,
            'page_count': self.page_count,
            'items_per_page': self.items_per_page,
            'first_item': self.first_item,
            'last_item': self.last_item,
            'item_count': self.item_count,
            'link_first': self.page > self.first_page and \
                    self._pagerlink(self.first_page, symbol_first) or '',
            'link_last': self.page < self.last_page and \
                    self._pagerlink(self.last_page, symbol_last) or '',
            'link_previous': self.previous_page and \
                    self._pagerlink(self.previous_page, symbol_previous) \
                    or HTML.span(symbol_previous, class_="pg-previous disabled"),
            'link_next': self.next_page and \
                    self._pagerlink(self.next_page, symbol_next) \
                    or HTML.span(symbol_next, class_="pg-next disabled")
        })

        return literal(result)
1348
1364
1349
1365
1350 #==============================================================================
1366 #==============================================================================
1351 # REPO PAGER, PAGER FOR REPOSITORY
1367 # REPO PAGER, PAGER FOR REPOSITORY
1352 #==============================================================================
1368 #==============================================================================
class RepoPage(Page):
    # Pager that slices items from the TAIL of the collection: page 1 shows
    # the last ``items_per_page`` elements (see the first_item/last_item
    # math below) — presumably so the newest commits come first; confirm
    # against callers.

    def __init__(self, collection, page=1, items_per_page=20,
                 item_count=None, url=None, **kwargs):

        """Create a "RepoPage" instance. special pager for paging
        repository
        """
        self._url_generator = url

        # Safe the kwargs class-wide so they can be used in the pager() method
        self.kwargs = kwargs

        # Save a reference to the collection
        self.original_collection = collection

        self.collection = collection

        # The self.page is the number of the current page.
        # The first page has the number 1!
        try:
            self.page = int(page)  # make it int() if we get it as a string
        except (ValueError, TypeError):
            # any unparsable page value silently falls back to page 1
            self.page = 1

        self.items_per_page = items_per_page

        # Unless the user tells us how many items the collections has
        # we calculate that ourselves.
        if item_count is not None:
            self.item_count = item_count
        else:
            self.item_count = len(self.collection)

        # Compute the number of the first and last available page
        if self.item_count > 0:
            self.first_page = 1
            self.page_count = int(math.ceil(float(self.item_count) /
                                            self.items_per_page))
            self.last_page = self.first_page + self.page_count - 1

            # Make sure that the requested page number is the range of
            # valid pages
            if self.page > self.last_page:
                self.page = self.last_page
            elif self.page < self.first_page:
                self.page = self.first_page

            # Note: the number of items on this page can be less than
            #       items_per_page if the last page is not full
            # Indices count from the end: page 1 covers the final
            # items_per_page entries of the collection.
            self.first_item = max(0, (self.item_count) - (self.page *
                                                          items_per_page))
            self.last_item = ((self.item_count - 1) - items_per_page *
                              (self.page - 1))

            self.items = list(self.collection[self.first_item:self.last_item + 1])

            # Links to previous and next page
            if self.page > self.first_page:
                self.previous_page = self.page - 1
            else:
                self.previous_page = None

            if self.page < self.last_page:
                self.next_page = self.page + 1
            else:
                self.next_page = None

        # No items available
        else:
            self.first_page = None
            self.page_count = 0
            self.last_page = None
            self.first_item = None
            self.last_item = None
            self.previous_page = None
            self.next_page = None
1430 self.items = []
1446 self.items = []
1431
1447
1432 # This is a subclass of the 'list' type. Initialise the list now.
1448 # This is a subclass of the 'list' type. Initialise the list now.
1433 list.__init__(self, reversed(self.items))
1449 list.__init__(self, reversed(self.items))
1434
1450
1435
1451
1436 def changed_tooltip(nodes):
1452 def changed_tooltip(nodes):
1437 """
1453 """
1438 Generates a html string for changed nodes in commit page.
1454 Generates a html string for changed nodes in commit page.
1439 It limits the output to 30 entries
1455 It limits the output to 30 entries
1440
1456
1441 :param nodes: LazyNodesGenerator
1457 :param nodes: LazyNodesGenerator
1442 """
1458 """
1443 if nodes:
1459 if nodes:
1444 pref = ': <br/> '
1460 pref = ': <br/> '
1445 suf = ''
1461 suf = ''
1446 if len(nodes) > 30:
1462 if len(nodes) > 30:
1447 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1463 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1448 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1464 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1449 for x in nodes[:30]]) + suf)
1465 for x in nodes[:30]]) + suf)
1450 else:
1466 else:
1451 return ': ' + _('No Files')
1467 return ': ' + _('No Files')
1452
1468
1453
1469
1454 def breadcrumb_repo_link(repo):
1470 def breadcrumb_repo_link(repo):
1455 """
1471 """
1456 Makes a breadcrumbs path link to repo
1472 Makes a breadcrumbs path link to repo
1457
1473
1458 ex::
1474 ex::
1459 group >> subgroup >> repo
1475 group >> subgroup >> repo
1460
1476
1461 :param repo: a Repository instance
1477 :param repo: a Repository instance
1462 """
1478 """
1463
1479
1464 path = [
1480 path = [
1465 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1481 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1466 for group in repo.groups_with_parents
1482 for group in repo.groups_with_parents
1467 ] + [
1483 ] + [
1468 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1484 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1469 ]
1485 ]
1470
1486
1471 return literal(' &raquo; '.join(path))
1487 return literal(' &raquo; '.join(path))
1472
1488
1473
1489
1474 def format_byte_size_binary(file_size):
1490 def format_byte_size_binary(file_size):
1475 """
1491 """
1476 Formats file/folder sizes to standard.
1492 Formats file/folder sizes to standard.
1477 """
1493 """
1478 formatted_size = format_byte_size(file_size, binary=True)
1494 formatted_size = format_byte_size(file_size, binary=True)
1479 return formatted_size
1495 return formatted_size
1480
1496
1481
1497
1482 def fancy_file_stats(stats):
1498 def fancy_file_stats(stats):
1483 """
1499 """
1484 Displays a fancy two colored bar for number of added/deleted
1500 Displays a fancy two colored bar for number of added/deleted
1485 lines of code on file
1501 lines of code on file
1486
1502
1487 :param stats: two element list of added/deleted lines of code
1503 :param stats: two element list of added/deleted lines of code
1488 """
1504 """
1489 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1505 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1490 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1506 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1491
1507
1492 def cgen(l_type, a_v, d_v):
1508 def cgen(l_type, a_v, d_v):
1493 mapping = {'tr': 'top-right-rounded-corner-mid',
1509 mapping = {'tr': 'top-right-rounded-corner-mid',
1494 'tl': 'top-left-rounded-corner-mid',
1510 'tl': 'top-left-rounded-corner-mid',
1495 'br': 'bottom-right-rounded-corner-mid',
1511 'br': 'bottom-right-rounded-corner-mid',
1496 'bl': 'bottom-left-rounded-corner-mid'}
1512 'bl': 'bottom-left-rounded-corner-mid'}
1497 map_getter = lambda x: mapping[x]
1513 map_getter = lambda x: mapping[x]
1498
1514
1499 if l_type == 'a' and d_v:
1515 if l_type == 'a' and d_v:
1500 #case when added and deleted are present
1516 #case when added and deleted are present
1501 return ' '.join(map(map_getter, ['tl', 'bl']))
1517 return ' '.join(map(map_getter, ['tl', 'bl']))
1502
1518
1503 if l_type == 'a' and not d_v:
1519 if l_type == 'a' and not d_v:
1504 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1520 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1505
1521
1506 if l_type == 'd' and a_v:
1522 if l_type == 'd' and a_v:
1507 return ' '.join(map(map_getter, ['tr', 'br']))
1523 return ' '.join(map(map_getter, ['tr', 'br']))
1508
1524
1509 if l_type == 'd' and not a_v:
1525 if l_type == 'd' and not a_v:
1510 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1526 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1511
1527
1512 a, d = stats['added'], stats['deleted']
1528 a, d = stats['added'], stats['deleted']
1513 width = 100
1529 width = 100
1514
1530
1515 if stats['binary']: # binary operations like chmod/rename etc
1531 if stats['binary']: # binary operations like chmod/rename etc
1516 lbl = []
1532 lbl = []
1517 bin_op = 0 # undefined
1533 bin_op = 0 # undefined
1518
1534
1519 # prefix with bin for binary files
1535 # prefix with bin for binary files
1520 if BIN_FILENODE in stats['ops']:
1536 if BIN_FILENODE in stats['ops']:
1521 lbl += ['bin']
1537 lbl += ['bin']
1522
1538
1523 if NEW_FILENODE in stats['ops']:
1539 if NEW_FILENODE in stats['ops']:
1524 lbl += [_('new file')]
1540 lbl += [_('new file')]
1525 bin_op = NEW_FILENODE
1541 bin_op = NEW_FILENODE
1526 elif MOD_FILENODE in stats['ops']:
1542 elif MOD_FILENODE in stats['ops']:
1527 lbl += [_('mod')]
1543 lbl += [_('mod')]
1528 bin_op = MOD_FILENODE
1544 bin_op = MOD_FILENODE
1529 elif DEL_FILENODE in stats['ops']:
1545 elif DEL_FILENODE in stats['ops']:
1530 lbl += [_('del')]
1546 lbl += [_('del')]
1531 bin_op = DEL_FILENODE
1547 bin_op = DEL_FILENODE
1532 elif RENAMED_FILENODE in stats['ops']:
1548 elif RENAMED_FILENODE in stats['ops']:
1533 lbl += [_('rename')]
1549 lbl += [_('rename')]
1534 bin_op = RENAMED_FILENODE
1550 bin_op = RENAMED_FILENODE
1535
1551
1536 # chmod can go with other operations, so we add a + to lbl if needed
1552 # chmod can go with other operations, so we add a + to lbl if needed
1537 if CHMOD_FILENODE in stats['ops']:
1553 if CHMOD_FILENODE in stats['ops']:
1538 lbl += [_('chmod')]
1554 lbl += [_('chmod')]
1539 if bin_op == 0:
1555 if bin_op == 0:
1540 bin_op = CHMOD_FILENODE
1556 bin_op = CHMOD_FILENODE
1541
1557
1542 lbl = '+'.join(lbl)
1558 lbl = '+'.join(lbl)
1543 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1559 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1544 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1560 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1545 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1561 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1546 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1562 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1547
1563
1548 t = stats['added'] + stats['deleted']
1564 t = stats['added'] + stats['deleted']
1549 unit = float(width) / (t or 1)
1565 unit = float(width) / (t or 1)
1550
1566
1551 # needs > 9% of width to be visible or 0 to be hidden
1567 # needs > 9% of width to be visible or 0 to be hidden
1552 a_p = max(9, unit * a) if a > 0 else 0
1568 a_p = max(9, unit * a) if a > 0 else 0
1553 d_p = max(9, unit * d) if d > 0 else 0
1569 d_p = max(9, unit * d) if d > 0 else 0
1554 p_sum = a_p + d_p
1570 p_sum = a_p + d_p
1555
1571
1556 if p_sum > width:
1572 if p_sum > width:
1557 #adjust the percentage to be == 100% since we adjusted to 9
1573 #adjust the percentage to be == 100% since we adjusted to 9
1558 if a_p > d_p:
1574 if a_p > d_p:
1559 a_p = a_p - (p_sum - width)
1575 a_p = a_p - (p_sum - width)
1560 else:
1576 else:
1561 d_p = d_p - (p_sum - width)
1577 d_p = d_p - (p_sum - width)
1562
1578
1563 a_v = a if a > 0 else ''
1579 a_v = a if a > 0 else ''
1564 d_v = d if d > 0 else ''
1580 d_v = d if d > 0 else ''
1565
1581
1566 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1582 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1567 cgen('a', a_v, d_v), a_p, a_v
1583 cgen('a', a_v, d_v), a_p, a_v
1568 )
1584 )
1569 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1585 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1570 cgen('d', a_v, d_v), d_p, d_v
1586 cgen('d', a_v, d_v), d_p, d_v
1571 )
1587 )
1572 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1588 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1573
1589
1574
1590
1575 def urlify_text(text_, safe=True):
1591 def urlify_text(text_, safe=True):
1576 """
1592 """
1577 Extrac urls from text and make html links out of them
1593 Extrac urls from text and make html links out of them
1578
1594
1579 :param text_:
1595 :param text_:
1580 """
1596 """
1581
1597
1582 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1598 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1583 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1599 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1584
1600
1585 def url_func(match_obj):
1601 def url_func(match_obj):
1586 url_full = match_obj.groups()[0]
1602 url_full = match_obj.groups()[0]
1587 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1603 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1588 _newtext = url_pat.sub(url_func, text_)
1604 _newtext = url_pat.sub(url_func, text_)
1589 if safe:
1605 if safe:
1590 return literal(_newtext)
1606 return literal(_newtext)
1591 return _newtext
1607 return _newtext
1592
1608
1593
1609
1594 def urlify_commits(text_, repository):
1610 def urlify_commits(text_, repository):
1595 """
1611 """
1596 Extract commit ids from text and make link from them
1612 Extract commit ids from text and make link from them
1597
1613
1598 :param text_:
1614 :param text_:
1599 :param repository: repo name to build the URL with
1615 :param repository: repo name to build the URL with
1600 """
1616 """
1601 from pylons import url # doh, we need to re-import url to mock it later
1617 from pylons import url # doh, we need to re-import url to mock it later
1602 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1618 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1603
1619
1604 def url_func(match_obj):
1620 def url_func(match_obj):
1605 commit_id = match_obj.groups()[1]
1621 commit_id = match_obj.groups()[1]
1606 pref = match_obj.groups()[0]
1622 pref = match_obj.groups()[0]
1607 suf = match_obj.groups()[2]
1623 suf = match_obj.groups()[2]
1608
1624
1609 tmpl = (
1625 tmpl = (
1610 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1626 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1611 '%(commit_id)s</a>%(suf)s'
1627 '%(commit_id)s</a>%(suf)s'
1612 )
1628 )
1613 return tmpl % {
1629 return tmpl % {
1614 'pref': pref,
1630 'pref': pref,
1615 'cls': 'revision-link',
1631 'cls': 'revision-link',
1616 'url': url('changeset_home', repo_name=repository,
1632 'url': url('changeset_home', repo_name=repository,
1617 revision=commit_id),
1633 revision=commit_id, qualified=True),
1618 'commit_id': commit_id,
1634 'commit_id': commit_id,
1619 'suf': suf
1635 'suf': suf
1620 }
1636 }
1621
1637
1622 newtext = URL_PAT.sub(url_func, text_)
1638 newtext = URL_PAT.sub(url_func, text_)
1623
1639
1624 return newtext
1640 return newtext
1625
1641
1626
1642
1627 def _process_url_func(match_obj, repo_name, uid, entry):
1643 def _process_url_func(match_obj, repo_name, uid, entry,
1644 return_raw_data=False):
1628 pref = ''
1645 pref = ''
1629 if match_obj.group().startswith(' '):
1646 if match_obj.group().startswith(' '):
1630 pref = ' '
1647 pref = ' '
1631
1648
1632 issue_id = ''.join(match_obj.groups())
1649 issue_id = ''.join(match_obj.groups())
1633 tmpl = (
1650 tmpl = (
1634 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1651 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1635 '%(issue-prefix)s%(id-repr)s'
1652 '%(issue-prefix)s%(id-repr)s'
1636 '</a>')
1653 '</a>')
1637
1654
1638 (repo_name_cleaned,
1655 (repo_name_cleaned,
1639 parent_group_name) = RepoGroupModel().\
1656 parent_group_name) = RepoGroupModel().\
1640 _get_group_name_and_parent(repo_name)
1657 _get_group_name_and_parent(repo_name)
1641
1658
1642 # variables replacement
1659 # variables replacement
1643 named_vars = {
1660 named_vars = {
1644 'id': issue_id,
1661 'id': issue_id,
1645 'repo': repo_name,
1662 'repo': repo_name,
1646 'repo_name': repo_name_cleaned,
1663 'repo_name': repo_name_cleaned,
1647 'group_name': parent_group_name
1664 'group_name': parent_group_name
1648 }
1665 }
1649 # named regex variables
1666 # named regex variables
1650 named_vars.update(match_obj.groupdict())
1667 named_vars.update(match_obj.groupdict())
1651 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1668 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1652
1669
1653 return tmpl % {
1670 data = {
1654 'pref': pref,
1671 'pref': pref,
1655 'cls': 'issue-tracker-link',
1672 'cls': 'issue-tracker-link',
1656 'url': _url,
1673 'url': _url,
1657 'id-repr': issue_id,
1674 'id-repr': issue_id,
1658 'issue-prefix': entry['pref'],
1675 'issue-prefix': entry['pref'],
1659 'serv': entry['url'],
1676 'serv': entry['url'],
1660 }
1677 }
1678 if return_raw_data:
1679 return {
1680 'id': issue_id,
1681 'url': _url
1682 }
1683 return tmpl % data
1661
1684
1662
1685
1663 def process_patterns(text_string, repo_name, config):
1686 def process_patterns(text_string, repo_name, config=None):
1664 repo = None
1687 repo = None
1665 if repo_name:
1688 if repo_name:
1666 # Retrieving repo_name to avoid invalid repo_name to explode on
1689 # Retrieving repo_name to avoid invalid repo_name to explode on
1667 # IssueTrackerSettingsModel but still passing invalid name further down
1690 # IssueTrackerSettingsModel but still passing invalid name further down
1668 repo = Repository.get_by_repo_name(repo_name, cache=True)
1691 repo = Repository.get_by_repo_name(repo_name, cache=True)
1669
1692
1670 settings_model = IssueTrackerSettingsModel(repo=repo)
1693 settings_model = IssueTrackerSettingsModel(repo=repo)
1671 active_entries = settings_model.get_settings(cache=True)
1694 active_entries = settings_model.get_settings(cache=True)
1672
1695
1696 issues_data = []
1673 newtext = text_string
1697 newtext = text_string
1674 for uid, entry in active_entries.items():
1698 for uid, entry in active_entries.items():
1675 url_func = partial(
1676 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1677
1678 log.debug('found issue tracker entry with uid %s' % (uid,))
1699 log.debug('found issue tracker entry with uid %s' % (uid,))
1679
1700
1680 if not (entry['pat'] and entry['url']):
1701 if not (entry['pat'] and entry['url']):
1681 log.debug('skipping due to missing data')
1702 log.debug('skipping due to missing data')
1682 continue
1703 continue
1683
1704
1684 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1705 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1685 % (uid, entry['pat'], entry['url'], entry['pref']))
1706 % (uid, entry['pat'], entry['url'], entry['pref']))
1686
1707
1687 try:
1708 try:
1688 pattern = re.compile(r'%s' % entry['pat'])
1709 pattern = re.compile(r'%s' % entry['pat'])
1689 except re.error:
1710 except re.error:
1690 log.exception(
1711 log.exception(
1691 'issue tracker pattern: `%s` failed to compile',
1712 'issue tracker pattern: `%s` failed to compile',
1692 entry['pat'])
1713 entry['pat'])
1693 continue
1714 continue
1694
1715
1716 data_func = partial(
1717 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1718 return_raw_data=True)
1719
1720 for match_obj in pattern.finditer(text_string):
1721 issues_data.append(data_func(match_obj))
1722
1723 url_func = partial(
1724 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1725
1695 newtext = pattern.sub(url_func, newtext)
1726 newtext = pattern.sub(url_func, newtext)
1696 log.debug('processed prefix:uid `%s`' % (uid,))
1727 log.debug('processed prefix:uid `%s`' % (uid,))
1697
1728
1698 return newtext
1729 return newtext, issues_data
1699
1730
1700
1731
1701 def urlify_commit_message(commit_text, repository=None):
1732 def urlify_commit_message(commit_text, repository=None):
1702 """
1733 """
1703 Parses given text message and makes proper links.
1734 Parses given text message and makes proper links.
1704 issues are linked to given issue-server, and rest is a commit link
1735 issues are linked to given issue-server, and rest is a commit link
1705
1736
1706 :param commit_text:
1737 :param commit_text:
1707 :param repository:
1738 :param repository:
1708 """
1739 """
1709 from pylons import url # doh, we need to re-import url to mock it later
1740 from pylons import url # doh, we need to re-import url to mock it later
1710 from rhodecode import CONFIG
1711
1741
1712 def escaper(string):
1742 def escaper(string):
1713 return string.replace('<', '&lt;').replace('>', '&gt;')
1743 return string.replace('<', '&lt;').replace('>', '&gt;')
1714
1744
1715 newtext = escaper(commit_text)
1745 newtext = escaper(commit_text)
1746
1747 # extract http/https links and make them real urls
1748 newtext = urlify_text(newtext, safe=False)
1749
1716 # urlify commits - extract commit ids and make link out of them, if we have
1750 # urlify commits - extract commit ids and make link out of them, if we have
1717 # the scope of repository present.
1751 # the scope of repository present.
1718 if repository:
1752 if repository:
1719 newtext = urlify_commits(newtext, repository)
1753 newtext = urlify_commits(newtext, repository)
1720
1754
1721 # extract http/https links and make them real urls
1722 newtext = urlify_text(newtext, safe=False)
1723
1724 # process issue tracker patterns
1755 # process issue tracker patterns
1725 newtext = process_patterns(newtext, repository or '', CONFIG)
1756 newtext, issues = process_patterns(newtext, repository or '')
1726
1757
1727 return literal(newtext)
1758 return literal(newtext)
1728
1759
1729
1760
1730 def rst(source, mentions=False):
1761 def rst(source, mentions=False):
1731 return literal('<div class="rst-block">%s</div>' %
1762 return literal('<div class="rst-block">%s</div>' %
1732 MarkupRenderer.rst(source, mentions=mentions))
1763 MarkupRenderer.rst(source, mentions=mentions))
1733
1764
1734
1765
1735 def markdown(source, mentions=False):
1766 def markdown(source, mentions=False):
1736 return literal('<div class="markdown-block">%s</div>' %
1767 return literal('<div class="markdown-block">%s</div>' %
1737 MarkupRenderer.markdown(source, flavored=True,
1768 MarkupRenderer.markdown(source, flavored=True,
1738 mentions=mentions))
1769 mentions=mentions))
1739
1770
1740 def renderer_from_filename(filename, exclude=None):
1771 def renderer_from_filename(filename, exclude=None):
1741 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1772 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1742
1773
1743
1774
1744 def render(source, renderer='rst', mentions=False):
1775 def render(source, renderer='rst', mentions=False):
1745 if renderer == 'rst':
1776 if renderer == 'rst':
1746 return rst(source, mentions=mentions)
1777 return rst(source, mentions=mentions)
1747 if renderer == 'markdown':
1778 if renderer == 'markdown':
1748 return markdown(source, mentions=mentions)
1779 return markdown(source, mentions=mentions)
1749
1780
1750
1781
1751 def commit_status(repo, commit_id):
1782 def commit_status(repo, commit_id):
1752 return ChangesetStatusModel().get_status(repo, commit_id)
1783 return ChangesetStatusModel().get_status(repo, commit_id)
1753
1784
1754
1785
1755 def commit_status_lbl(commit_status):
1786 def commit_status_lbl(commit_status):
1756 return dict(ChangesetStatus.STATUSES).get(commit_status)
1787 return dict(ChangesetStatus.STATUSES).get(commit_status)
1757
1788
1758
1789
1759 def commit_time(repo_name, commit_id):
1790 def commit_time(repo_name, commit_id):
1760 repo = Repository.get_by_repo_name(repo_name)
1791 repo = Repository.get_by_repo_name(repo_name)
1761 commit = repo.get_commit(commit_id=commit_id)
1792 commit = repo.get_commit(commit_id=commit_id)
1762 return commit.date
1793 return commit.date
1763
1794
1764
1795
1765 def get_permission_name(key):
1796 def get_permission_name(key):
1766 return dict(Permission.PERMS).get(key)
1797 return dict(Permission.PERMS).get(key)
1767
1798
1768
1799
1769 def journal_filter_help():
1800 def journal_filter_help():
1770 return _(
1801 return _(
1771 'Example filter terms:\n' +
1802 'Example filter terms:\n' +
1772 ' repository:vcs\n' +
1803 ' repository:vcs\n' +
1773 ' username:marcin\n' +
1804 ' username:marcin\n' +
1774 ' action:*push*\n' +
1805 ' action:*push*\n' +
1775 ' ip:127.0.0.1\n' +
1806 ' ip:127.0.0.1\n' +
1776 ' date:20120101\n' +
1807 ' date:20120101\n' +
1777 ' date:[20120101100000 TO 20120102]\n' +
1808 ' date:[20120101100000 TO 20120102]\n' +
1778 '\n' +
1809 '\n' +
1779 'Generate wildcards using \'*\' character:\n' +
1810 'Generate wildcards using \'*\' character:\n' +
1780 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1811 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1781 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1812 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1782 '\n' +
1813 '\n' +
1783 'Optional AND / OR operators in queries\n' +
1814 'Optional AND / OR operators in queries\n' +
1784 ' "repository:vcs OR repository:test"\n' +
1815 ' "repository:vcs OR repository:test"\n' +
1785 ' "username:test AND repository:test*"\n'
1816 ' "username:test AND repository:test*"\n'
1786 )
1817 )
1787
1818
1788
1819
1789 def not_mapped_error(repo_name):
1820 def not_mapped_error(repo_name):
1790 flash(_('%s repository is not mapped to db perhaps'
1821 flash(_('%s repository is not mapped to db perhaps'
1791 ' it was created or renamed from the filesystem'
1822 ' it was created or renamed from the filesystem'
1792 ' please run the application again'
1823 ' please run the application again'
1793 ' in order to rescan repositories') % repo_name, category='error')
1824 ' in order to rescan repositories') % repo_name, category='error')
1794
1825
1795
1826
1796 def ip_range(ip_addr):
1827 def ip_range(ip_addr):
1797 from rhodecode.model.db import UserIpMap
1828 from rhodecode.model.db import UserIpMap
1798 s, e = UserIpMap._get_ip_range(ip_addr)
1829 s, e = UserIpMap._get_ip_range(ip_addr)
1799 return '%s - %s' % (s, e)
1830 return '%s - %s' % (s, e)
1800
1831
1801
1832
1802 def form(url, method='post', needs_csrf_token=True, **attrs):
1833 def form(url, method='post', needs_csrf_token=True, **attrs):
1803 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1834 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1804 if method.lower() != 'get' and needs_csrf_token:
1835 if method.lower() != 'get' and needs_csrf_token:
1805 raise Exception(
1836 raise Exception(
1806 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1837 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1807 'CSRF token. If the endpoint does not require such token you can ' +
1838 'CSRF token. If the endpoint does not require such token you can ' +
1808 'explicitly set the parameter needs_csrf_token to false.')
1839 'explicitly set the parameter needs_csrf_token to false.')
1809
1840
1810 return wh_form(url, method=method, **attrs)
1841 return wh_form(url, method=method, **attrs)
1811
1842
1812
1843
1813 def secure_form(url, method="POST", multipart=False, **attrs):
1844 def secure_form(url, method="POST", multipart=False, **attrs):
1814 """Start a form tag that points the action to an url. This
1845 """Start a form tag that points the action to an url. This
1815 form tag will also include the hidden field containing
1846 form tag will also include the hidden field containing
1816 the auth token.
1847 the auth token.
1817
1848
1818 The url options should be given either as a string, or as a
1849 The url options should be given either as a string, or as a
1819 ``url()`` function. The method for the form defaults to POST.
1850 ``url()`` function. The method for the form defaults to POST.
1820
1851
1821 Options:
1852 Options:
1822
1853
1823 ``multipart``
1854 ``multipart``
1824 If set to True, the enctype is set to "multipart/form-data".
1855 If set to True, the enctype is set to "multipart/form-data".
1825 ``method``
1856 ``method``
1826 The method to use when submitting the form, usually either
1857 The method to use when submitting the form, usually either
1827 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1858 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1828 hidden input with name _method is added to simulate the verb
1859 hidden input with name _method is added to simulate the verb
1829 over POST.
1860 over POST.
1830
1861
1831 """
1862 """
1832 from webhelpers.pylonslib.secure_form import insecure_form
1863 from webhelpers.pylonslib.secure_form import insecure_form
1833 from rhodecode.lib.auth import get_csrf_token, csrf_token_key
1864 from rhodecode.lib.auth import get_csrf_token, csrf_token_key
1834 form = insecure_form(url, method, multipart, **attrs)
1865 form = insecure_form(url, method, multipart, **attrs)
1835 token = HTML.div(hidden(csrf_token_key, get_csrf_token()), style="display: none;")
1866 token = HTML.div(hidden(csrf_token_key, get_csrf_token()), style="display: none;")
1836 return literal("%s\n%s" % (form, token))
1867 return literal("%s\n%s" % (form, token))
1837
1868
1838 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1869 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1839 select_html = select(name, selected, options, **attrs)
1870 select_html = select(name, selected, options, **attrs)
1840 select2 = """
1871 select2 = """
1841 <script>
1872 <script>
1842 $(document).ready(function() {
1873 $(document).ready(function() {
1843 $('#%s').select2({
1874 $('#%s').select2({
1844 containerCssClass: 'drop-menu',
1875 containerCssClass: 'drop-menu',
1845 dropdownCssClass: 'drop-menu-dropdown',
1876 dropdownCssClass: 'drop-menu-dropdown',
1846 dropdownAutoWidth: true%s
1877 dropdownAutoWidth: true%s
1847 });
1878 });
1848 });
1879 });
1849 </script>
1880 </script>
1850 """
1881 """
1851 filter_option = """,
1882 filter_option = """,
1852 minimumResultsForSearch: -1
1883 minimumResultsForSearch: -1
1853 """
1884 """
1854 input_id = attrs.get('id') or name
1885 input_id = attrs.get('id') or name
1855 filter_enabled = "" if enable_filter else filter_option
1886 filter_enabled = "" if enable_filter else filter_option
1856 select_script = literal(select2 % (input_id, filter_enabled))
1887 select_script = literal(select2 % (input_id, filter_enabled))
1857
1888
1858 return literal(select_html+select_script)
1889 return literal(select_html+select_script)
1859
1890
1860
1891
1861 def get_visual_attr(tmpl_context_var, attr_name):
1892 def get_visual_attr(tmpl_context_var, attr_name):
1862 """
1893 """
1863 A safe way to get a variable from visual variable of template context
1894 A safe way to get a variable from visual variable of template context
1864
1895
1865 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1896 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1866 :param attr_name: name of the attribute we fetch from the c.visual
1897 :param attr_name: name of the attribute we fetch from the c.visual
1867 """
1898 """
1868 visual = getattr(tmpl_context_var, 'visual', None)
1899 visual = getattr(tmpl_context_var, 'visual', None)
1869 if not visual:
1900 if not visual:
1870 return
1901 return
1871 else:
1902 else:
1872 return getattr(visual, attr_name, None)
1903 return getattr(visual, attr_name, None)
1873
1904
1874
1905
1875 def get_last_path_part(file_node):
1906 def get_last_path_part(file_node):
1876 if not file_node.path:
1907 if not file_node.path:
1877 return u''
1908 return u''
1878
1909
1879 path = safe_unicode(file_node.path.split('/')[-1])
1910 path = safe_unicode(file_node.path.split('/')[-1])
1880 return u'../' + path
1911 return u'../' + path
1881
1912
1882
1913
1883 def route_path(*args, **kwds):
1914 def route_path(*args, **kwds):
1884 """
1915 """
1885 Wrapper around pyramids `route_path` function. It is used to generate
1916 Wrapper around pyramids `route_path` function. It is used to generate
1886 URLs from within pylons views or templates. This will be removed when
1917 URLs from within pylons views or templates. This will be removed when
1887 pyramid migration if finished.
1918 pyramid migration if finished.
1888 """
1919 """
1889 req = get_current_request()
1920 req = get_current_request()
1890 return req.route_path(*args, **kwds)
1921 return req.route_path(*args, **kwds)
1891
1922
1892
1923
1893 def resource_path(*args, **kwds):
1924 def resource_path(*args, **kwds):
1894 """
1925 """
1895 Wrapper around pyramids `route_path` function. It is used to generate
1926 Wrapper around pyramids `route_path` function. It is used to generate
1896 URLs from within pylons views or templates. This will be removed when
1927 URLs from within pylons views or templates. This will be removed when
1897 pyramid migration if finished.
1928 pyramid migration if finished.
1898 """
1929 """
1899 req = get_current_request()
1930 req = get_current_request()
1900 return req.resource_path(*args, **kwds)
1931 return req.resource_path(*args, **kwds)
@@ -1,260 +1,278 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging
22 import logging
23 import urlparse
23 import threading
24 import threading
24 from BaseHTTPServer import BaseHTTPRequestHandler
25 from BaseHTTPServer import BaseHTTPRequestHandler
25 from SocketServer import TCPServer
26 from SocketServer import TCPServer
27 from routes.util import URLGenerator
26
28
27 import Pyro4
29 import Pyro4
30 import pylons
31 import rhodecode
28
32
29 from rhodecode.lib import hooks_base
33 from rhodecode.lib import hooks_base
30 from rhodecode.lib.utils2 import AttributeDict
34 from rhodecode.lib.utils2 import AttributeDict
31
35
32
36
33 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
34
38
35
39
36 class HooksHttpHandler(BaseHTTPRequestHandler):
40 class HooksHttpHandler(BaseHTTPRequestHandler):
37 def do_POST(self):
41 def do_POST(self):
38 method, extras = self._read_request()
42 method, extras = self._read_request()
39 try:
43 try:
40 result = self._call_hook(method, extras)
44 result = self._call_hook(method, extras)
41 except Exception as e:
45 except Exception as e:
42 result = {
46 result = {
43 'exception': e.__class__.__name__,
47 'exception': e.__class__.__name__,
44 'exception_args': e.args
48 'exception_args': e.args
45 }
49 }
46 self._write_response(result)
50 self._write_response(result)
47
51
48 def _read_request(self):
52 def _read_request(self):
49 length = int(self.headers['Content-Length'])
53 length = int(self.headers['Content-Length'])
50 body = self.rfile.read(length).decode('utf-8')
54 body = self.rfile.read(length).decode('utf-8')
51 data = json.loads(body)
55 data = json.loads(body)
52 return data['method'], data['extras']
56 return data['method'], data['extras']
53
57
54 def _write_response(self, result):
58 def _write_response(self, result):
55 self.send_response(200)
59 self.send_response(200)
56 self.send_header("Content-type", "text/json")
60 self.send_header("Content-type", "text/json")
57 self.end_headers()
61 self.end_headers()
58 self.wfile.write(json.dumps(result))
62 self.wfile.write(json.dumps(result))
59
63
60 def _call_hook(self, method, extras):
64 def _call_hook(self, method, extras):
61 hooks = Hooks()
65 hooks = Hooks()
62 result = getattr(hooks, method)(extras)
66 result = getattr(hooks, method)(extras)
63 return result
67 return result
64
68
65 def log_message(self, format, *args):
69 def log_message(self, format, *args):
66 """
70 """
67 This is an overriden method of BaseHTTPRequestHandler which logs using
71 This is an overriden method of BaseHTTPRequestHandler which logs using
68 logging library instead of writing directly to stderr.
72 logging library instead of writing directly to stderr.
69 """
73 """
70
74
71 message = format % args
75 message = format % args
72
76
73 # TODO: mikhail: add different log levels support
77 # TODO: mikhail: add different log levels support
74 log.debug(
78 log.debug(
75 "%s - - [%s] %s", self.client_address[0],
79 "%s - - [%s] %s", self.client_address[0],
76 self.log_date_time_string(), message)
80 self.log_date_time_string(), message)
77
81
78
82
79 class DummyHooksCallbackDaemon(object):
83 class DummyHooksCallbackDaemon(object):
80 def __init__(self):
84 def __init__(self):
81 self.hooks_module = Hooks.__module__
85 self.hooks_module = Hooks.__module__
82
86
83 def __enter__(self):
87 def __enter__(self):
84 log.debug('Running dummy hooks callback daemon')
88 log.debug('Running dummy hooks callback daemon')
85 return self
89 return self
86
90
87 def __exit__(self, exc_type, exc_val, exc_tb):
91 def __exit__(self, exc_type, exc_val, exc_tb):
88 log.debug('Exiting dummy hooks callback daemon')
92 log.debug('Exiting dummy hooks callback daemon')
89
93
90
94
91 class ThreadedHookCallbackDaemon(object):
95 class ThreadedHookCallbackDaemon(object):
92
96
93 _callback_thread = None
97 _callback_thread = None
94 _daemon = None
98 _daemon = None
95 _done = False
99 _done = False
96
100
97 def __init__(self):
101 def __init__(self):
98 self._prepare()
102 self._prepare()
99
103
100 def __enter__(self):
104 def __enter__(self):
101 self._run()
105 self._run()
102 return self
106 return self
103
107
104 def __exit__(self, exc_type, exc_val, exc_tb):
108 def __exit__(self, exc_type, exc_val, exc_tb):
105 self._stop()
109 self._stop()
106
110
107 def _prepare(self):
111 def _prepare(self):
108 raise NotImplementedError()
112 raise NotImplementedError()
109
113
110 def _run(self):
114 def _run(self):
111 raise NotImplementedError()
115 raise NotImplementedError()
112
116
113 def _stop(self):
117 def _stop(self):
114 raise NotImplementedError()
118 raise NotImplementedError()
115
119
116
120
117 class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
121 class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
118 """
122 """
119 Context manager which will run a callback daemon in a background thread.
123 Context manager which will run a callback daemon in a background thread.
120 """
124 """
121
125
122 hooks_uri = None
126 hooks_uri = None
123
127
124 def _prepare(self):
128 def _prepare(self):
125 log.debug("Preparing callback daemon and registering hook object")
129 log.debug("Preparing callback daemon and registering hook object")
126 self._daemon = Pyro4.Daemon()
130 self._daemon = Pyro4.Daemon()
127 hooks_interface = Hooks()
131 hooks_interface = Hooks()
128 self.hooks_uri = str(self._daemon.register(hooks_interface))
132 self.hooks_uri = str(self._daemon.register(hooks_interface))
129 log.debug("Hooks uri is: %s", self.hooks_uri)
133 log.debug("Hooks uri is: %s", self.hooks_uri)
130
134
131 def _run(self):
135 def _run(self):
132 log.debug("Running event loop of callback daemon in background thread")
136 log.debug("Running event loop of callback daemon in background thread")
133 callback_thread = threading.Thread(
137 callback_thread = threading.Thread(
134 target=self._daemon.requestLoop,
138 target=self._daemon.requestLoop,
135 kwargs={'loopCondition': lambda: not self._done})
139 kwargs={'loopCondition': lambda: not self._done})
136 callback_thread.daemon = True
140 callback_thread.daemon = True
137 callback_thread.start()
141 callback_thread.start()
138 self._callback_thread = callback_thread
142 self._callback_thread = callback_thread
139
143
140 def _stop(self):
144 def _stop(self):
141 log.debug("Waiting for background thread to finish.")
145 log.debug("Waiting for background thread to finish.")
142 self._done = True
146 self._done = True
143 self._callback_thread.join()
147 self._callback_thread.join()
144 self._daemon.close()
148 self._daemon.close()
145 self._daemon = None
149 self._daemon = None
146 self._callback_thread = None
150 self._callback_thread = None
147
151
148
152
149 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
153 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
150 """
154 """
151 Context manager which will run a callback daemon in a background thread.
155 Context manager which will run a callback daemon in a background thread.
152 """
156 """
153
157
154 hooks_uri = None
158 hooks_uri = None
155
159
156 IP_ADDRESS = '127.0.0.1'
160 IP_ADDRESS = '127.0.0.1'
157
161
158 # From Python docs: Polling reduces our responsiveness to a shutdown
162 # From Python docs: Polling reduces our responsiveness to a shutdown
159 # request and wastes cpu at all other times.
163 # request and wastes cpu at all other times.
160 POLL_INTERVAL = 0.1
164 POLL_INTERVAL = 0.1
161
165
162 def _prepare(self):
166 def _prepare(self):
163 log.debug("Preparing callback daemon and registering hook object")
167 log.debug("Preparing callback daemon and registering hook object")
164
168
165 self._done = False
169 self._done = False
166 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
170 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
167 _, port = self._daemon.server_address
171 _, port = self._daemon.server_address
168 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
172 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
169
173
170 log.debug("Hooks uri is: %s", self.hooks_uri)
174 log.debug("Hooks uri is: %s", self.hooks_uri)
171
175
172 def _run(self):
176 def _run(self):
173 log.debug("Running event loop of callback daemon in background thread")
177 log.debug("Running event loop of callback daemon in background thread")
174 callback_thread = threading.Thread(
178 callback_thread = threading.Thread(
175 target=self._daemon.serve_forever,
179 target=self._daemon.serve_forever,
176 kwargs={'poll_interval': self.POLL_INTERVAL})
180 kwargs={'poll_interval': self.POLL_INTERVAL})
177 callback_thread.daemon = True
181 callback_thread.daemon = True
178 callback_thread.start()
182 callback_thread.start()
179 self._callback_thread = callback_thread
183 self._callback_thread = callback_thread
180
184
181 def _stop(self):
185 def _stop(self):
182 log.debug("Waiting for background thread to finish.")
186 log.debug("Waiting for background thread to finish.")
183 self._daemon.shutdown()
187 self._daemon.shutdown()
184 self._callback_thread.join()
188 self._callback_thread.join()
185 self._daemon = None
189 self._daemon = None
186 self._callback_thread = None
190 self._callback_thread = None
187
191
188
192
189 def prepare_callback_daemon(extras, protocol=None, use_direct_calls=False):
193 def prepare_callback_daemon(extras, protocol=None, use_direct_calls=False):
190 callback_daemon = None
194 callback_daemon = None
191 protocol = protocol.lower() if protocol else None
195 protocol = protocol.lower() if protocol else None
192
196
193 if use_direct_calls:
197 if use_direct_calls:
194 callback_daemon = DummyHooksCallbackDaemon()
198 callback_daemon = DummyHooksCallbackDaemon()
195 extras['hooks_module'] = callback_daemon.hooks_module
199 extras['hooks_module'] = callback_daemon.hooks_module
196 else:
200 else:
197 callback_daemon = (
201 callback_daemon = (
198 Pyro4HooksCallbackDaemon()
202 Pyro4HooksCallbackDaemon()
199 if protocol == 'pyro4'
203 if protocol == 'pyro4'
200 else HttpHooksCallbackDaemon())
204 else HttpHooksCallbackDaemon())
201 extras['hooks_uri'] = callback_daemon.hooks_uri
205 extras['hooks_uri'] = callback_daemon.hooks_uri
202 extras['hooks_protocol'] = protocol
206 extras['hooks_protocol'] = protocol
203
207
204 return callback_daemon, extras
208 return callback_daemon, extras
205
209
206
210
207 class Hooks(object):
211 class Hooks(object):
208 """
212 """
209 Exposes the hooks for remote call backs
213 Exposes the hooks for remote call backs
210 """
214 """
211
215
212 @Pyro4.callback
216 @Pyro4.callback
213 def repo_size(self, extras):
217 def repo_size(self, extras):
214 log.debug("Called repo_size of Hooks object")
218 log.debug("Called repo_size of Hooks object")
215 return self._call_hook(hooks_base.repo_size, extras)
219 return self._call_hook(hooks_base.repo_size, extras)
216
220
217 @Pyro4.callback
221 @Pyro4.callback
218 def pre_pull(self, extras):
222 def pre_pull(self, extras):
219 log.debug("Called pre_pull of Hooks object")
223 log.debug("Called pre_pull of Hooks object")
220 return self._call_hook(hooks_base.pre_pull, extras)
224 return self._call_hook(hooks_base.pre_pull, extras)
221
225
222 @Pyro4.callback
226 @Pyro4.callback
223 def post_pull(self, extras):
227 def post_pull(self, extras):
224 log.debug("Called post_pull of Hooks object")
228 log.debug("Called post_pull of Hooks object")
225 return self._call_hook(hooks_base.post_pull, extras)
229 return self._call_hook(hooks_base.post_pull, extras)
226
230
227 @Pyro4.callback
231 @Pyro4.callback
228 def pre_push(self, extras):
232 def pre_push(self, extras):
229 log.debug("Called pre_push of Hooks object")
233 log.debug("Called pre_push of Hooks object")
230 return self._call_hook(hooks_base.pre_push, extras)
234 return self._call_hook(hooks_base.pre_push, extras)
231
235
232 @Pyro4.callback
236 @Pyro4.callback
233 def post_push(self, extras):
237 def post_push(self, extras):
234 log.debug("Called post_push of Hooks object")
238 log.debug("Called post_push of Hooks object")
235 return self._call_hook(hooks_base.post_push, extras)
239 return self._call_hook(hooks_base.post_push, extras)
236
240
237 def _call_hook(self, hook, extras):
241 def _call_hook(self, hook, extras):
238 extras = AttributeDict(extras)
242 extras = AttributeDict(extras)
243 netloc = urlparse.urlparse(extras.server_url).netloc
244 environ = {
245 'SERVER_NAME': netloc.split(':')[0],
246 'SERVER_PORT': ':' in netloc and netloc.split(':')[1] or '80',
247 'SCRIPT_NAME': '',
248 'PATH_INFO': '/',
249 'HTTP_HOST': 'localhost',
250 'REQUEST_METHOD': 'GET',
251 }
252 pylons_router = URLGenerator(rhodecode.CONFIG['routes.map'], environ)
253 pylons.url._push_object(pylons_router)
239
254
240 try:
255 try:
241 result = hook(extras)
256 result = hook(extras)
242 except Exception as error:
257 except Exception as error:
243 log.exception('Exception when handling hook %s', hook)
258 log.exception('Exception when handling hook %s', hook)
244 error_args = error.args
259 error_args = error.args
245 return {
260 return {
246 'status': 128,
261 'status': 128,
247 'output': '',
262 'output': '',
248 'exception': type(error).__name__,
263 'exception': type(error).__name__,
249 'exception_args': error_args,
264 'exception_args': error_args,
250 }
265 }
266 finally:
267 pylons.url._pop_object()
268
251 return {
269 return {
252 'status': result.status,
270 'status': result.status,
253 'output': result.output,
271 'output': result.output,
254 }
272 }
255
273
256 def __enter__(self):
274 def __enter__(self):
257 return self
275 return self
258
276
259 def __exit__(self, exc_type, exc_val, exc_tb):
277 def __exit__(self, exc_type, exc_val, exc_tb):
260 pass
278 pass
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now