@@ -0,0 +1,30 b'' | |||
|
1 | .. _debugging: | |
|
2 | ||
|
3 | =================== | |
|
4 | Debugging RhodeCode | |
|
5 | =================== | |
|
6 | ||
|
7 | If you encounter problems with RhodeCode, here are some instructions on how to | |
|
8 | debug them. | |
|
9 | ||
|
10 | **First, make sure you're using the latest version available.** | |
|
11 | ||
|
12 | enable detailed debug | |
|
13 | --------------------- | |
|
14 | ||
|
15 | RhodeCode uses the standard python logging module to log its output. | |
|
16 | By default only loggers with INFO level are displayed. To enable full output | |
|
17 | change `level = DEBUG` for all logging handlers in the currently used .ini file. | |
|
18 | This change will let you see much more detailed output in the logfile or | |
|
19 | console. This generally helps a lot when tracking down issues. | |
|
20 | ||
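Once `level = DEBUG` is set for the handlers, every module that follows the usual logging pattern in this code base becomes verbose. A minimal sketch of that pattern (illustrative only)::

    import logging
    log = logging.getLogger(__name__)

    # emitted to the logfile/console only when the handlers allow DEBUG
    log.debug('detailed diagnostic message')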
|
21 | ||
|
22 | enable interactive debug mode | |
|
23 | ----------------------------- | |
|
24 | ||
|
25 | To enable interactive debug mode simply comment out `set debug = false` in the | |
|
26 | .ini file. This will trigger an interactive debugger each time there is an | |
|
27 | error in the browser, or send an http link if the error occurred in the backend. | |
|
28 | This is a great tool for fast debugging as you get a handy python console right | |
|
29 | in the web view. **NEVER ENABLE THIS IN PRODUCTION**: the interactive console | |
|
30 | can be a serious security threat to your system. |
@@ -0,0 +1,70 b'' | |||
|
1 | #!/bin/bash | |
|
2 | ########################################### | |
|
3 | #### THIS IS AN ARCH LINUX RC.D SCRIPT #### | |
|
4 | ########################################### | |
|
5 | ||
|
6 | . /etc/rc.conf | |
|
7 | . /etc/rc.d/functions | |
|
8 | ||
|
9 | DAEMON=rhodecode | |
|
10 | APP_HOMEDIR="/srv" | |
|
11 | APP_PATH="$APP_HOMEDIR/$DAEMON" | |
|
12 | CONF_NAME="production.ini" | |
|
13 | LOG_FILE="/var/log/$DAEMON.log" | |
|
14 | PID_FILE="/run/daemons/$DAEMON" | |
|
15 | APPL=/usr/bin/paster | |
|
16 | RUN_AS="*****" | |
|
17 | ||
|
18 | ARGS="serve --daemon \ | |
|
19 | --user=$RUN_AS \ | |
|
20 | --group=$RUN_AS \ | |
|
21 | --pid-file=$PID_FILE \ | |
|
22 | --log-file=$LOG_FILE \ | |
|
23 | $APP_PATH/$CONF_NAME" | |
|
24 | ||
|
25 | [ -r /etc/conf.d/$DAEMON ] && . /etc/conf.d/$DAEMON | |
|
26 | ||
|
27 | if [[ -r $PID_FILE ]]; then | |
|
28 | read -r PID < "$PID_FILE" | |
|
29 | if [[ $PID && ! -d /proc/$PID ]]; then | |
|
30 | unset PID | |
|
31 | rm_daemon $DAEMON | |
|
32 | fi | |
|
33 | fi | |
|
34 | ||
|
35 | case "$1" in | |
|
36 | start) | |
|
37 | stat_busy "Starting $DAEMON" | |
|
38 | export HOME=$APP_PATH | |
|
39 | [ -z "$PID" ] && $APPL $ARGS &>/dev/null | |
|
40 | if [ $? = 0 ]; then | |
|
41 | add_daemon $DAEMON | |
|
42 | stat_done | |
|
43 | else | |
|
44 | stat_fail | |
|
45 | exit 1 | |
|
46 | fi | |
|
47 | ;; | |
|
48 | stop) | |
|
49 | stat_busy "Stopping $DAEMON" | |
|
50 | [ -n "$PID" ] && kill $PID &>/dev/null | |
|
51 | if [ $? = 0 ]; then | |
|
52 | rm_daemon $DAEMON | |
|
53 | stat_done | |
|
54 | else | |
|
55 | stat_fail | |
|
56 | exit 1 | |
|
57 | fi | |
|
58 | ;; | |
|
59 | restart) | |
|
60 | $0 stop | |
|
61 | sleep 1 | |
|
62 | $0 start | |
|
63 | ;; | |
|
64 | status) | |
|
65 | stat_busy "Checking $DAEMON status" | |
|
66 | ck_status $DAEMON | |
|
67 | ;; | |
|
68 | *) | |
|
69 | echo "usage: $0 {start|stop|restart|status}" | |
|
70 | esac No newline at end of file |
@@ -0,0 +1,58 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | """ | |
|
3 | package.rhodecode.config.conf | |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
|
5 | ||
|
6 | Various config settings for RhodeCode | |
|
7 | ||
|
8 | :created_on: Mar 7, 2012 | |
|
9 | :author: marcink | |
|
10 | :copyright: (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com> | |
|
11 | :license: <name>, see LICENSE_FILE for more details. | |
|
12 | """ | |
|
13 | from rhodecode import EXTENSIONS | |
|
14 | ||
|
15 | from rhodecode.lib.utils2 import __get_lem | |
|
16 | ||
|
17 | ||
|
18 | # the language map is also used by the whoosh indexer, which will index the | |
|
19 | # content of files with the specified extensions | |
|
20 | LANGUAGES_EXTENSIONS_MAP = __get_lem() | |
|
21 | ||
|
22 | #============================================================================== | |
|
23 | # WHOOSH INDEX EXTENSIONS | |
|
24 | #============================================================================== | |
|
25 | # EXTENSIONS WE WANT TO INDEX CONTENT OF USING WHOOSH | |
|
26 | INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys() | |
|
27 | ||
|
28 | # list of readme files to search for in the file tree and display in the summary | |
|
29 | # attached weights define the search order; lower is first | |
|
30 | ALL_READMES = [ | |
|
31 | ('readme', 0), ('README', 0), ('Readme', 0), | |
|
32 | ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1), | |
|
33 | ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2), | |
|
34 | ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2), | |
|
35 | ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2), | |
|
36 | ] | |
|
37 | ||
|
38 | # extensions together with weights to search; lower is first | |
|
39 | RST_EXTS = [ | |
|
40 | ('', 0), ('.rst', 1), ('.rest', 1), | |
|
41 | ('.RST', 2), ('.REST', 2), | |
|
42 | ('.txt', 3), ('.TXT', 3) | |
|
43 | ] | |
|
44 | ||
|
45 | MARKDOWN_EXTS = [ | |
|
46 | ('.md', 1), ('.MD', 1), | |
|
47 | ('.mkdn', 2), ('.MKDN', 2), | |
|
48 | ('.mdown', 3), ('.MDOWN', 3), | |
|
49 | ('.markdown', 4), ('.MARKDOWN', 4) | |
|
50 | ] | |
|
51 | ||
|
52 | PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)] | |
|
53 | ||
|
54 | ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS | |
|
55 | ||
|
56 | DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" | |
|
57 | ||
|
58 | DATE_FORMAT = "%Y-%m-%d" |
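A short sketch of how the constants above are typically consumed elsewhere in the code base (the import path follows the module docstring; the lookups themselves are illustrative)::

    import datetime
    from rhodecode.config import conf

    # languages that pygments associates with the "py" extension
    langs = conf.LANGUAGES_EXTENSIONS_MAP.get('py', [])

    # is this extension indexed by whoosh?
    indexed = 'py' in conf.INDEX_EXTENSIONS

    # render a timestamp with the shared format string
    stamp = datetime.datetime.now().strftime(conf.DATETIME_FORMAT)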
@@ -0,0 +1,84 b'' | |||
|
1 | # Additional mappings that are not present in the pygments lexers | |
|
2 | # used for building stats | |
|
3 | # format is {'ext':'Name'} eg. {'py':'Python'} | |
|
4 | # NOTE: this will override any mappings in LANGUAGES_EXTENSIONS_MAP | |
|
5 | # built by pygments | |
|
6 | EXTRA_MAPPINGS = {} | |
|
7 | ||
|
8 | #============================================================================== | |
|
9 | # WHOOSH INDEX EXTENSIONS | |
|
10 | #============================================================================== | |
|
11 | # if INDEX_EXTENSIONS is [] it'll use pygments lexers extensions by default. | |
|
12 | # To set your own, just add to this list the extensions whose content should be indexed | |
|
13 | INDEX_EXTENSIONS = [] | |
|
14 | ||
|
15 | # additional extensions for indexing besides the default from pygments | |
|
16 | # these get added to INDEX_EXTENSIONS | |
|
17 | EXTRA_INDEX_EXTENSIONS = [] | |
|
18 | ||
|
19 | ||
|
20 | #============================================================================== | |
|
21 | # POST CREATE REPOSITORY HOOK | |
|
22 | #============================================================================== | |
|
23 | # this function will be executed after each repository is created | |
|
24 | def _crhook(*args, **kwargs): | |
|
25 | """ | |
|
26 | Post create repository HOOK | |
|
27 | kwargs available: | |
|
28 | :param repo_name: | |
|
29 | :param repo_type: | |
|
30 | :param description: | |
|
31 | :param private: | |
|
32 | :param created_on: | |
|
33 | :param enable_downloads: | |
|
34 | :param repo_id: | |
|
35 | :param user_id: | |
|
36 | :param enable_statistics: | |
|
37 | :param clone_uri: | |
|
38 | :param fork_id: | |
|
39 | :param group_id: | |
|
40 | :param created_by: | |
|
41 | """ | |
|
42 | return 0 | |
|
43 | CREATE_REPO_HOOK = _crhook | |
|
44 | ||
|
45 | ||
|
46 | #============================================================================== | |
|
47 | # POST PUSH HOOK | |
|
48 | #============================================================================== | |
|
49 | ||
|
50 | # this function will be executed after each push; it's run after the built-in | |
|
51 | # hook that rhodecode uses for logging pushes | |
|
52 | def _pushhook(*args, **kwargs): | |
|
53 | """ | |
|
54 | Post push hook | |
|
55 | kwargs available: | |
|
56 | ||
|
57 | :param username: name of user who pushed | |
|
58 | :param ip: ip of who pushed | |
|
59 | :param action: push | |
|
60 | :param repository: repository name | |
|
61 | :param pushed_revs: generator of pushed revisions | |
|
62 | """ | |
|
63 | return 0 | |
|
64 | PUSH_HOOK = _pushhook | |
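The hook callbacks above are plain functions, so an installation can swap in its own logic. A minimal sketch of a customized push hook that uses only the kwargs documented above (the logger name and message are illustrative)::

    import logging
    log = logging.getLogger('rcextensions')

    def _custom_pushhook(*args, **kwargs):
        # pushed_revs is documented above as a generator of revisions
        revs = list(kwargs.get('pushed_revs') or [])
        log.info('push by %s@%s to %s: %s revision(s)',
                 kwargs.get('username'), kwargs.get('ip'),
                 kwargs.get('repository'), len(revs))
        return 0

    PUSH_HOOK = _custom_pushhook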
|
65 | ||
|
66 | ||
|
67 | #============================================================================== | |
|
68 | # POST PULL HOOK | |
|
69 | #============================================================================== | |
|
70 | ||
|
71 | # this function will be executed after each pull; it's run after the built-in | |
|
72 | # hook that rhodecode uses for logging pulls | |
|
73 | def _pullhook(*args, **kwargs): | |
|
74 | """ | |
|
75 | Post pull hook | |
|
76 | kwargs available:: | |
|
77 | ||
|
78 | :param username: name of user who pulled | |
|
79 | :param ip: ip of who pulled | |
|
80 | :param action: pull | |
|
81 | :param repository: repository name | |
|
82 | """ | |
|
83 | return 0 | |
|
84 | PULL_HOOK = _pullhook |
@@ -0,0 +1,79 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | """ | |
|
3 | rhodecode.config.rcextensions.make_rcextensions | |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
|
5 | ||
|
6 | Paster command that creates the rcextensions package for RhodeCode | |
|
7 | ||
|
8 | :created_on: Mar 6, 2012 | |
|
9 | :author: marcink | |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
|
11 | :license: GPLv3, see COPYING for more details. | |
|
12 | """ | |
|
13 | # This program is free software: you can redistribute it and/or modify | |
|
14 | # it under the terms of the GNU General Public License as published by | |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
|
16 | # (at your option) any later version. | |
|
17 | # | |
|
18 | # This program is distributed in the hope that it will be useful, | |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
21 | # GNU General Public License for more details. | |
|
22 | # | |
|
23 | # You should have received a copy of the GNU General Public License | |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
25 | import os | |
|
26 | import sys | |
|
27 | import pkg_resources | |
|
28 | import traceback | |
|
29 | import logging | |
|
30 | from os.path import dirname as dn, join as jn | |
|
31 | ||
|
32 | #to get the rhodecode import | |
|
33 | sys.path.append(dn(dn(dn(os.path.realpath(__file__))))) | |
|
34 | ||
|
35 | from rhodecode.lib.utils import BasePasterCommand, Command, ask_ok | |
|
36 | ||
|
37 | log = logging.getLogger(__name__) | |
|
38 | ||
|
39 | ||
|
40 | class MakeRcExt(BasePasterCommand): | |
|
41 | ||
|
42 | max_args = 1 | |
|
43 | min_args = 1 | |
|
44 | ||
|
45 | usage = "CONFIG_FILE" | |
|
46 | summary = "Creates additional extensions for rhodecode" | |
|
47 | group_name = "RhodeCode" | |
|
48 | takes_config_file = -1 | |
|
49 | parser = Command.standard_parser(verbose=True) | |
|
50 | ||
|
51 | def command(self): | |
|
52 | logging.config.fileConfig(self.path_to_ini_file) | |
|
53 | from pylons import config | |
|
54 | ||
|
55 | def _make_file(ext_file): | |
|
56 | bdir = os.path.split(ext_file)[0] | |
|
57 | if not os.path.isdir(bdir): | |
|
58 | os.makedirs(bdir) | |
|
59 | with open(ext_file, 'wb') as f: | |
|
60 | f.write(tmpl) | |
|
61 | log.info('Written new extensions file to %s' % ext_file) | |
|
62 | ||
|
63 | here = config['here'] | |
|
64 | tmpl = pkg_resources.resource_string( | |
|
65 | 'rhodecode', jn('config', 'rcextensions', '__init__.py') | |
|
66 | ) | |
|
67 | ext_file = jn(here, 'rcextensions', '__init__.py') | |
|
68 | if os.path.exists(ext_file): | |
|
69 | msg = ('Extension file already exists, do you want ' | |
|
70 | 'to overwrite it ? [y/n]') | |
|
71 | if ask_ok(msg): | |
|
72 | _make_file(ext_file) | |
|
73 | else: | |
|
74 | log.info('nothing done...') | |
|
75 | else: | |
|
76 | _make_file(ext_file) | |
|
77 | ||
|
78 | def update_parser(self): | |
|
79 | pass |
@@ -0,0 +1,405 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | """ | |
|
3 | rhodecode.lib.utils2 | |
|
4 | ~~~~~~~~~~~~~~~~~~~~ | |
|
5 | ||
|
6 | Some simple helper functions | |
|
7 | ||
|
8 | :created_on: Jan 5, 2011 | |
|
9 | :author: marcink | |
|
10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> | |
|
11 | :license: GPLv3, see COPYING for more details. | |
|
12 | """ | |
|
13 | # This program is free software: you can redistribute it and/or modify | |
|
14 | # it under the terms of the GNU General Public License as published by | |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
|
16 | # (at your option) any later version. | |
|
17 | # | |
|
18 | # This program is distributed in the hope that it will be useful, | |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
21 | # GNU General Public License for more details. | |
|
22 | # | |
|
23 | # You should have received a copy of the GNU General Public License | |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
25 | ||
|
26 | import re | |
|
27 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
|
28 | ||
|
29 | ||
|
30 | def __get_lem(): | |
|
31 | """ | |
|
32 | Get language extension map based on what's inside pygments lexers | |
|
33 | """ | |
|
34 | from pygments import lexers | |
|
35 | from string import lower | |
|
36 | from collections import defaultdict | |
|
37 | ||
|
38 | d = defaultdict(lambda: []) | |
|
39 | ||
|
40 | def __clean(s): | |
|
41 | s = s.lstrip('*') | |
|
42 | s = s.lstrip('.') | |
|
43 | ||
|
44 | if s.find('[') != -1: | |
|
45 | exts = [] | |
|
46 | start, stop = s.find('['), s.find(']') | |
|
47 | ||
|
48 | for suffix in s[start + 1:stop]: | |
|
49 | exts.append(s[:s.find('[')] + suffix) | |
|
50 | return map(lower, exts) | |
|
51 | else: | |
|
52 | return map(lower, [s]) | |
|
53 | ||
|
54 | for lx, t in sorted(lexers.LEXERS.items()): | |
|
55 | m = map(__clean, t[-2]) | |
|
56 | if m: | |
|
57 | m = reduce(lambda x, y: x + y, m) | |
|
58 | for ext in m: | |
|
59 | desc = lx.replace('Lexer', '') | |
|
60 | d[ext].append(desc) | |
|
61 | ||
|
62 | return dict(d) | |
|
63 | ||
|
64 | def str2bool(_str): | |
|
65 | """ | |
|
66 | returns a True/False value from the given string; it tries to translate the | |
|
67 | string into a boolean | |
|
68 | ||
|
69 | :param _str: string value to translate into boolean | |
|
70 | :rtype: boolean | |
|
71 | :returns: boolean from given string | |
|
72 | """ | |
|
73 | if _str is None: | |
|
74 | return False | |
|
75 | if _str in (True, False): | |
|
76 | return _str | |
|
77 | _str = str(_str).strip().lower() | |
|
78 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') | |
|
79 | ||
|
80 | ||
|
81 | def convert_line_endings(line, mode): | |
|
82 | """ | |
|
83 | Converts a given line's line ending according to the given mode | |
|
84 | ||
|
85 | Available modes are:: | |
|
86 | 0 - Unix | |
|
87 | 1 - Mac | |
|
88 | 2 - DOS | |
|
89 | ||
|
90 | :param line: given line to convert | |
|
91 | :param mode: mode to convert to | |
|
92 | :rtype: str | |
|
93 | :return: converted line according to mode | |
|
94 | """ | |
|
95 | from string import replace | |
|
96 | ||
|
97 | if mode == 0: | |
|
98 | line = replace(line, '\r\n', '\n') | |
|
99 | line = replace(line, '\r', '\n') | |
|
100 | elif mode == 1: | |
|
101 | line = replace(line, '\r\n', '\r') | |
|
102 | line = replace(line, '\n', '\r') | |
|
103 | elif mode == 2: | |
|
104 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) | |
|
105 | return line | |
|
106 | ||
|
107 | ||
|
108 | def detect_mode(line, default): | |
|
109 | """ | |
|
110 | Detects the line break for the given line; if the line break couldn't be found | |
|
111 | the given default value is returned | |
|
112 | ||
|
113 | :param line: str line | |
|
114 | :param default: default | |
|
115 | :rtype: int | |
|
116 | :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS | |
|
117 | """ | |
|
118 | if line.endswith('\r\n'): | |
|
119 | return 2 | |
|
120 | elif line.endswith('\n'): | |
|
121 | return 0 | |
|
122 | elif line.endswith('\r'): | |
|
123 | return 1 | |
|
124 | else: | |
|
125 | return default | |
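A small usage sketch tying `detect_mode` and `convert_line_endings` together (the sample string is illustrative)::

    line = 'some text\r\n'
    mode = detect_mode(line, 0)                  # -> 2, DOS line ending detected
    unix_line = convert_line_endings(line, 0)    # -> 'some text\n'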
|
126 | ||
|
127 | ||
|
128 | def generate_api_key(username, salt=None): | |
|
129 | """ | |
|
130 | Generates a unique API key for the given username; if salt is not given | |
|
131 | it'll be generated from a random string | |
|
132 | ||
|
133 | :param username: username as string | |
|
134 | :param salt: salt to hash generate KEY | |
|
135 | :rtype: str | |
|
136 | :returns: sha1 hash from username+salt | |
|
137 | """ | |
|
138 | from tempfile import _RandomNameSequence | |
|
139 | import hashlib | |
|
140 | ||
|
141 | if salt is None: | |
|
142 | salt = _RandomNameSequence().next() | |
|
143 | ||
|
144 | return hashlib.sha1(username + salt).hexdigest() | |
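For example (username and salt are illustrative)::

    key = generate_api_key('marcin', salt='s3cr3t')
    # a sha1 hex digest, so always 40 characters; deterministic for a fixed salt
    assert len(key) == 40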
|
145 | ||
|
146 | ||
|
147 | def safe_unicode(str_, from_encoding=None): | |
|
148 | """ | |
|
149 | safe unicode function. Does a few tricks to turn str_ into unicode | |
|
150 | ||
|
151 | In case of a UnicodeDecodeError we try to return it with the encoding detected | |
|
152 | by the chardet library; if that fails, fall back to unicode with errors replaced | |
|
153 | ||
|
154 | :param str_: string to decode | |
|
155 | :rtype: unicode | |
|
156 | :returns: unicode object | |
|
157 | """ | |
|
158 | if isinstance(str_, unicode): | |
|
159 | return str_ | |
|
160 | ||
|
161 | if not from_encoding: | |
|
162 | import rhodecode | |
|
163 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') | |
|
164 | from_encoding = DEFAULT_ENCODING | |
|
165 | ||
|
166 | try: | |
|
167 | return unicode(str_) | |
|
168 | except UnicodeDecodeError: | |
|
169 | pass | |
|
170 | ||
|
171 | try: | |
|
172 | return unicode(str_, from_encoding) | |
|
173 | except UnicodeDecodeError: | |
|
174 | pass | |
|
175 | ||
|
176 | try: | |
|
177 | import chardet | |
|
178 | encoding = chardet.detect(str_)['encoding'] | |
|
179 | if encoding is None: | |
|
180 | raise Exception() | |
|
181 | return str_.decode(encoding) | |
|
182 | except (ImportError, UnicodeDecodeError, Exception): | |
|
183 | return unicode(str_, from_encoding, 'replace') | |
|
184 | ||
|
185 | ||
|
186 | def safe_str(unicode_, to_encoding=None): | |
|
187 | """ | |
|
188 | safe str function. Does a few tricks to turn unicode_ into a string | |
|
189 | ||
|
190 | In case of a UnicodeEncodeError we try to return it with the encoding detected | |
|
191 | by the chardet library; if that fails, fall back to a string with errors replaced | |
|
192 | ||
|
193 | :param unicode_: unicode to encode | |
|
194 | :rtype: str | |
|
195 | :returns: str object | |
|
196 | """ | |
|
197 | ||
|
198 | # if it's not basestr cast to str | |
|
199 | if not isinstance(unicode_, basestring): | |
|
200 | return str(unicode_) | |
|
201 | ||
|
202 | if isinstance(unicode_, str): | |
|
203 | return unicode_ | |
|
204 | ||
|
205 | if not to_encoding: | |
|
206 | import rhodecode | |
|
207 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') | |
|
208 | to_encoding = DEFAULT_ENCODING | |
|
209 | ||
|
210 | try: | |
|
211 | return unicode_.encode(to_encoding) | |
|
212 | except UnicodeEncodeError: | |
|
213 | pass | |
|
214 | ||
|
215 | try: | |
|
216 | import chardet | |
|
217 | encoding = chardet.detect(unicode_)['encoding'] | |
|
218 | ||
|
219 | if encoding is None: | |
|
220 | raise Exception() | |
|
221 | ||
|
222 | return unicode_.encode(encoding) | |
|
223 | except (ImportError, UnicodeEncodeError, Exception): | |
|
224 | return unicode_.encode(to_encoding, 'replace') | |
|
225 | ||
|
226 | ||
|
227 | ||
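A sketch of the intended round trip, assuming the configured default encoding is utf8 (the byte string is illustrative)::

    s = '\xc4\x85'          # utf-8 bytes of a single Polish character
    u = safe_unicode(s)     # -> u'\u0105', never raises
    back = safe_str(u)      # -> '\xc4\x85' again, encoded with the default encoding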
|
228 | ||
|
229 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): | |
|
230 | """ | |
|
231 | Custom engine_from_config functions that makes sure we use NullPool for | |
|
232 | file based sqlite databases. This prevents errors on sqlite. This only | |
|
233 | applies to sqlalchemy versions < 0.7.0 | |
|
234 | ||
|
235 | """ | |
|
236 | import sqlalchemy | |
|
237 | from sqlalchemy import engine_from_config as efc | |
|
238 | import logging | |
|
239 | ||
|
240 | if int(sqlalchemy.__version__.split('.')[1]) < 7: | |
|
241 | ||
|
242 | # This solution should work for sqlalchemy < 0.7.0, and should use | |
|
243 | # proxy=TimerProxy() for execution time profiling | |
|
244 | ||
|
245 | from sqlalchemy.pool import NullPool | |
|
246 | url = configuration[prefix + 'url'] | |
|
247 | ||
|
248 | if url.startswith('sqlite'): | |
|
249 | kwargs.update({'poolclass': NullPool}) | |
|
250 | return efc(configuration, prefix, **kwargs) | |
|
251 | else: | |
|
252 | import time | |
|
253 | from sqlalchemy import event | |
|
254 | from sqlalchemy.engine import Engine | |
|
255 | ||
|
256 | log = logging.getLogger('sqlalchemy.engine') | |
|
257 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38) | |
|
258 | engine = efc(configuration, prefix, **kwargs) | |
|
259 | ||
|
260 | def color_sql(sql): | |
|
261 | COLOR_SEQ = "\033[1;%dm" | |
|
262 | COLOR_SQL = YELLOW | |
|
263 | normal = '\x1b[0m' | |
|
264 | return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal]) | |
|
265 | ||
|
266 | if configuration['debug']: | |
|
267 | #attach events only for debug configuration | |
|
268 | ||
|
269 | def before_cursor_execute(conn, cursor, statement, | |
|
270 | parameters, context, executemany): | |
|
271 | context._query_start_time = time.time() | |
|
272 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) | |
|
273 | ||
|
274 | ||
|
275 | def after_cursor_execute(conn, cursor, statement, | |
|
276 | parameters, context, executemany): | |
|
277 | total = time.time() - context._query_start_time | |
|
278 | log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total)) | |
|
279 | ||
|
280 | event.listen(engine, "before_cursor_execute", | |
|
281 | before_cursor_execute) | |
|
282 | event.listen(engine, "after_cursor_execute", | |
|
283 | after_cursor_execute) | |
|
284 | ||
|
285 | return engine | |
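A sketch of how the sqlite special case plays out (the configuration keys follow the .ini files in this changeset; the values are illustrative)::

    conf = {
        'sqlalchemy.db1.url': 'sqlite:///rhodecode.db',
        'debug': False,
    }
    engine = engine_from_config(conf, prefix='sqlalchemy.db1.')
    # on SQLAlchemy < 0.7 the engine is created with poolclass=NullPool;
    # on newer versions the query-timing listeners are attached only in debug mode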
|
286 | ||
|
287 | ||
|
288 | def age(curdate): | |
|
289 | """ | |
|
290 | turns a datetime into an age string. | |
|
291 | ||
|
292 | :param curdate: datetime object | |
|
293 | :rtype: unicode | |
|
294 | :returns: unicode words describing age | |
|
295 | """ | |
|
296 | ||
|
297 | from datetime import datetime | |
|
298 | from webhelpers.date import time_ago_in_words | |
|
299 | ||
|
300 | _ = lambda s: s | |
|
301 | ||
|
302 | if not curdate: | |
|
303 | return '' | |
|
304 | ||
|
305 | agescales = [(_(u"year"), 3600 * 24 * 365), | |
|
306 | (_(u"month"), 3600 * 24 * 30), | |
|
307 | (_(u"day"), 3600 * 24), | |
|
308 | (_(u"hour"), 3600), | |
|
309 | (_(u"minute"), 60), | |
|
310 | (_(u"second"), 1), ] | |
|
311 | ||
|
312 | age = datetime.now() - curdate | |
|
313 | age_seconds = (age.days * agescales[2][1]) + age.seconds | |
|
314 | pos = 1 | |
|
315 | for scale in agescales: | |
|
316 | if scale[1] <= age_seconds: | |
|
317 | if pos == 6: | |
|
318 | pos = 5 | |
|
319 | return '%s %s' % (time_ago_in_words(curdate, | |
|
320 | agescales[pos][0]), _('ago')) | |
|
321 | pos += 1 | |
|
322 | ||
|
323 | return _(u'just now') | |
|
324 | ||
|
325 | ||
|
326 | def uri_filter(uri): | |
|
327 | """ | |
|
328 | Removes user:password from given url string | |
|
329 | ||
|
330 | :param uri: | |
|
331 | :rtype: list | |
|
332 | :returns: filtered list of strings | |
|
333 | """ | |
|
334 | if not uri: | |
|
335 | return '' | |
|
336 | ||
|
337 | proto = '' | |
|
338 | ||
|
339 | for pat in ('https://', 'http://'): | |
|
340 | if uri.startswith(pat): | |
|
341 | uri = uri[len(pat):] | |
|
342 | proto = pat | |
|
343 | break | |
|
344 | ||
|
345 | # remove passwords and username | |
|
346 | uri = uri[uri.find('@') + 1:] | |
|
347 | ||
|
348 | # get the port | |
|
349 | cred_pos = uri.find(':') | |
|
350 | if cred_pos == -1: | |
|
351 | host, port = uri, None | |
|
352 | else: | |
|
353 | host, port = uri[:cred_pos], uri[cred_pos + 1:] | |
|
354 | ||
|
355 | return filter(None, [proto, host, port]) | |
|
356 | ||
|
357 | ||
|
358 | def credentials_filter(uri): | |
|
359 | """ | |
|
360 | Returns a url with removed credentials | |
|
361 | ||
|
362 | :param uri: | |
|
363 | """ | |
|
364 | ||
|
365 | uri = uri_filter(uri) | |
|
366 | #check if we have port | |
|
367 | if len(uri) > 2 and uri[2]: | |
|
368 | uri[2] = ':' + uri[2] | |
|
369 | ||
|
370 | return ''.join(uri) | |
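For example (the URL is illustrative)::

    credentials_filter('http://user:secret@example.com:8080/repo')
    # -> 'http://example.com:8080/repo', credentials stripped, the rest preserved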
|
371 | ||
|
372 | ||
|
373 | def get_changeset_safe(repo, rev): | |
|
374 | """ | |
|
375 | Safe version of get_changeset; if the changeset doesn't exist for a | |
|
376 | repo, it returns a Dummy one instead | |
|
377 | ||
|
378 | :param repo: | |
|
379 | :param rev: | |
|
380 | """ | |
|
381 | from rhodecode.lib.vcs.backends.base import BaseRepository | |
|
382 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
|
383 | if not isinstance(repo, BaseRepository): | |
|
384 | raise Exception('You must pass a Repository ' | |
|
385 | 'object as first argument, got %s' % type(repo)) | |
|
386 | ||
|
387 | try: | |
|
388 | cs = repo.get_changeset(rev) | |
|
389 | except RepositoryError: | |
|
390 | from rhodecode.lib.utils import EmptyChangeset | |
|
391 | cs = EmptyChangeset(requested_revision=rev) | |
|
392 | return cs | |
|
393 | ||
|
394 | ||
|
395 | def extract_mentioned_users(s): | |
|
396 | """ | |
|
397 | Returns unique usernames from given string s that have @mention | |
|
398 | ||
|
399 | :param s: string to get mentions | |
|
400 | """ | |
|
401 | usrs = {} | |
|
402 | for username in re.findall(r'(?:^@|\s@)(\w+)', s): | |
|
403 | usrs[username] = username | |
|
404 | ||
|
405 | return sorted(usrs.keys()) |
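For example (the usernames are illustrative)::

    extract_mentioned_users('thanks @marcink, please also look @lukasz')
    # -> ['lukasz', 'marcink']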
@@ -6,6 +6,7 b' syntax: glob' | |||
|
6 | 6 | *.egg |
|
7 | 7 | |
|
8 | 8 | syntax: regexp |
|
9 | ^rcextensions | |
|
9 | 10 | ^build |
|
10 | 11 | ^docs/build/ |
|
11 | 12 | ^docs/_build/ |
@@ -15,10 +15,11 b' RhodeCode is similar in some respects to' | |||
|
15 | 15 | however RhodeCode can be run as standalone hosted application on your own server. |
|
16 | 16 | It is open source and donation ware and focuses more on providing a customized, |
|
17 | 17 | self administered interface for Mercurial_ and GIT_ repositories. |
|
18 | RhodeCode is powered by a vcs_ library that Lukasz Balcerzak and I created to | |
|
19 | handle multiple different version control systems. | |
|
18 | RhodeCode works on *nix systems and Windows. It is powered by a vcs_ library | |
|
19 | that Lukasz Balcerzak and Marcin Kuzminski created to handle multiple | |
|
20 | different version control systems. | |
|
20 | 21 | |
|
21 |
RhodeCode uses ` |
|
|
22 | RhodeCode uses `PEP386 versioning <http://www.python.org/dev/peps/pep-0386/>`_ | |
|
22 | 23 | |
|
23 | 24 | Installation |
|
24 | 25 | ------------ |
@@ -99,7 +100,7 b' RhodeCode Features' | |||
|
99 | 100 | - Intelligent cache with invalidation after push or project change, provides |
|
100 | 101 | high performance and always up to date data. |
|
101 | 102 | - Rss / atom feeds, gravatar support, download sources as zip/tar/gz |
|
102 |
- |
|
|
103 | - Optional async tasks for speed and performance using celery_ | |
|
103 | 104 | - Backup scripts can do backup of whole app and send it over scp to desired |
|
104 | 105 | location |
|
105 | 106 | - Based on pylons / sqlalchemy / sqlite / whoosh / vcs |
@@ -93,6 +93,11 b' issue_prefix = #' | |||
|
93 | 93 | ## all running rhodecode instances. Leave empty if you don't use it |
|
94 | 94 | instance_id = |
|
95 | 95 | |
|
96 | ## alternative HTTP response code for failed authentication. The default HTTP | |
|
97 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble | |
|
98 | ## handling that. Set this variable to 403 to return HTTPForbidden instead | |
|
99 | auth_ret_code = | |
|
100 | ||
|
96 | 101 | #################################### |
|
97 | 102 | ### CELERY CONFIG #### |
|
98 | 103 | #################################### |
@@ -171,6 +176,7 b' beaker.cache.sql_cache_long.key_length =' | |||
|
171 | 176 | |
|
172 | 177 | beaker.session.type = file |
|
173 | 178 | beaker.session.key = rhodecode |
|
179 | # secure cookie requires AES python libraries | |
|
174 | 180 | #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu |
|
175 | 181 | #beaker.session.validate_key = 9712sds2212c--zxc123 |
|
176 | 182 | beaker.session.timeout = 36000 |
@@ -207,13 +213,13 b' logview.pylons.util = #eee' | |||
|
207 | 213 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode |
|
208 | 214 | sqlalchemy.db1.echo = false |
|
209 | 215 | sqlalchemy.db1.pool_recycle = 3600 |
|
210 | sqlalchemy.convert_unicode = true | |
|
216 | sqlalchemy.db1.convert_unicode = true | |
|
211 | 217 | |
|
212 | 218 | ################################ |
|
213 | 219 | ### LOGGING CONFIGURATION #### |
|
214 | 220 | ################################ |
|
215 | 221 | [loggers] |
|
216 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates | |
|
222 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
|
217 | 223 | |
|
218 | 224 | [handlers] |
|
219 | 225 | keys = console, console_sql |
@@ -259,6 +265,12 b' handlers = console_sql' | |||
|
259 | 265 | qualname = sqlalchemy.engine |
|
260 | 266 | propagate = 0 |
|
261 | 267 | |
|
268 | [logger_whoosh_indexer] | |
|
269 | level = DEBUG | |
|
270 | handlers = | |
|
271 | qualname = whoosh_indexer | |
|
272 | propagate = 1 | |
|
273 | ||
|
262 | 274 | ############## |
|
263 | 275 | ## HANDLERS ## |
|
264 | 276 | ############## |
@@ -27,7 +27,7 b' API ACCESS' | |||
|
27 | 27 | All clients are required to send JSON-RPC spec JSON data:: |
|
28 | 28 | |
|
29 | 29 | { |
|
30 | "id:<id>, | |
|
30 | "id:"<id>", | |
|
31 | 31 | "api_key":"<api_key>", |
|
32 | 32 | "method":"<method_name>", |
|
33 | 33 | "args":{"<arg_key>":"<arg_val>"} |
@@ -50,9 +50,9 b' Simply provide' | |||
|
50 | 50 | RhodeCode API will return always a JSON-RPC response:: |
|
51 | 51 | |
|
52 | 52 | { |
|
53 | "id":<id>, | |
|
54 | "result": "<result>", | |
|
55 | "error": null | |
|
53 | "id":<id>, # matching id sent by request | |
|
54 | "result": "<result>"|null, # JSON formatted result, null if any errors | |
|
55 | "error": null|"<error_message>" # JSON formatted error (if any) | |
|
56 | 56 | } |
|
57 | 57 | |
|
58 | 58 | All responses from API will be `HTTP/1.0 200 OK`, if there's an error while |
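Putting the request and response formats together, a client call can be sketched as below; the endpoint URL, api_key and id are placeholders, and the standard library is used only for illustration::

    import json
    import urllib2

    payload = {
        'id': 1,
        'api_key': 'SOME_API_KEY',
        'method': 'get_users',
        'args': {},
    }
    request = urllib2.Request('http://rhodecode.example.com/_admin/api',
                              data=json.dumps(payload))
    response = json.loads(urllib2.urlopen(request).read())
    # response['id'] echoes the request id; check response['error'] before
    # using response['result']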
@@ -72,6 +72,7 b' belonging to user with admin rights' | |||
|
72 | 72 | |
|
73 | 73 | INPUT:: |
|
74 | 74 | |
|
75 | id : <id_for_response> | |
|
75 | 76 | api_key : "<api_key>" |
|
76 | 77 |
|
|
77 | 78 | args : { |
@@ -94,6 +95,7 b' rights.' | |||
|
94 | 95 | |
|
95 | 96 | INPUT:: |
|
96 | 97 | |
|
98 | id : <id_for_response> | |
|
97 | 99 | api_key : "<api_key>" |
|
98 | 100 |
|
|
99 | 101 | args : { |
@@ -111,7 +113,15 b' OUTPUT::' | |||
|
111 | 113 | "email" : "<email>", |
|
112 | 114 | "active" : "<bool>", |
|
113 | 115 | "admin" :Â "<bool>", |
|
114 | "ldap_dn" : "<ldap_dn>" | |
|
116 | "ldap_dn" : "<ldap_dn>", | |
|
117 | "last_login": "<last_login>", | |
|
118 | "permissions": { | |
|
119 | "global": ["hg.create.repository", | |
|
120 | "repository.read", | |
|
121 | "hg.register.manual_activate"], | |
|
122 | "repositories": {"repo1": "repository.none"}, | |
|
123 | "repositories_groups": {"Group1": "group.read"} | |
|
124 | }, | |
|
115 | 125 | } |
|
116 | 126 | |
|
117 | 127 | error: null |
@@ -126,6 +136,7 b' belonging to user with admin rights.' | |||
|
126 | 136 | |
|
127 | 137 | INPUT:: |
|
128 | 138 | |
|
139 | id : <id_for_response> | |
|
129 | 140 | api_key : "<api_key>" |
|
130 | 141 |
|
|
131 | 142 | args : { } |
@@ -141,7 +152,8 b' OUTPUT::' | |||
|
141 | 152 | "email" : "<email>", |
|
142 | 153 | "active" : "<bool>", |
|
143 | 154 | "admin" : "<bool>",
|
144 | "ldap_dn" : "<ldap_dn>" | |
|
155 | "ldap_dn" : "<ldap_dn>", | |
|
156 | "last_login": "<last_login>", | |
|
145 | 157 | }, |
|
146 | 158 | … |
|
147 | 159 | ] |
@@ -157,6 +169,7 b' be executed only using api_key belonging' | |||
|
157 | 169 | |
|
158 | 170 | INPUT:: |
|
159 | 171 | |
|
172 | id : <id_for_response> | |
|
160 | 173 | api_key : "<api_key>" |
|
161 | 174 |
|
|
162 | 175 | args : { |
@@ -188,6 +201,7 b' be executed only using api_key belonging' | |||
|
188 | 201 | |
|
189 | 202 | INPUT:: |
|
190 | 203 | |
|
204 | id : <id_for_response> | |
|
191 | 205 | api_key : "<api_key>" |
|
192 | 206 |
|
|
193 | 207 | args : { |
@@ -220,6 +234,7 b' belonging to user with admin rights.' | |||
|
220 | 234 | |
|
221 | 235 | INPUT:: |
|
222 | 236 | |
|
237 | id : <id_for_response> | |
|
223 | 238 | api_key : "<api_key>" |
|
224 | 239 |
|
|
225 | 240 | args : { |
@@ -258,6 +273,7 b' api_key belonging to user with admin rig' | |||
|
258 | 273 | |
|
259 | 274 | INPUT:: |
|
260 | 275 | |
|
276 | id : <id_for_response> | |
|
261 | 277 | api_key : "<api_key>" |
|
262 | 278 |
|
|
263 | 279 | args : { } |
@@ -296,6 +312,7 b' belonging to user with admin rights' | |||
|
296 | 312 | |
|
297 | 313 | INPUT:: |
|
298 | 314 | |
|
315 | id : <id_for_response> | |
|
299 | 316 | api_key : "<api_key>" |
|
300 | 317 |
|
|
301 | 318 | args: { |
@@ -322,6 +339,7 b' belonging to user with admin rights' | |||
|
322 | 339 | |
|
323 | 340 | INPUT:: |
|
324 | 341 | |
|
342 | id : <id_for_response> | |
|
325 | 343 | api_key : "<api_key>" |
|
326 | 344 |
|
|
327 | 345 | args: { |
@@ -350,6 +368,7 b' using api_key belonging to user with adm' | |||
|
350 | 368 | |
|
351 | 369 | INPUT:: |
|
352 | 370 | |
|
371 | id : <id_for_response> | |
|
353 | 372 | api_key : "<api_key>" |
|
354 | 373 |
|
|
355 | 374 | args: { |
@@ -370,12 +389,14 b' OUTPUT::' | |||
|
370 | 389 | get_repo |
|
371 | 390 | -------- |
|
372 | 391 | |
|
373 |
Gets an existing repository by it's name or repository_id. |
|
|
392 | Gets an existing repository by its name or repository_id. Members will return | |
|
393 | either the users_group or user associated with that repository. This command can | |
|
374 | 394 | be executed only using api_key belonging to user with admin rights. |
|
375 | 395 | |
|
376 | 396 | |
|
377 | 397 | INPUT:: |
|
378 | 398 | |
|
399 | id : <id_for_response> | |
|
379 | 400 | api_key : "<api_key>" |
|
380 | 401 |
|
|
381 | 402 | args: { |
@@ -391,7 +412,9 b' OUTPUT::' | |||
|
391 | 412 | "type" : "<type>", |
|
392 | 413 | "description" : "<description>", |
|
393 | 414 | "members" : [ |
|
394 |
{ |
|
|
415 | { | |
|
416 | "type": "user", | |
|
417 | "id" : "<userid>", | |
|
395 | 418 | "username" : "<username>", |
|
396 | 419 | "firstname": "<firstname>", |
|
397 | 420 | "lastname" : "<lastname>", |
@@ -402,7 +425,8 b' OUTPUT::' | |||
|
402 | 425 | "permission" : "repository.(read|write|admin)" |
|
403 | 426 | }, |
|
404 | 427 | … |
|
405 | { | |
|
428 | { | |
|
429 | "type": "users_group", | |
|
406 | 430 | "id" : "<usersgroupid>", |
|
407 | 431 | "name" : "<usersgroupname>", |
|
408 | 432 | "active": "<bool>", |
@@ -423,6 +447,7 b' belonging to user with admin rights' | |||
|
423 | 447 | |
|
424 | 448 | INPUT:: |
|
425 | 449 | |
|
450 | id : <id_for_response> | |
|
426 | 451 | api_key : "<api_key>" |
|
427 | 452 |
|
|
428 | 453 | args: { } |
@@ -452,6 +477,7 b' with admin rights' | |||
|
452 | 477 | |
|
453 | 478 | INPUT:: |
|
454 | 479 | |
|
480 | id : <id_for_response> | |
|
455 | 481 | api_key : "<api_key>" |
|
456 | 482 |
|
|
457 | 483 | args: { |
@@ -485,6 +511,7 b' and create "baz" repository with "bar" a' | |||
|
485 | 511 | |
|
486 | 512 | INPUT:: |
|
487 | 513 | |
|
514 | id : <id_for_response> | |
|
488 | 515 | api_key : "<api_key>" |
|
489 | 516 |
|
|
490 | 517 | args: { |
@@ -514,6 +541,7 b' belonging to user with admin rights.' | |||
|
514 | 541 | |
|
515 | 542 | INPUT:: |
|
516 | 543 | |
|
544 | id : <id_for_response> | |
|
517 | 545 | api_key : "<api_key>" |
|
518 | 546 |
|
|
519 | 547 | args: { |
@@ -538,6 +566,7 b' with admin rights.' | |||
|
538 | 566 | |
|
539 | 567 | INPUT:: |
|
540 | 568 | |
|
569 | id : <id_for_response> | |
|
541 | 570 | api_key : "<api_key>" |
|
542 | 571 |
|
|
543 | 572 | args: { |
@@ -563,6 +592,7 b' only using api_key belonging to user wit' | |||
|
563 | 592 | |
|
564 | 593 | INPUT:: |
|
565 | 594 | |
|
595 | id : <id_for_response> | |
|
566 | 596 | api_key : "<api_key>" |
|
567 | 597 |
|
|
568 | 598 | args: { |
@@ -588,6 +618,7 b' api_key belonging to user with admin rig' | |||
|
588 | 618 | |
|
589 | 619 | INPUT:: |
|
590 | 620 | |
|
621 | id : <id_for_response> | |
|
591 | 622 | api_key : "<api_key>" |
|
592 | 623 |
|
|
593 | 624 | args: { |
@@ -612,6 +643,7 b' executed only using api_key belonging to' | |||
|
612 | 643 | |
|
613 | 644 | INPUT:: |
|
614 | 645 | |
|
646 | id : <id_for_response> | |
|
615 | 647 | api_key : "<api_key>" |
|
616 | 648 |
|
|
617 | 649 | args: { |
@@ -5,7 +5,45 b' Changelog' | |||
|
5 | 5 | ========= |
|
6 | 6 | |
|
7 | 7 | |
|
8 | 1.3.4 (**2012-03-28**) | |
|
9 | ---------------------- | |
|
8 | 10 | |
|
11 | news | |
|
12 | ++++ | |
|
13 | ||
|
14 | - Whoosh logging is now controlled by the .ini files logging setup | |
|
15 | - added clone-url into edit form on /settings page | |
|
16 | - added help text into repo add/edit forms | |
|
17 | - created rcextensions module with additional mappings (ref #322) and | |
|
18 | post push/pull/create repo hooks callbacks | |
|
19 | - implemented #377 Users view for his own permissions on account page | |
|
20 | - #399 added inheritance of permissions for users group on repos groups | |
|
21 | - #401 repository group is automatically pre-selected when adding repos | |
|
22 | inside a repository group | |
|
23 | - added alternative HTTP 403 response when client failed to authenticate. Helps | |
|
24 | solving issues with Mercurial and LDAP | |
|
25 | - #402 removed group prefix from repository name when listing repositories | |
|
26 | inside a group | |
|
27 | - added gravatars into permission view and permissions autocomplete | |
|
28 | - #347 when running multiple RhodeCode instances, properly invalidates cache | |
|
29 | for all registered servers | |
|
30 | ||
|
31 | fixes | |
|
32 | +++++ | |
|
33 | ||
|
34 | - fixed #390 cache invalidation problems on repos inside group | |
|
35 | - fixed #385 clone by ID url was losing the proxy prefix in URL | |
|
36 | - fixed some unicode problems with waitress | |
|
37 | - fixed issue with escaping < and > in changeset commits | |
|
38 | - fixed error occurring during recursive group creation in API | |
|
39 | create_repo function | |
|
40 | - fixed #393 py2.5 fixes for routes url generator | |
|
41 | - fixed #397 Private repository groups shows up before login | |
|
42 | - fixed #396 fixed problems with revoking users in nested groups | |
|
43 | - fixed mysql unicode issues + specified InnoDB as default engine with | |
|
44 | utf8 charset | |
|
45 | - #406 trim long branch/tag names in changelog to not break UI | |
|
46 | ||
|
9 | 47 | 1.3.3 (**2012-03-02**) |
|
10 | 48 | ---------------------- |
|
11 | 49 |
@@ -23,7 +23,8 b' Users Guide' | |||
|
23 | 23 | usage/git_support |
|
24 | 24 | usage/statistics |
|
25 | 25 | usage/backup |
|
26 | ||
|
26 | usage/debugging | |
|
27 | ||
|
27 | 28 | **Develop** |
|
28 | 29 | |
|
29 | 30 | .. toctree:: |
@@ -20,9 +20,10 b' following command to do this::' | |||
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | Next, you need to create the databases used by RhodeCode. I recommend that you |
|
23 |
use sqlite (default) |
|
|
23 | use postgresql or sqlite (default). If you choose a database other than the | |
|
24 | 24 | default ensure you properly adjust the db url in your production.ini |
|
25 |
configuration file to use this other database. |
|
|
25 | configuration file to use this other database. RhodeCode currently supports | |
|
26 | postgresql, sqlite and mysql databases. Create the database by running | |
|
26 | 27 | the following command:: |
|
27 | 28 | |
|
28 | 29 | paster setup-app production.ini |
@@ -57,15 +58,18 b' You are now ready to use RhodeCode, to r' | |||
|
57 | 58 | - In the admin panel you can toggle ldap, anonymous, permissions settings. As |
|
58 | 59 | well as edit more advanced options on users and repositories |
|
59 | 60 | |
|
60 | Try copying your own mercurial repository into the "root" directory you are | |
|
61 | using, then from within the RhodeCode web application choose Admin > | |
|
62 | repositories. Then choose Add New Repository. Add the repository you copied | |
|
63 | into the root. Test that you can browse your repository from within RhodeCode | |
|
64 | and then try cloning your repository from RhodeCode with:: | |
|
61 | Optionally users can create an `rcextensions` package that extends RhodeCode | |
|
62 | functionality. To do this simply execute:: | |
|
63 | ||
|
64 | paster make-rcext production.ini | |
|
65 | 65 | |
|
66 | hg clone http://127.0.0.1:5000/<repository name> | |
|
66 | This will create the `rcextensions` package in the same place that your `ini` file | |
|
67 | lives. With `rcextensions` it's possible to add additional mappings for whoosh and | |
|
68 | stats, and to add additional code to the push/pull/create repo hooks, for example | |
|
69 | for sending signals to build bots such as Jenkins. | |
|
70 | Please see the `__init__.py` file inside `rcextensions` package | |
|
71 | for more details. | |
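For instance, the generated `__init__.py` exposes an `EXTRA_MAPPINGS` dict in the {'ext': 'Name'} format; a hypothetical customization for the statistics page could be::

    # count *.xaml files as XAML in the repository statistics
    EXTRA_MAPPINGS = {'xaml': 'XAML'}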
|
67 | 72 | |
|
68 | where *repository name* is replaced by the name of your repository. | |
|
69 | 73 | |
|
70 | 74 | Using RhodeCode with SSH |
|
71 | 75 | ------------------------ |
@@ -47,6 +47,9 b' This will upgrade the schema and update ' | |||
|
47 | 47 | and will always recheck the settings of the application, if there are no new |
|
48 | 48 | options that need to be set. |
|
49 | 49 | |
|
50 | .. note:: | |
|
51 | If you're using Celery, make sure you restart all instances of it after | |
|
52 | upgrade. | |
|
50 | 53 | |
|
51 | 54 | .. _virtualenv: http://pypi.python.org/pypi/virtualenv |
|
52 | 55 | .. _python: http://www.python.org/ |
@@ -71,6 +71,11 b' RhodeCode will send mails on user regist' | |||
|
71 | 71 | on errors the mails will have a detailed traceback of error. |
|
72 | 72 | |
|
73 | 73 | |
|
74 | Mails are also sent for code comments. If someone comments on a changeset | |
|
75 | mail is sent to all participants, the person who commited the changeset | |
|
76 | (if present in RhodeCode), and to all people mentioned with @mention system. | |
|
77 | ||
|
78 | ||
|
74 | 79 | Trending source files |
|
75 | 80 | --------------------- |
|
76 | 81 |
@@ -93,6 +93,11 b' issue_prefix = #' | |||
|
93 | 93 | ## all running rhodecode instances. Leave empty if you don't use it |
|
94 | 94 | instance_id = |
|
95 | 95 | |
|
96 | ## alternative HTTP response code for failed authentication. The default HTTP | |
|
97 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble | |
|
98 | ## handling that. Set this variable to 403 to return HTTPForbidden instead | |
|
99 | auth_ret_code = | |
|
100 | ||
|
96 | 101 | #################################### |
|
97 | 102 | ### CELERY CONFIG #### |
|
98 | 103 | #################################### |
@@ -208,13 +213,13 b' logview.pylons.util = #eee' | |||
|
208 | 213 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode |
|
209 | 214 | sqlalchemy.db1.echo = false |
|
210 | 215 | sqlalchemy.db1.pool_recycle = 3600 |
|
211 | sqlalchemy.convert_unicode = true | |
|
216 | sqlalchemy.db1.convert_unicode = true | |
|
212 | 217 | |
|
213 | 218 | ################################ |
|
214 | 219 | ### LOGGING CONFIGURATION #### |
|
215 | 220 | ################################ |
|
216 | 221 | [loggers] |
|
217 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates | |
|
222 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
|
218 | 223 | |
|
219 | 224 | [handlers] |
|
220 | 225 | keys = console, console_sql |
@@ -260,6 +265,12 b' handlers = console_sql' | |||
|
260 | 265 | qualname = sqlalchemy.engine |
|
261 | 266 | propagate = 0 |
|
262 | 267 | |
|
268 | [logger_whoosh_indexer] | |
|
269 | level = DEBUG | |
|
270 | handlers = | |
|
271 | qualname = whoosh_indexer | |
|
272 | propagate = 1 | |
|
273 | ||
|
263 | 274 | ############## |
|
264 | 275 | ## HANDLERS ## |
|
265 | 276 | ############## |
@@ -1,17 +1,17 b'' | |||
|
1 | 1 | Pylons==1.0.0 |
|
2 | 2 | Beaker==1.6.3 |
|
3 |
WebHelpers |
|
|
3 | WebHelpers==1.3 | |
|
4 | 4 | formencode==1.2.4 |
|
5 |
SQLAlchemy==0.7. |
|
|
6 |
Mako==0. |
|
|
5 | SQLAlchemy==0.7.6 | |
|
6 | Mako==0.6.2 | |
|
7 | 7 | pygments>=1.4 |
|
8 | 8 | whoosh>=2.3.0,<2.4 |
|
9 | 9 | celery>=2.2.5,<2.3 |
|
10 | 10 | babel |
|
11 | 11 | python-dateutil>=1.5.0,<2.0.0 |
|
12 | dulwich>=0.8.0,<0.9.0 | |
|
12 | https://github.com/jelmer/dulwich/tarball/master | |
|
13 | 13 | webob==1.0.8 |
|
14 | 14 | markdown==2.1.1 |
|
15 | 15 | docutils==0.8.1 |
|
16 | 16 | py-bcrypt |
|
17 | mercurial>=2.1,<2.2 No newline at end of file | |
|
17 | mercurial>=2.1,<2.2 |
@@ -4,7 +4,7 b'' | |||
|
4 | 4 | ~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | RhodeCode, a web based repository management based on pylons |
|
7 |
versioning implementation: http:// |
|
|
7 | versioning implementation: http://www.python.org/dev/peps/pep-0386/ | |
|
8 | 8 | |
|
9 | 9 | :created_on: Apr 9, 2010 |
|
10 | 10 | :author: marcink |
@@ -26,8 +26,18 b'' | |||
|
26 | 26 | import sys |
|
27 | 27 | import platform |
|
28 | 28 | |
|
29 |
VERSION = (1, 3, |
|
|
30 | __version__ = '.'.join((str(each) for each in VERSION[:4])) | |
|
29 | VERSION = (1, 3, 4) | |
|
30 | ||
|
31 | try: | |
|
32 | from rhodecode.lib import get_current_revision | |
|
33 | _rev = get_current_revision() | |
|
34 | if _rev: | |
|
35 | VERSION += ('dev%s' % _rev[0],) | |
|
36 | except ImportError: | |
|
37 | pass | |
|
38 | ||
|
39 | __version__ = ('.'.join((str(each) for each in VERSION[:3])) + | |
|
40 | '.'.join(VERSION[3:])) | |
|
31 | 41 | __dbversion__ = 5 # defines current db version for migrations |
|
32 | 42 | __platform__ = platform.system() |
|
33 | 43 | __license__ = 'GPLv3' |
@@ -39,16 +49,16 b" PLATFORM_OTHERS = ('Linux', 'Darwin', 'F" | |||
|
39 | 49 | requirements = [ |
|
40 | 50 | "Pylons==1.0.0", |
|
41 | 51 | "Beaker==1.6.3", |
|
42 |
"WebHelpers |
|
|
52 | "WebHelpers==1.3", | |
|
43 | 53 | "formencode==1.2.4", |
|
44 |
"SQLAlchemy==0.7. |
|
|
45 |
"Mako==0. |
|
|
54 | "SQLAlchemy==0.7.6", | |
|
55 | "Mako==0.6.2", | |
|
46 | 56 | "pygments>=1.4", |
|
47 | 57 | "whoosh>=2.3.0,<2.4", |
|
48 | 58 | "celery>=2.2.5,<2.3", |
|
49 | 59 | "babel", |
|
50 | 60 | "python-dateutil>=1.5.0,<2.0.0", |
|
51 |
"dulwich>=0.8. |
|
|
61 | "dulwich>=0.8.4,<0.9.0", | |
|
52 | 62 | "webob==1.0.8", |
|
53 | 63 | "markdown==2.1.1", |
|
54 | 64 | "docutils==0.8.1", |
@@ -65,17 +75,6 b' else:' | |||
|
65 | 75 | requirements.append("mercurial>=2.1,<2.2") |
|
66 | 76 | |
|
67 | 77 | |
|
68 | try: | |
|
69 | from rhodecode.lib import get_current_revision | |
|
70 | _rev = get_current_revision(quiet=True) | |
|
71 | except ImportError: | |
|
72 | # this is needed when doing some setup.py operations | |
|
73 | _rev = False | |
|
74 | ||
|
75 | if len(VERSION) > 3 and _rev: | |
|
76 | __version__ += ' [rev:%s]' % _rev[0] | |
|
77 | ||
|
78 | ||
|
79 | 78 | def get_version(): |
|
80 | 79 | """Returns shorter version (digit parts only) as string.""" |
|
81 | 80 | |
@@ -90,3 +89,6 b' CELERY_ON = False' | |||
|
90 | 89 | |
|
91 | 90 | # link to config for pylons |
|
92 | 91 | CONFIG = {} |
|
92 | ||
|
93 | # Linked module for extensions | |
|
94 | EXTENSIONS = {} |
@@ -93,6 +93,11 b' issue_prefix = #' | |||
|
93 | 93 | ## all running rhodecode instances. Leave empty if you don't use it |
|
94 | 94 | instance_id = |
|
95 | 95 | |
|
96 | ## alternative HTTP response code for failed authentication. The default HTTP | |
|
97 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble | |
|
98 | ## handling that. Set this variable to 403 to return HTTPForbidden instead | |
|
99 | auth_ret_code = | |
|
100 | ||
|
96 | 101 | #################################### |
|
97 | 102 | ### CELERY CONFIG #### |
|
98 | 103 | #################################### |
@@ -218,13 +223,13 b' sqlalchemy.db1.url = sqlite:///%(here)s/' | |||
|
218 | 223 | |
|
219 | 224 | sqlalchemy.db1.echo = false |
|
220 | 225 | sqlalchemy.db1.pool_recycle = 3600 |
|
221 | sqlalchemy.convert_unicode = true | |
|
226 | sqlalchemy.db1.convert_unicode = true | |
|
222 | 227 | |
|
223 | 228 | ################################ |
|
224 | 229 | ### LOGGING CONFIGURATION #### |
|
225 | 230 | ################################ |
|
226 | 231 | [loggers] |
|
227 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates | |
|
232 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
|
228 | 233 | |
|
229 | 234 | [handlers] |
|
230 | 235 | keys = console, console_sql |
@@ -270,6 +275,12 b' handlers = console_sql' | |||
|
270 | 275 | qualname = sqlalchemy.engine |
|
271 | 276 | propagate = 0 |
|
272 | 277 | |
|
278 | [logger_whoosh_indexer] | |
|
279 | level = DEBUG | |
|
280 | handlers = | |
|
281 | qualname = whoosh_indexer | |
|
282 | propagate = 1 | |
|
283 | ||
|
273 | 284 | ############## |
|
274 | 285 | ## HANDLERS ## |
|
275 | 286 | ############## |
@@ -2,21 +2,24 b'' | |||
|
2 | 2 | |
|
3 | 3 | import os |
|
4 | 4 | import logging |
|
5 | import rhodecode | |
|
5 | 6 | |
|
6 | 7 | from mako.lookup import TemplateLookup |
|
7 | 8 | from pylons.configuration import PylonsConfig |
|
8 | 9 | from pylons.error import handle_mako_error |
|
9 | 10 | |
|
10 | import rhodecode | |
|
11 | # don't remove this import it does magic for celery | |
|
12 | from rhodecode.lib import celerypylons | |
|
13 | ||
|
11 | 14 | import rhodecode.lib.app_globals as app_globals |
|
12 | import rhodecode.lib.helpers | |
|
13 | 15 | |
|
14 | 16 | from rhodecode.config.routing import make_map |
|
15 | # don't remove this import it does magic for celery | |
|
16 |
from rhodecode.lib import |
|
|
17 | from rhodecode.lib import engine_from_config | |
|
17 | ||
|
18 | from rhodecode.lib import helpers | |
|
18 | 19 | from rhodecode.lib.auth import set_available_permissions |
|
19 | from rhodecode.lib.utils import repo2db_mapper, make_ui, set_rhodecode_config | |
|
20 | from rhodecode.lib.utils import repo2db_mapper, make_ui, set_rhodecode_config,\ | |
|
21 | load_rcextensions | |
|
22 | from rhodecode.lib.utils2 import engine_from_config, str2bool | |
|
20 | 23 | from rhodecode.model import init_model |
|
21 | 24 | from rhodecode.model.scm import ScmModel |
|
22 | 25 | |
@@ -24,17 +27,20 b' log = logging.getLogger(__name__)' | |||
|
24 | 27 | |
|
25 | 28 | |
|
26 | 29 | def load_environment(global_conf, app_conf, initial=False): |
|
27 | """Configure the Pylons environment via the ``pylons.config`` | |
|
30 | """ | |
|
31 | Configure the Pylons environment via the ``pylons.config`` | |
|
28 | 32 | object |
|
29 | 33 | """ |
|
30 | 34 | config = PylonsConfig() |
|
31 | 35 | |
|
32 | 36 | # Pylons paths |
|
33 | 37 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
|
34 |
paths = dict( |
|
|
35 | controllers=os.path.join(root, 'controllers'), | |
|
36 |
|
|
|
37 |
|
|
|
38 | paths = dict( | |
|
39 | root=root, | |
|
40 | controllers=os.path.join(root, 'controllers'), | |
|
41 | static_files=os.path.join(root, 'public'), | |
|
42 | templates=[os.path.join(root, 'templates')] | |
|
43 | ) | |
|
38 | 44 | |
|
39 | 45 | # Initialize config with the basic options |
|
40 | 46 | config.init_app(global_conf, app_conf, package='rhodecode', paths=paths) |
@@ -44,8 +50,11 b' def load_environment(global_conf, app_co' | |||
|
44 | 50 | |
|
45 | 51 | config['routes.map'] = make_map(config) |
|
46 | 52 | config['pylons.app_globals'] = app_globals.Globals(config) |
|
47 |
config['pylons.h'] = |
|
|
53 | config['pylons.h'] = helpers | |
|
48 | 54 | rhodecode.CONFIG = config |
|
55 | ||
|
56 | load_rcextensions(root_path=config['here']) | |
|
57 | ||
|
49 | 58 | # Setup cache object as early as possible |
|
50 | 59 | import pylons |
|
51 | 60 | pylons.cache._push_object(config['pylons.app_globals'].cache) |
@@ -284,7 +284,6 b' class ReposController(BaseController):' | |||
|
284 | 284 | |
|
285 | 285 | :param repo_name: |
|
286 | 286 | """ |
|
287 | ||
|
288 | 287 | try: |
|
289 | 288 | RepoModel().revoke_user_permission(repo=repo_name, |
|
290 | 289 | user=request.POST['user_id']) |
@@ -145,11 +145,12 b' class UsersController(BaseController):' | |||
|
145 | 145 | user_model = UserModel() |
|
146 | 146 | try: |
|
147 | 147 | user_model.delete(id) |
|
148 | Session.commit() | |
|
148 | 149 | h.flash(_('successfully deleted user'), category='success') |
|
149 | Session.commit() | |
|
150 | 150 | except (UserOwnsReposException, DefaultUserException), e: |
|
151 |
h.flash( |
|
|
151 | h.flash(e, category='warning') | |
|
152 | 152 | except Exception: |
|
153 | log.error(traceback.format_exc()) | |
|
153 | 154 | h.flash(_('An error occurred during deletion of user'), |
|
154 | 155 | category='error') |
|
155 | 156 | return redirect(url('users')) |
@@ -32,8 +32,9 b' from pylons import request, session, tmp' | |||
|
32 | 32 | from pylons.controllers.util import abort, redirect |
|
33 | 33 | from pylons.i18n.translation import _ |
|
34 | 34 | |
|
35 | from rhodecode.lib import helpers as h | |
|
35 | 36 | from rhodecode.lib.exceptions import UsersGroupsAssignedException |
|
36 |
from rhodecode.lib import |
|
|
37 | from rhodecode.lib.utils2 import safe_unicode | |
|
37 | 38 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
38 | 39 | from rhodecode.lib.base import BaseController, render |
|
39 | 40 |
@@ -233,10 +233,10 b' class JSONRPCController(WSGIController):' | |||
|
233 | 233 | try: |
|
234 | 234 | return json.dumps(response) |
|
235 | 235 | except TypeError, e: |
|
236 |
log. |
|
|
236 | log.error('API FAILED. Error encoding response: %s' % e) | |
|
237 | 237 | return json.dumps( |
|
238 | 238 | dict( |
|
239 | self._req_id, | |
|
239 | id=self._req_id, | |
|
240 | 240 | result=None, |
|
241 | 241 | error="Error encoding response" |
|
242 | 242 | ) |
@@ -30,16 +30,15 b' import logging' | |||
|
30 | 30 | |
|
31 | 31 | from rhodecode.controllers.api import JSONRPCController, JSONRPCError |
|
32 | 32 | from rhodecode.lib.auth import HasPermissionAllDecorator, \ |
|
33 | HasPermissionAnyDecorator, PasswordGenerator | |
|
33 | HasPermissionAnyDecorator, PasswordGenerator, AuthUser | |
|
34 | 34 | |
|
35 | 35 | from rhodecode.model.meta import Session |
|
36 | 36 | from rhodecode.model.scm import ScmModel |
|
37 |
from rhodecode.model.db import User, UsersGroup, |
|
|
37 | from rhodecode.model.db import User, UsersGroup, Repository | |
|
38 | 38 | from rhodecode.model.repo import RepoModel |
|
39 | 39 | from rhodecode.model.user import UserModel |
|
40 | 40 | from rhodecode.model.users_group import UsersGroupModel |
|
41 |
from rhodecode. |
|
|
42 | ||
|
41 | from rhodecode.lib.utils import map_groups | |
|
43 | 42 | |
|
44 | 43 | log = logging.getLogger(__name__) |
|
45 | 44 | |
@@ -100,7 +99,9 b' class ApiController(JSONRPCController):' | |||
|
100 | 99 | email=user.email, |
|
101 | 100 | active=user.active, |
|
102 | 101 | admin=user.admin, |
|
103 | ldap_dn=user.ldap_dn | |
|
102 | ldap_dn=user.ldap_dn, | |
|
103 | last_login=user.last_login, | |
|
104 | permissions=AuthUser(user_id=user.user_id).permissions | |
|
104 | 105 | ) |
|
105 | 106 | |
|
106 | 107 | @HasPermissionAllDecorator('hg.admin') |
@@ -122,7 +123,8 b' class ApiController(JSONRPCController):' | |||
|
122 | 123 | email=user.email, |
|
123 | 124 | active=user.active, |
|
124 | 125 | admin=user.admin, |
|
125 | ldap_dn=user.ldap_dn | |
|
126 | ldap_dn=user.ldap_dn, | |
|
127 | last_login=user.last_login, | |
|
126 | 128 | ) |
|
127 | 129 | ) |
|
128 | 130 | return result |
@@ -282,7 +284,7 b' class ApiController(JSONRPCController):' | |||
|
282 | 284 | @HasPermissionAllDecorator('hg.admin') |
|
283 | 285 | def add_user_to_users_group(self, apiuser, group_name, username): |
|
284 | 286 | """" |
|
285 | Add a user to a group | |
|
287 | Add a user to a users group | |
|
286 | 288 | |
|
287 | 289 | :param apiuser: |
|
288 | 290 | :param group_name: |
@@ -360,7 +362,7 b' class ApiController(JSONRPCController):' | |||
|
360 | 362 | user = user.user |
|
361 | 363 | members.append( |
|
362 | 364 | dict( |
|
363 |
type |
|
|
365 | type="user", | |
|
364 | 366 | id=user.user_id, |
|
365 | 367 | username=user.username, |
|
366 | 368 | firstname=user.name, |
@@ -377,7 +379,7 b' class ApiController(JSONRPCController):' | |||
|
377 | 379 | users_group = users_group.users_group |
|
378 | 380 | members.append( |
|
379 | 381 | dict( |
|
380 | type | |
|
382 | type="users_group", | |
|
381 | 383 | id=users_group.users_group_id, |
|
382 | 384 | name=users_group.users_group_name, |
|
383 | 385 | active=users_group.users_group_active, |
@@ -464,15 +466,10 b' class ApiController(JSONRPCController):' | |||
|
464 | 466 | if Repository.get_by_repo_name(repo_name): |
|
465 | 467 | raise JSONRPCError("repo %s already exist" % repo_name) |
|
466 | 468 | |
|
467 | groups = repo_name.split( | |
|
469 | groups = repo_name.split(Repository.url_sep()) | |
|
468 | 470 | real_name = groups[-1] |
|
469 | groups = groups[:-1] | |
|
470 | parent_id = None | |
|
471 | for g in groups: | |
|
472 | group = RepoGroup.get_by_group_name(g) | |
|
473 | if not group: | |
|
474 | group = ReposGroupModel().create(g, '', parent_id) | |
|
475 | parent_id = group.group_id | |
|
471 | # create structure of groups | |
|
472 | group = map_groups(repo_name) | |
|
476 | 473 | |
|
477 | 474 | repo = RepoModel().create( |
|
478 | 475 | dict( |
@@ -481,7 +478,7 b' class ApiController(JSONRPCController):' | |||
|
481 | 478 | description=description, |
|
482 | 479 | private=private, |
|
483 | 480 | repo_type=repo_type, |
|
484 | repo_group= | |
|
|
481 | repo_group=group.group_id if group else None, | |
|
485 | 482 | clone_uri=clone_uri |
|
486 | 483 | ), |
|
487 | 484 | owner |
@@ -31,7 +31,7 b' import binascii' | |||
|
31 | 31 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
32 | 32 | from rhodecode.lib.base import BaseRepoController, render |
|
33 | 33 | from rhodecode.lib.compat import OrderedDict |
|
34 | from rhodecode.lib import safe_unicode | |
|
34 | from rhodecode.lib.utils2 import safe_unicode | |
|
35 | 35 | log = logging.getLogger(__name__) |
|
36 | 36 | |
|
37 | 37 |
@@ -51,13 +51,18 b' from rhodecode.lib.diffs import wrapped_' | |||
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | def anchor_url(revision, path): | |
|
54 | def _update_with_GET(params, GET): | |
|
55 | for k in ['diff1', 'diff2', 'diff']: | |
|
56 | params[k] += GET.getall(k) | |
|
57 | ||
|
58 | ||
|
59 | def anchor_url(revision, path, GET): | |
|
55 | 60 | fid = h.FID(revision, path) |
|
56 | return h.url.current(anchor=fid, **dict( | |
|
|
61 | return h.url.current(anchor=fid, **dict(GET)) | |
|
57 | 62 | |
|
58 | 63 | |
|
59 | 64 | def get_ignore_ws(fid, GET): |
|
60 | ig_ws_global = request.GET.get('ignorews') | |
|
|
65 | ig_ws_global = GET.get('ignorews') | |
|
61 | 66 | ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid)) |
|
62 | 67 | if ig_ws: |
|
63 | 68 | try: |
@@ -67,12 +72,13 b' def get_ignore_ws(fid, GET):' | |||
|
67 | 72 | return ig_ws_global |
|
68 | 73 | |
|
69 | 74 | |
|
70 | def _ignorews_url(fileid=None): | |
|
71 | ||
|
75 | def _ignorews_url(GET, fileid=None): | |
|
76 | fileid = str(fileid) if fileid else None | |
|
72 | 77 | params = defaultdict(list) |
|
78 | _update_with_GET(params, GET) | |
|
73 | 79 | lbl = _('show white space') |
|
74 | ig_ws = get_ignore_ws(fileid, request.GET) | |
|
75 | ln_ctx = get_line_ctx(fileid, request.GET) | |
|
|
80 | ig_ws = get_ignore_ws(fileid, GET) | |
|
81 | ln_ctx = get_line_ctx(fileid, GET) | |
|
76 | 82 | # global option |
|
77 | 83 | if fileid is None: |
|
78 | 84 | if ig_ws is None: |
@@ -98,7 +104,7 b' def _ignorews_url(fileid=None):' | |||
|
98 | 104 | |
|
99 | 105 | |
|
100 | 106 | def get_line_ctx(fid, GET): |
|
101 | ln_ctx_global = request.GET.get('context') | |
|
|
107 | ln_ctx_global = GET.get('context') | |
|
102 | 108 | ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid)) |
|
103 | 109 | |
|
104 | 110 | if ln_ctx: |
@@ -112,16 +118,19 b' def get_line_ctx(fid, GET):' | |||
|
112 | 118 | return |
|
113 | 119 | |
|
114 | 120 | |
|
115 | def _context_url(fileid=None): | |
|
121 | def _context_url(GET, fileid=None): | |
|
116 | 122 | """ |
|
117 | 123 | Generates url for context lines |
|
118 | 124 | |
|
119 | 125 | :param fileid: |
|
120 | 126 | """ |
|
121 | ig_ws = get_ignore_ws(fileid, request.GET) | |
|
122 | ln_ctx = (get_line_ctx(fileid, request.GET) or 3) * 2 | |
|
127 | ||
|
128 | fileid = str(fileid) if fileid else None | |
|
129 | ig_ws = get_ignore_ws(fileid, GET) | |
|
130 | ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2 | |
|
123 | 131 | |
|
124 | 132 | params = defaultdict(list) |
|
133 | _update_with_GET(params, GET) | |
|
125 | 134 | |
|
126 | 135 | # global option |
|
127 | 136 | if fileid is None: |
@@ -162,7 +171,7 b' class ChangesetController(BaseRepoContro' | |||
|
162 | 171 | c.anchor_url = anchor_url |
|
163 | 172 | c.ignorews_url = _ignorews_url |
|
164 | 173 | c.context_url = _context_url |
|
165 | ||
|
174 | limit_off = request.GET.get('fulldiff') | |
|
166 | 175 | #get ranges of revisions if preset |
|
167 | 176 | rev_range = revision.split('...')[:2] |
|
168 | 177 | enable_comments = True |
@@ -220,7 +229,7 b' class ChangesetController(BaseRepoContro' | |||
|
220 | 229 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
221 | 230 | lim = self.cut_off_limit |
|
222 | 231 | if cumulative_diff > self.cut_off_limit: |
|
223 | lim = -1 | |
|
232 | lim = -1 if limit_off is None else None | |
|
224 | 233 | size, cs1, cs2, diff, st = wrapped_diff( |
|
225 | 234 | filenode_old=None, |
|
226 | 235 | filenode_new=node, |
@@ -251,7 +260,7 b' class ChangesetController(BaseRepoContro' | |||
|
251 | 260 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
252 | 261 | lim = self.cut_off_limit |
|
253 | 262 | if cumulative_diff > self.cut_off_limit: |
|
254 | lim = -1 | |
|
263 | lim = -1 if limit_off is None else None | |
|
255 | 264 | size, cs1, cs2, diff, st = wrapped_diff( |
|
256 | 265 | filenode_old=filenode_old, |
|
257 | 266 | filenode_new=node, |
@@ -28,7 +28,7 b' import logging' | |||
|
28 | 28 | from pylons import url, response, tmpl_context as c |
|
29 | 29 | from pylons.i18n.translation import _ |
|
30 | 30 | |
|
31 | from rhodecode.lib import safe_unicode | |
|
31 | from rhodecode.lib.utils2 import safe_unicode | |
|
32 | 32 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
33 | 33 | from rhodecode.lib.base import BaseRepoController |
|
34 | 34 |
@@ -32,24 +32,26 b' from pylons.i18n.translation import _' | |||
|
32 | 32 | from pylons.controllers.util import redirect |
|
33 | 33 | from pylons.decorators import jsonify |
|
34 | 34 | |
|
35 | from rhodecode.lib | |
|
|
36 | from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetDoesNotExistError, \ | |
|
37 | EmptyRepositoryError, ImproperArchiveTypeError, VCSError, \ | |
|
38 | NodeAlreadyExistsError | |
|
39 | from rhodecode.lib.vcs.nodes import FileNode | |
|
35 | from rhodecode.lib import diffs | |
|
36 | from rhodecode.lib import helpers as h | |
|
40 | 37 | |
|
41 | 38 | from rhodecode.lib.compat import OrderedDict |
|
42 | from rhodecode.lib import convert_line_endings, detect_mode, safe_str | |
|
39 | from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str | |
|
43 | 40 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
44 | 41 | from rhodecode.lib.base import BaseRepoController, render |
|
45 | 42 | from rhodecode.lib.utils import EmptyChangeset |
|
46 | from rhodecode.lib import | |
|
|
47 | import rhodecode.lib.helpers as h | |
|
43 | from rhodecode.lib.vcs.conf import settings | |
|
44 | from rhodecode.lib.vcs.exceptions import RepositoryError, \ | |
|
45 | ChangesetDoesNotExistError, EmptyRepositoryError, \ | |
|
46 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError | |
|
47 | from rhodecode.lib.vcs.nodes import FileNode | |
|
48 | ||
|
48 | 49 | from rhodecode.model.repo import RepoModel |
|
50 | from rhodecode.model.scm import ScmModel | |
|
51 | ||
|
49 | 52 | from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\ |
|
50 | 53 | _context_url, get_line_ctx, get_ignore_ws |
|
51 | from rhodecode.lib.diffs import wrapped_diff | |
|
52 | from rhodecode.model.scm import ScmModel | |
|
54 | ||
|
53 | 55 | |
|
54 | 56 | log = logging.getLogger(__name__) |
|
55 | 57 | |
@@ -447,7 +449,7 b' class FilesController(BaseRepoController' | |||
|
447 | 449 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
448 | 450 | |
|
449 | 451 | lim = request.GET.get('fulldiff') or self.cut_off_limit |
|
450 | _, cs1, cs2, diff, st = wrapped_diff(filenode_old=node1, | |
|
452 | _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1, | |
|
451 | 453 | filenode_new=node2, |
|
452 | 454 | cut_off_limit=lim, |
|
453 | 455 | ignore_whitespace=ign_whitespace_lcl, |
@@ -44,7 +44,7 b' class HomeController(BaseController):' | |||
|
44 | 44 | def index(self): |
|
45 | 45 | c.repos_list = self.scm_model.get_repos() |
|
46 | 46 | c.groups = self.scm_model.get_repos_groups() |
|
47 | ||
|
47 | c.group = None | |
|
48 | 48 | return render('/index.html') |
|
49 | 49 | |
|
50 | 50 | def repo_switcher(self): |
@@ -26,6 +26,7 b'' | |||
|
26 | 26 | import traceback |
|
27 | 27 | import calendar |
|
28 | 28 | import logging |
|
29 | import urllib | |
|
29 | 30 | from time import mktime |
|
30 | 31 | from datetime import timedelta, date |
|
31 | 32 | from urlparse import urlparse |
@@ -39,15 +40,15 b' from pylons.i18n.translation import _' | |||
|
39 | 40 | |
|
40 | 41 | from beaker.cache import cache_region, region_invalidate |
|
41 | 42 | |
|
43 | from rhodecode.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP | |
|
42 | 44 | from rhodecode.model.db import Statistics, CacheInvalidation |
|
43 | from rhodecode.lib import ALL_READMES, ALL_EXTS | |
|
45 | from rhodecode.lib.utils2 import safe_unicode | |
|
44 | 46 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
45 | 47 | from rhodecode.lib.base import BaseRepoController, render |
|
46 | 48 | from rhodecode.lib.utils import EmptyChangeset |
|
47 | 49 | from rhodecode.lib.markup_renderer import MarkupRenderer |
|
48 | 50 | from rhodecode.lib.celerylib import run_task |
|
49 | from rhodecode.lib.celerylib.tasks import get_commits_stats, \ | |
|
|
50 | LANGUAGES_EXTENSIONS_MAP | |
|
51 | from rhodecode.lib.celerylib.tasks import get_commits_stats | |
|
51 | 52 | from rhodecode.lib.helpers import RepoPage |
|
52 | 53 | from rhodecode.lib.compat import json, OrderedDict |
|
53 | 54 | |
@@ -91,34 +92,37 b' class SummaryController(BaseRepoControll' | |||
|
91 | 92 | |
|
92 | 93 | uri_tmpl = config.get('clone_uri', default_clone_uri) |
|
93 | 94 | uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s') |
|
94 | ||
|
95 | decoded_path = safe_unicode(urllib.unquote(parsed_url.path)) | |
|
95 | 96 | uri_dict = { |
|
96 | 97 | 'user': username, |
|
97 | 98 | 'pass': password, |
|
98 | 99 | 'scheme': parsed_url.scheme, |
|
99 | 100 | 'netloc': parsed_url.netloc, |
|
100 | 'path': parsed_url.path | |
|
|
101 | 'path': decoded_path | |
|
101 | 102 | } |
|
103 | ||
|
102 | 104 | uri = uri_tmpl % uri_dict |
|
103 | 105 | # generate another clone url by id |
|
104 | uri_dict.update({'path': '/_%s' % c.dbrepo.repo_id}) | |
|
106 | uri_dict.update( | |
|
107 | {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)} | |
|
108 | ) | |
|
105 | 109 | uri_id = uri_tmpl % uri_dict |
|
106 | 110 | |
|
107 | 111 | c.clone_repo_url = uri |
|
108 | 112 | c.clone_repo_url_id = uri_id |
|
109 | 113 | c.repo_tags = OrderedDict() |
|
110 | for name, hash in c.rhodecode_repo.tags.items()[:10]: | |
|
114 | for name, hash_ in c.rhodecode_repo.tags.items()[:10]: | |
|
111 | 115 | try: |
|
112 | c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash) | |
|
116 | c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_) | |
|
113 | 117 | except ChangesetError: |
|
114 | c.repo_tags[name] = EmptyChangeset(hash) | |
|
118 | c.repo_tags[name] = EmptyChangeset(hash_) | |
|
115 | 119 | |
|
116 | 120 | c.repo_branches = OrderedDict() |
|
117 | for name, hash in c.rhodecode_repo.branches.items()[:10]: | |
|
121 | for name, hash_ in c.rhodecode_repo.branches.items()[:10]: | |
|
118 | 122 | try: |
|
119 | c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash) | |
|
123 | c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_) | |
|
120 | 124 | except ChangesetError: |
|
121 | c.repo_branches[name] = EmptyChangeset(hash) | |
|
125 | c.repo_branches[name] = EmptyChangeset(hash_) | |
|
122 | 126 | |
|
123 | 127 | td = date.today() + timedelta(days=1) |
|
124 | 128 | td_1m = td - timedelta(days=calendar.mdays[td.month]) |
@@ -175,7 +179,7 b' class SummaryController(BaseRepoControll' | |||
|
175 | 179 | if c.enable_downloads: |
|
176 | 180 | c.download_options = self._get_download_links(c.rhodecode_repo) |
|
177 | 181 | |
|
178 | c.readme_data, c.readme_file = self.__get_readme_data(c.rhodecode_repo) | |
|
182 | c.readme_data, c.readme_file = self.__get_readme_data(c.rhodecode_db_repo) | |
|
179 | 183 | return render('summary/summary.html') |
|
180 | 184 | |
|
181 | 185 | def __get_readme_data(self, repo): |
@@ -206,7 +210,7 b' class SummaryController(BaseRepoControll' | |||
|
206 | 210 | |
|
207 | 211 | return readme_data, readme_file |
|
208 | 212 | |
|
209 | key = repo.name + '_README' | |
|
213 | key = repo.repo_name + '_README' | |
|
210 | 214 | inv = CacheInvalidation.invalidate(key) |
|
211 | 215 | if inv is not None: |
|
212 | 216 | region_invalidate(_get_readme_from_cache, None, key) |
@@ -1,432 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | """ | |
|
3 | rhodecode.lib.__init__ | |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~ | |
|
5 | ||
|
6 | Some simple helper functions | |
|
7 | ||
|
8 | :created_on: Jan 5, 2011 | |
|
9 | :author: marcink | |
|
10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> | |
|
11 | :license: GPLv3, see COPYING for more details. | |
|
12 | """ | |
|
13 | # This program is free software: you can redistribute it and/or modify | |
|
14 | # it under the terms of the GNU General Public License as published by | |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
|
16 | # (at your option) any later version. | |
|
17 | # | |
|
18 | # This program is distributed in the hope that it will be useful, | |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
21 | # GNU General Public License for more details. | |
|
22 | # | |
|
23 | # You should have received a copy of the GNU General Public License | |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
25 | ||
|
26 | 1 |
|
|
27 | import re | |
|
28 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
|
29 | ||
|
30 | ||
|
31 | def __get_lem(): | |
|
32 | from pygments import lexers | |
|
33 | from string import lower | |
|
34 | from collections import defaultdict | |
|
35 | ||
|
36 | d = defaultdict(lambda: []) | |
|
37 | ||
|
38 | def __clean(s): | |
|
39 | s = s.lstrip('*') | |
|
40 | s = s.lstrip('.') | |
|
41 | ||
|
42 | if s.find('[') != -1: | |
|
43 | exts = [] | |
|
44 | start, stop = s.find('['), s.find(']') | |
|
45 | ||
|
46 | for suffix in s[start + 1:stop]: | |
|
47 | exts.append(s[:s.find('[')] + suffix) | |
|
48 | return map(lower, exts) | |
|
49 | else: | |
|
50 | return map(lower, [s]) | |
|
51 | ||
|
52 | for lx, t in sorted(lexers.LEXERS.items()): | |
|
53 | m = map(__clean, t[-2]) | |
|
54 | if m: | |
|
55 | m = reduce(lambda x, y: x + y, m) | |
|
56 | for ext in m: | |
|
57 | desc = lx.replace('Lexer', '') | |
|
58 | d[ext].append(desc) | |
|
59 | ||
|
60 | return dict(d) | |
|
61 | ||
|
62 | # language map is also used by whoosh indexer, which for those specified | |
|
63 | # extensions will index it's content | |
|
64 | LANGUAGES_EXTENSIONS_MAP = __get_lem() | |
|
65 | ||
|
66 | # Additional mappings that are not present in the pygments lexers | |
|
67 | # NOTE: that this will overide any mappings in LANGUAGES_EXTENSIONS_MAP | |
|
68 | ADDITIONAL_MAPPINGS = {'xaml': 'XAML'} | |
|
69 | ||
|
70 | LANGUAGES_EXTENSIONS_MAP.update(ADDITIONAL_MAPPINGS) | |
|
71 | ||
|
72 | # list of readme files to search in file tree and display in summary | |
|
73 | # attached weights defines the search order lower is first | |
|
74 | ALL_READMES = [ | |
|
75 | ('readme', 0), ('README', 0), ('Readme', 0), | |
|
76 | ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1), | |
|
77 | ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2), | |
|
78 | ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2), | |
|
79 | ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2), | |
|
80 | ] | |
|
81 | ||
|
82 | # extension together with weights to search lower is first | |
|
83 | RST_EXTS = [ | |
|
84 | ('', 0), ('.rst', 1), ('.rest', 1), | |
|
85 | ('.RST', 2), ('.REST', 2), | |
|
86 | ('.txt', 3), ('.TXT', 3) | |
|
87 | ] | |
|
88 | ||
|
89 | MARKDOWN_EXTS = [ | |
|
90 | ('.md', 1), ('.MD', 1), | |
|
91 | ('.mkdn', 2), ('.MKDN', 2), | |
|
92 | ('.mdown', 3), ('.MDOWN', 3), | |
|
93 | ('.markdown', 4), ('.MARKDOWN', 4) | |
|
94 | ] | |
|
95 | ||
|
96 | PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)] | |
|
97 | ||
|
98 | ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS | |
|
99 | ||
|
100 | ||
|
101 | def str2bool(_str): | |
|
102 | """ | |
|
103 | returs True/False value from given string, it tries to translate the | |
|
104 | string into boolean | |
|
105 | ||
|
106 | :param _str: string value to translate into boolean | |
|
107 | :rtype: boolean | |
|
108 | :returns: boolean from given string | |
|
109 | """ | |
|
110 | if _str is None: | |
|
111 | return False | |
|
112 | if _str in (True, False): | |
|
113 | return _str | |
|
114 | _str = str(_str).strip().lower() | |
|
115 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') | |
|
116 | ||
|
117 | ||
|
118 | def convert_line_endings(line, mode): | |
|
119 | """ | |
|
120 | Converts a given line "line end" accordingly to given mode | |
|
121 | ||
|
122 | Available modes are:: | |
|
123 | 0 - Unix | |
|
124 | 1 - Mac | |
|
125 | 2 - DOS | |
|
126 | ||
|
127 | :param line: given line to convert | |
|
128 | :param mode: mode to convert to | |
|
129 | :rtype: str | |
|
130 | :return: converted line according to mode | |
|
131 | """ | |
|
132 | from string import replace | |
|
133 | ||
|
134 | if mode == 0: | |
|
135 | line = replace(line, '\r\n', '\n') | |
|
136 | line = replace(line, '\r', '\n') | |
|
137 | elif mode == 1: | |
|
138 | line = replace(line, '\r\n', '\r') | |
|
139 | line = replace(line, '\n', '\r') | |
|
140 | elif mode == 2: | |
|
141 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) | |
|
142 | return line | |
|
143 | ||
|
144 | ||
|
145 | def detect_mode(line, default): | |
|
146 | """ | |
|
147 | Detects line break for given line, if line break couldn't be found | |
|
148 | given default value is returned | |
|
149 | ||
|
150 | :param line: str line | |
|
151 | :param default: default | |
|
152 | :rtype: int | |
|
153 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS | |
|
154 | """ | |
|
155 | if line.endswith('\r\n'): | |
|
156 | return 2 | |
|
157 | elif line.endswith('\n'): | |
|
158 | return 0 | |
|
159 | elif line.endswith('\r'): | |
|
160 | return 1 | |
|
161 | else: | |
|
162 | return default | |
|
163 | ||
|
164 | ||
|
165 | def generate_api_key(username, salt=None): | |
|
166 | """ | |
|
167 | Generates unique API key for given username, if salt is not given | |
|
168 | it'll be generated from some random string | |
|
169 | ||
|
170 | :param username: username as string | |
|
171 | :param salt: salt to hash generate KEY | |
|
172 | :rtype: str | |
|
173 | :returns: sha1 hash from username+salt | |
|
174 | """ | |
|
175 | from tempfile import _RandomNameSequence | |
|
176 | import hashlib | |
|
177 | ||
|
178 | if salt is None: | |
|
179 | salt = _RandomNameSequence().next() | |
|
180 | ||
|
181 | return hashlib.sha1(username + salt).hexdigest() | |
|
182 | ||
|
183 | ||
|
184 | def safe_unicode(str_, from_encoding=None): | |
|
185 | """ | |
|
186 | safe unicode function. Does few trick to turn str_ into unicode | |
|
187 | ||
|
188 | In case of UnicodeDecode error we try to return it with encoding detected | |
|
189 | by chardet library if it fails fallback to unicode with errors replaced | |
|
190 | ||
|
191 | :param str_: string to decode | |
|
192 | :rtype: unicode | |
|
193 | :returns: unicode object | |
|
194 | """ | |
|
195 | if isinstance(str_, unicode): | |
|
196 | return str_ | |
|
197 | ||
|
198 | if not from_encoding: | |
|
199 | import rhodecode | |
|
200 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') | |
|
201 | from_encoding = DEFAULT_ENCODING | |
|
202 | ||
|
203 | try: | |
|
204 | return unicode(str_) | |
|
205 | except UnicodeDecodeError: | |
|
206 | pass | |
|
207 | ||
|
208 | try: | |
|
209 | return unicode(str_, from_encoding) | |
|
210 | except UnicodeDecodeError: | |
|
211 | pass | |
|
212 | ||
|
213 | try: | |
|
214 | import chardet | |
|
215 | encoding = chardet.detect(str_)['encoding'] | |
|
216 | if encoding is None: | |
|
217 | raise Exception() | |
|
218 | return str_.decode(encoding) | |
|
219 | except (ImportError, UnicodeDecodeError, Exception): | |
|
220 | return unicode(str_, from_encoding, 'replace') | |
|
221 | ||
|
222 | ||
|
223 | def safe_str(unicode_, to_encoding=None): | |
|
224 | """ | |
|
225 | safe str function. Does few trick to turn unicode_ into string | |
|
226 | ||
|
227 | In case of UnicodeEncodeError we try to return it with encoding detected | |
|
228 | by chardet library if it fails fallback to string with errors replaced | |
|
229 | ||
|
230 | :param unicode_: unicode to encode | |
|
231 | :rtype: str | |
|
232 | :returns: str object | |
|
233 | """ | |
|
234 | ||
|
235 | # if it's not basestr cast to str | |
|
236 | if not isinstance(unicode_, basestring): | |
|
237 | return str(unicode_) | |
|
238 | ||
|
239 | if isinstance(unicode_, str): | |
|
240 | return unicode_ | |
|
241 | ||
|
242 | if not to_encoding: | |
|
243 | import rhodecode | |
|
244 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') | |
|
245 | to_encoding = DEFAULT_ENCODING | |
|
246 | ||
|
247 | try: | |
|
248 | return unicode_.encode(to_encoding) | |
|
249 | except UnicodeEncodeError: | |
|
250 | pass | |
|
251 | ||
|
252 | try: | |
|
253 | import chardet | |
|
254 | encoding = chardet.detect(unicode_)['encoding'] | |
|
255 | print encoding | |
|
256 | if encoding is None: | |
|
257 | raise UnicodeEncodeError() | |
|
258 | ||
|
259 | return unicode_.encode(encoding) | |
|
260 | except (ImportError, UnicodeEncodeError): | |
|
261 | return unicode_.encode(to_encoding, 'replace') | |
|
262 | ||
|
263 | return safe_str | |
|
264 | ||
|
265 | ||
|
266 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): | |
|
267 | """ | |
|
268 | Custom engine_from_config functions that makes sure we use NullPool for | |
|
269 | file based sqlite databases. This prevents errors on sqlite. This only | |
|
270 | applies to sqlalchemy versions < 0.7.0 | |
|
271 | ||
|
272 | """ | |
|
273 | import sqlalchemy | |
|
274 | from sqlalchemy import engine_from_config as efc | |
|
275 | import logging | |
|
276 | ||
|
277 | if int(sqlalchemy.__version__.split('.')[1]) < 7: | |
|
278 | ||
|
279 | # This solution should work for sqlalchemy < 0.7.0, and should use | |
|
280 | # proxy=TimerProxy() for execution time profiling | |
|
281 | ||
|
282 | from sqlalchemy.pool import NullPool | |
|
283 | url = configuration[prefix + 'url'] | |
|
284 | ||
|
285 | if url.startswith('sqlite'): | |
|
286 | kwargs.update({'poolclass': NullPool}) | |
|
287 | return efc(configuration, prefix, **kwargs) | |
|
288 | else: | |
|
289 | import time | |
|
290 | from sqlalchemy import event | |
|
291 | from sqlalchemy.engine import Engine | |
|
292 | ||
|
293 | log = logging.getLogger('sqlalchemy.engine') | |
|
294 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38) | |
|
295 | engine = efc(configuration, prefix, **kwargs) | |
|
296 | ||
|
297 | def color_sql(sql): | |
|
298 | COLOR_SEQ = "\033[1;%dm" | |
|
299 | COLOR_SQL = YELLOW | |
|
300 | normal = '\x1b[0m' | |
|
301 | return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal]) | |
|
302 | ||
|
303 | if configuration['debug']: | |
|
304 | #attach events only for debug configuration | |
|
305 | ||
|
306 | def before_cursor_execute(conn, cursor, statement, | |
|
307 | parameters, context, executemany): | |
|
308 | context._query_start_time = time.time() | |
|
309 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) | |
|
310 | ||
|
311 | ||
|
312 | def after_cursor_execute(conn, cursor, statement, | |
|
313 | parameters, context, executemany): | |
|
314 | total = time.time() - context._query_start_time | |
|
315 | log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total)) | |
|
316 | ||
|
317 | event.listen(engine, "before_cursor_execute", | |
|
318 | before_cursor_execute) | |
|
319 | event.listen(engine, "after_cursor_execute", | |
|
320 | after_cursor_execute) | |
|
321 | ||
|
322 | return engine | |
|
323 | ||
|
324 | ||
|
325 | def age(curdate): | |
|
326 | """ | |
|
327 | turns a datetime into an age string. | |
|
328 | ||
|
329 | :param curdate: datetime object | |
|
330 | :rtype: unicode | |
|
331 | :returns: unicode words describing age | |
|
332 | """ | |
|
333 | ||
|
334 | from datetime import datetime | |
|
335 | from webhelpers.date import time_ago_in_words | |
|
336 | ||
|
337 | _ = lambda s: s | |
|
338 | ||
|
339 | if not curdate: | |
|
340 | return '' | |
|
341 | ||
|
342 | agescales = [(_(u"year"), 3600 * 24 * 365), | |
|
343 | (_(u"month"), 3600 * 24 * 30), | |
|
344 | (_(u"day"), 3600 * 24), | |
|
345 | (_(u"hour"), 3600), | |
|
346 | (_(u"minute"), 60), | |
|
347 | (_(u"second"), 1), ] | |
|
348 | ||
|
349 | age = datetime.now() - curdate | |
|
350 | age_seconds = (age.days * agescales[2][1]) + age.seconds | |
|
351 | pos = 1 | |
|
352 | for scale in agescales: | |
|
353 | if scale[1] <= age_seconds: | |
|
354 | if pos == 6: | |
|
355 | pos = 5 | |
|
356 | return '%s %s' % (time_ago_in_words(curdate, | |
|
357 | agescales[pos][0]), _('ago')) | |
|
358 | pos += 1 | |
|
359 | ||
|
360 | return _(u'just now') | |
|
361 | ||
|
362 | ||
|
363 | def uri_filter(uri): | |
|
364 | """ | |
|
365 | Removes user:password from given url string | |
|
366 | ||
|
367 | :param uri: | |
|
368 | :rtype: unicode | |
|
369 | :returns: filtered list of strings | |
|
370 | """ | |
|
371 | if not uri: | |
|
372 | return '' | |
|
373 | ||
|
374 | proto = '' | |
|
375 | ||
|
376 | for pat in ('https://', 'http://'): | |
|
377 | if uri.startswith(pat): | |
|
378 | uri = uri[len(pat):] | |
|
379 | proto = pat | |
|
380 | break | |
|
381 | ||
|
382 | # remove passwords and username | |
|
383 | uri = uri[uri.find('@') + 1:] | |
|
384 | ||
|
385 | # get the port | |
|
386 | cred_pos = uri.find(':') | |
|
387 | if cred_pos == -1: | |
|
388 | host, port = uri, None | |
|
389 | else: | |
|
390 | host, port = uri[:cred_pos], uri[cred_pos + 1:] | |
|
391 | ||
|
392 | return filter(None, [proto, host, port]) | |
|
393 | ||
|
394 | ||
|
395 | def credentials_filter(uri): | |
|
396 | """ | |
|
397 | Returns a url with removed credentials | |
|
398 | ||
|
399 | :param uri: | |
|
400 | """ | |
|
401 | ||
|
402 | uri = uri_filter(uri) | |
|
403 | #check if we have port | |
|
404 | if len(uri) > 2 and uri[2]: | |
|
405 | uri[2] = ':' + uri[2] | |
|
406 | ||
|
407 | return ''.join(uri) | |
|
408 | ||
|
409 | ||
|
410 | def get_changeset_safe(repo, rev): | |
|
411 | """ | |
|
412 | Safe version of get_changeset if this changeset doesn't exists for a | |
|
413 | repo it returns a Dummy one instead | |
|
414 | ||
|
415 | :param repo: | |
|
416 | :param rev: | |
|
417 | """ | |
|
418 | from rhodecode.lib.vcs.backends.base import BaseRepository | |
|
419 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
|
420 | if not isinstance(repo, BaseRepository): | |
|
421 | raise Exception('You must pass an Repository ' | |
|
422 | 'object as first argument got %s', type(repo)) | |
|
423 | ||
|
424 | try: | |
|
425 | cs = repo.get_changeset(rev) | |
|
426 | except RepositoryError: | |
|
427 | from rhodecode.lib.utils import EmptyChangeset | |
|
428 | cs = EmptyChangeset(requested_revision=rev) | |
|
429 | return cs | |
|
430 | 2 | |
|
431 | 3 | |
|
432 | 4 | def get_current_revision(quiet=False): |
@@ -450,16 +22,3 b' def get_current_revision(quiet=False):' | |||
|
450 | 22 | print ("Cannot retrieve rhodecode's revision. Original error " |
|
451 | 23 | "was: %s" % err) |
|
452 | 24 | return None |
|
453 | ||
|
454 | ||
|
455 | def extract_mentioned_users(s): | |
|
456 | """ | |
|
457 | Returns unique usernames from given string s that have @mention | |
|
458 | ||
|
459 | :param s: string to get mentions | |
|
460 | """ | |
|
461 | usrs = {} | |
|
462 | for username in re.findall(r'(?:^@|\s@)(\w+)', s): | |
|
463 | usrs[username] = username | |
|
464 | ||
|
465 | return sorted(usrs.keys()) |
@@ -43,7 +43,7 b' if __platform__ in PLATFORM_WIN:' | |||
|
43 | 43 | if __platform__ in PLATFORM_OTHERS: |
|
44 | 44 | import bcrypt |
|
45 | 45 | |
|
46 | from rhodecode.lib import str2bool, safe_unicode | |
|
46 | from rhodecode.lib.utils2 import str2bool, safe_unicode | |
|
47 | 47 | from rhodecode.lib.exceptions import LdapPasswordError, LdapUsernameError |
|
48 | 48 | from rhodecode.lib.utils import get_repo_slug, get_repos_group_slug |
|
49 | 49 | from rhodecode.lib.auth_ldap import AuthLdap |
@@ -521,8 +521,7 b' class PermsDecorator(object):' | |||
|
521 | 521 | self.user = cls.rhodecode_user |
|
522 | 522 | self.user_perms = self.user.permissions |
|
523 | 523 | log.debug('checking %s permissions %s for %s %s', |
|
524 | self.__class__.__name__, self.required_perms, cls, | |
|
525 | self.user) | |
|
524 | self.__class__.__name__, self.required_perms, cls, self.user) | |
|
526 | 525 | |
|
527 | 526 | if self.check_permissions(): |
|
528 | 527 | log.debug('Permission granted for %s %s' % (cls, self.user)) |
@@ -604,6 +603,7 b' class HasRepoPermissionAnyDecorator(Perm' | |||
|
604 | 603 | user_perms = set([self.user_perms['repositories'][repo_name]]) |
|
605 | 604 | except KeyError: |
|
606 | 605 | return False |
|
606 | ||
|
607 | 607 | if self.required_perms.intersection(user_perms): |
|
608 | 608 | return True |
|
609 | 609 | return False |
@@ -655,29 +655,37 b' class PermsFunction(object):' | |||
|
655 | 655 | |
|
656 | 656 | for perm in perms: |
|
657 | 657 | if perm not in available_perms: |
|
658 | raise Exception("'%s' permission i | |
|
|
658 | raise Exception("'%s' permission is not defined" % perm) | |
|
659 | 659 | self.required_perms = set(perms) |
|
660 | 660 | self.user_perms = None |
|
661 | self.granted_for = '' | |
|
662 | 661 | self.repo_name = None |
|
662 | self.group_name = None | |
|
663 | 663 | |
|
664 | 664 | def __call__(self, check_Location=''): |
|
665 | 665 | user = request.user |
|
666 |
|
|
|
667 | self.required_perms, user) | |
|
666 | cls_name = self.__class__.__name__ | |
|
667 | check_scope = { | |
|
668 | 'HasPermissionAll': '', | |
|
669 | 'HasPermissionAny': '', | |
|
670 | 'HasRepoPermissionAll': 'repo:%s' % self.repo_name, | |
|
671 | 'HasRepoPermissionAny': 'repo:%s' % self.repo_name, | |
|
672 | 'HasReposGroupPermissionAll': 'group:%s' % self.group_name, | |
|
673 | 'HasReposGroupPermissionAny': 'group:%s' % self.group_name, | |
|
674 | }.get(cls_name, '?') | |
|
675 | log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name, | |
|
676 | self.required_perms, user, check_scope, | |
|
677 | check_Location or 'unspecified location') | |
|
668 | 678 | if not user: |
|
669 | 679 | log.debug('Empty request user') |
|
670 | 680 | return False |
|
671 | 681 | self.user_perms = user.permissions |
|
672 | self.granted_for = user | |
|
673 | ||
|
674 | 682 | if self.check_permissions(): |
|
675 | log.debug('Permission granted %s @ %s', self.granted_for, | |
|
|
683 | log.debug('Permission granted for user: %s @ %s', user, | |
|
676 | 684 | check_Location or 'unspecified location') |
|
677 | 685 | return True |
|
678 | 686 | |
|
679 | 687 | else: |
|
680 | log.debug('Permission denied for %s @ %s', self.granted_for, | |
|
|
688 | log.debug('Permission denied for user: %s @ %s', user, | |
|
681 | 689 | check_Location or 'unspecified location') |
|
682 | 690 | return False |
|
683 | 691 | |
@@ -701,7 +709,6 b' class HasPermissionAny(PermsFunction):' | |||
|
701 | 709 | |
|
702 | 710 | |
|
703 | 711 | class HasRepoPermissionAll(PermsFunction): |
|
704 | ||
|
705 | 712 | def __call__(self, repo_name=None, check_Location=''): |
|
706 | 713 | self.repo_name = repo_name |
|
707 | 714 | return super(HasRepoPermissionAll, self).__call__(check_Location) |
@@ -711,19 +718,17 b' class HasRepoPermissionAll(PermsFunction' | |||
|
711 | 718 | self.repo_name = get_repo_slug(request) |
|
712 | 719 | |
|
713 | 720 | try: |
|
714 | self.user_perms = set( | |
|
721 | self._user_perms = set( | |
|
715 | 722 | [self.user_perms['repositories'][self.repo_name]] |
|
716 | 723 | ) |
|
717 | 724 | except KeyError: |
|
718 | 725 | return False |
|
719 | self.granted_for = self.repo_name | |
|
720 | if self.required_perms.issubset(self.user_perms): | |
|
726 | if self.required_perms.issubset(self._user_perms): | |
|
721 | 727 | return True |
|
722 | 728 | return False |
|
723 | 729 | |
|
724 | 730 | |
|
725 | 731 | class HasRepoPermissionAny(PermsFunction): |
|
726 | ||
|
727 | 732 | def __call__(self, repo_name=None, check_Location=''): |
|
728 | 733 | self.repo_name = repo_name |
|
729 | 734 | return super(HasRepoPermissionAny, self).__call__(check_Location) |
@@ -733,13 +738,12 b' class HasRepoPermissionAny(PermsFunction' | |||
|
733 | 738 | self.repo_name = get_repo_slug(request) |
|
734 | 739 | |
|
735 | 740 | try: |
|
736 | self.user_perms = set( | |
|
741 | self._user_perms = set( | |
|
737 | 742 | [self.user_perms['repositories'][self.repo_name]] |
|
738 | 743 | ) |
|
739 | 744 | except KeyError: |
|
740 | 745 | return False |
|
741 | self.granted_for = self.repo_name | |
|
742 | if self.required_perms.intersection(self.user_perms): | |
|
746 | if self.required_perms.intersection(self._user_perms): | |
|
743 | 747 | return True |
|
744 | 748 | return False |
|
745 | 749 | |
@@ -751,13 +755,12 b' class HasReposGroupPermissionAny(PermsFu' | |||
|
751 | 755 | |
|
752 | 756 | def check_permissions(self): |
|
753 | 757 | try: |
|
754 | self.user_perms = set( | |
|
758 | self._user_perms = set( | |
|
755 | 759 | [self.user_perms['repositories_groups'][self.group_name]] |
|
756 | 760 | ) |
|
757 | 761 | except KeyError: |
|
758 | 762 | return False |
|
759 | self.granted_for = self.repo_name | |
|
760 | if self.required_perms.intersection(self.user_perms): | |
|
763 | if self.required_perms.intersection(self._user_perms): | |
|
761 | 764 | return True |
|
762 | 765 | return False |
|
763 | 766 | |
@@ -769,13 +772,12 b' class HasReposGroupPermissionAll(PermsFu' | |||
|
769 | 772 | |
|
770 | 773 | def check_permissions(self): |
|
771 | 774 | try: |
|
772 | self.user_perms = set( | |
|
775 | self._user_perms = set( | |
|
773 | 776 | [self.user_perms['repositories_groups'][self.group_name]] |
|
774 | 777 | ) |
|
775 | 778 | except KeyError: |
|
776 | 779 | return False |
|
777 | self.granted_for = self.repo_name | |
|
778 | if self.required_perms.issubset(self.user_perms): | |
|
780 | if self.required_perms.issubset(self._user_perms): | |
|
779 | 781 | return True |
|
780 | 782 | return False |
|
781 | 783 | |
@@ -788,12 +790,16 b' class HasPermissionAnyMiddleware(object)' | |||
|
788 | 790 | self.required_perms = set(perms) |
|
789 | 791 | |
|
790 | 792 | def __call__(self, user, repo_name): |
|
793 | # repo_name MUST be unicode, since we handle keys in permission | |
|
794 | # dict by unicode | |
|
795 | repo_name = safe_unicode(repo_name) | |
|
791 | 796 | usr = AuthUser(user.user_id) |
|
792 | 797 | try: |
|
793 | 798 | self.user_perms = set([usr.permissions['repositories'][repo_name]]) |
|
794 | except: | |
|
799 | except Exception: | |
|
800 | log.error('Exception while accessing permissions %s' % | |
|
801 | traceback.format_exc()) | |
|
795 | 802 | self.user_perms = set() |
|
796 | self.granted_for = '' | |
|
797 | 803 | self.username = user.username |
|
798 | 804 | self.repo_name = repo_name |
|
799 | 805 | return self.check_permissions() |
@@ -803,7 +809,13 b' class HasPermissionAnyMiddleware(object)' | |||
|
803 | 809 | 'permissions %s for user:%s repository:%s', self.user_perms, |
|
804 | 810 | self.username, self.repo_name) |
|
805 | 811 | if self.required_perms.intersection(self.user_perms): |
|
806 | log.debug('permission granted') | |
|
|
812 | log.debug('permission granted for user:%s on repo:%s' % ( | |
|
813 | self.username, self.repo_name | |
|
814 | ) | |
|
815 | ) | |
|
807 | 816 | return True |
|
808 | log.debug('permission denied') | |
|
|
817 | log.debug('permission denied for user:%s on repo:%s' % ( | |
|
818 | self.username, self.repo_name | |
|
819 | ) | |
|
820 | ) | |
|
809 | 821 | return False |
@@ -7,6 +7,8 b' import time' | |||
|
7 | 7 | import traceback |
|
8 | 8 | |
|
9 | 9 | from paste.auth.basic import AuthBasicAuthenticator |
|
10 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden | |
|
11 | from paste.httpheaders import WWW_AUTHENTICATE | |
|
10 | 12 | |
|
11 | 13 | from pylons import config, tmpl_context as c, request, session, url |
|
12 | 14 | from pylons.controllers import WSGIController |
@@ -15,7 +17,7 b' from pylons.templating import render_mak' | |||
|
15 | 17 | |
|
16 | 18 | from rhodecode import __version__, BACKENDS |
|
17 | 19 | |
|
18 | from rhodecode.lib import str2bool, safe_unicode | |
|
20 | from rhodecode.lib.utils2 import str2bool, safe_unicode | |
|
19 | 21 | from rhodecode.lib.auth import AuthUser, get_container_username, authfunc,\ |
|
20 | 22 | HasPermissionAnyMiddleware, CookieStoreWrapper |
|
21 | 23 | from rhodecode.lib.utils import get_repo_slug, invalidate_cache |
@@ -28,6 +30,22 b' from rhodecode.model.scm import ScmModel' | |||
|
28 | 30 | log = logging.getLogger(__name__) |
|
29 | 31 | |
|
30 | 32 | |
|
33 | class BasicAuth(AuthBasicAuthenticator): | |
|
34 | ||
|
35 | def __init__(self, realm, authfunc, auth_http_code=None): | |
|
36 | self.realm = realm | |
|
37 | self.authfunc = authfunc | |
|
38 | self._rc_auth_http_code = auth_http_code | |
|
39 | ||
|
40 | def build_authentication(self): | |
|
41 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) | |
|
42 | if self._rc_auth_http_code and self._rc_auth_http_code == '403': | |
|
43 | # return 403 if alternative http return code is specified in | |
|
44 | # RhodeCode config | |
|
45 | return HTTPForbidden(headers=head) | |
|
46 | return HTTPUnauthorized(headers=head) | |
|
47 | ||
|
48 | ||
|
31 | 49 | class BaseVCSController(object): |
|
32 | 50 | |
|
33 | 51 | def __init__(self, application, config): |
@@ -36,7 +54,8 b' class BaseVCSController(object):' | |||
|
36 | 54 | # base path of repo locations |
|
37 | 55 | self.basepath = self.config['base_path'] |
|
38 | 56 | #authenticate this mercurial request using authfunc |
|
39 | self.authenticate = | |
|
|
57 | self.authenticate = BasicAuth('', authfunc, | |
|
58 | config.get('auth_ret_code')) | |
|
40 | 59 | self.ipaddr = '0.0.0.0' |
|
41 | 60 | |
|
42 | 61 | def _handle_request(self, environ, start_response): |
@@ -24,7 +24,7 b' from beaker.exceptions import BeakerExce' | |||
|
24 | 24 | from sqlalchemy.orm.interfaces import MapperOption |
|
25 | 25 | from sqlalchemy.orm.query import Query |
|
26 | 26 | from sqlalchemy.sql import visitors |
|
27 | from rhodecode.lib import safe_str | |
|
27 | from rhodecode.lib.utils2 import safe_str | |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | class CachingQuery(Query): |
@@ -36,7 +36,7 b' from decorator import decorator' | |||
|
36 | 36 | |
|
37 | 37 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
38 | 38 | from rhodecode import CELERY_ON |
|
39 | from rhodecode.lib import str2bool, safe_str | |
|
39 | from rhodecode.lib.utils2 import str2bool, safe_str | |
|
40 | 40 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
41 | 41 | from rhodecode.model import init_model |
|
42 | 42 | from rhodecode.model import meta |
@@ -40,7 +40,7 b' from pylons.i18n.translation import _' | |||
|
40 | 40 | from rhodecode.lib.vcs import get_backend |
|
41 | 41 | |
|
42 | 42 | from rhodecode import CELERY_ON |
|
43 | from rhodecode.lib import safe_str | |
|
|
43 | from rhodecode.lib.utils2 import safe_str | |
|
44 | 44 | from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \ |
|
45 | 45 | str2bool, __get_lockkey, LockHeld, DaemonLock, get_session |
|
46 | 46 | from rhodecode.lib.helpers import person |
@@ -147,6 +147,7 b' def get_commits_stats(repo_name, ts_min_' | |||
|
147 | 147 | last_rev, last_rev + parse_limit) |
|
148 | 148 | ) |
|
149 | 149 | for cs in repo[last_rev:last_rev + parse_limit]: |
|
150 | log.debug('parsing %s' % cs) | |
|
150 | 151 | last_cs = cs # remember last parsed changeset |
|
151 | 152 | k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1], |
|
152 | 153 | cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0]) |
@@ -233,10 +234,10 b' def get_commits_stats(repo_name, ts_min_' | |||
|
233 | 234 | lock.release() |
|
234 | 235 | return False |
|
235 | 236 | |
|
236 | #final release | |
|
237 | # final release | |
|
237 | 238 | lock.release() |
|
238 | 239 | |
|
239 | #execute another task if celery is enabled | |
|
240 | # execute another task if celery is enabled | |
|
240 | 241 | if len(repo.revisions) > 1 and CELERY_ON: |
|
241 | 242 | run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y) |
|
242 | 243 | return True |
@@ -327,7 +328,7 b' def send_email(recipients, subject, body' | |||
|
327 | 328 | DBS = get_session() |
|
328 | 329 | |
|
329 | 330 | email_config = config |
|
330 | subject = "%s %s" % (email_config.get('email_prefix'), subject) | |
|
331 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) | |
|
331 | 332 | if not recipients: |
|
332 | 333 | # if recipients are not defined we send to email_config + all admins |
|
333 | 334 | admins = [u.email for u in User.query() |
@@ -395,6 +396,7 b' def create_repo_fork(form_data, cur_user' | |||
|
395 | 396 | DBS.commit() |
|
396 | 397 | |
|
397 | 398 | def __get_codes_stats(repo_name): |
|
399 | from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP | |
|
398 | 400 | repo = Repository.get_by_repo_name(repo_name).scm_instance |
|
399 | 401 | |
|
400 | 402 | tip = repo.get_changeset() |
@@ -1,9 +1,9 b'' | |||
|
1 | 1 | import rhodecode |
|
2 | from rhodecode.lib.utils import BasePasterCommand, Command | |
|
2 | from rhodecode.lib.utils import BasePasterCommand, Command, load_rcextensions | |
|
3 | 3 | from celery.app import app_or_default |
|
4 | 4 | from celery.bin import camqadm, celerybeat, celeryd, celeryev |
|
5 | 5 | |
|
6 | from rhodecode.lib import str2bool | |
|
6 | from rhodecode.lib.utils2 import str2bool | |
|
7 | 7 | |
|
8 | 8 | __all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand', |
|
9 | 9 | 'CAMQPAdminCommand', 'CeleryEventCommand'] |
@@ -39,9 +39,11 b' class CeleryCommand(BasePasterCommand):' | |||
|
39 | 39 | raise Exception('Please enable celery_on in .ini config ' |
|
40 | 40 | 'file before running celeryd') |
|
41 | 41 | rhodecode.CELERY_ON = CELERY_ON |
|
42 | load_rcextensions(config['here']) | |
|
42 | 43 | cmd = self.celery_command(app_or_default()) |
|
43 | 44 | return cmd.run(**vars(self.options)) |
|
44 | 45 | |
|
46 | ||
|
45 | 47 | class CeleryDaemonCommand(CeleryCommand): |
|
46 | 48 | """Start the celery worker |
|
47 | 49 | |
@@ -82,6 +84,7 b' class CAMQPAdminCommand(CeleryCommand):' | |||
|
82 | 84 | parser = Command.standard_parser(quiet=True) |
|
83 | 85 | celery_command = camqadm.AMQPAdminCommand |
|
84 | 86 | |
|
87 | ||
|
85 | 88 | class CeleryEventCommand(CeleryCommand): |
|
86 | 89 | """Celery event command. |
|
87 | 90 |
@@ -25,16 +25,93 b'' | |||
|
25 | 25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
26 | 26 | |
|
27 | 27 | import os |
|
28 | import datetime | |
|
29 | import functools | |
|
30 | import decimal | |
|
28 | 31 | from rhodecode import __platform__, PLATFORM_WIN |
|
29 | 32 | |
|
30 | 33 | #============================================================================== |
|
31 | 34 | # json |
|
32 | 35 | #============================================================================== |
|
36 | ||
|
37 | ||
|
38 | def _is_aware(value): | |
|
39 | """ | |
|
40 | Determines if a given datetime.time is aware. | |
|
41 | ||
|
42 | The logic is described in Python's docs: | |
|
43 | http://docs.python.org/library/datetime.html#datetime.tzinfo | |
|
44 | """ | |
|
45 | return (value.tzinfo is not None | |
|
46 | and value.tzinfo.utcoffset(value) is not None) | |
|
47 | ||
|
48 | ||
|
49 | def _obj_dump(obj): | |
|
50 | """ | |
|
51 | Custom function for dumping objects to JSON, if obj has __json__ attribute | |
|
52 | or method defined it will be used for serialization | |
|
53 | ||
|
54 | :param obj: | |
|
55 | """ | |
|
56 | ||
|
57 | if isinstance(obj, complex): | |
|
58 | return [obj.real, obj.imag] | |
|
59 | # See "Date Time String Format" in the ECMA-262 specification. | |
|
60 | # some code borrowed from django 1.4 | |
|
61 | elif isinstance(obj, datetime.datetime): | |
|
62 | r = obj.isoformat() | |
|
63 | if obj.microsecond: | |
|
64 | r = r[:23] + r[26:] | |
|
65 | if r.endswith('+00:00'): | |
|
66 | r = r[:-6] + 'Z' | |
|
67 | return r | |
|
68 | elif isinstance(obj, datetime.date): | |
|
69 | return obj.isoformat() | |
|
70 | elif isinstance(obj, decimal.Decimal): | |
|
71 | return str(obj) | |
|
72 | elif isinstance(obj, datetime.time): | |
|
73 | if _is_aware(obj): | |
|
74 | raise ValueError("JSON can't represent timezone-aware times.") | |
|
75 | r = obj.isoformat() | |
|
76 | if obj.microsecond: | |
|
77 | r = r[:12] | |
|
78 | return r | |
|
79 | elif isinstance(obj, set): | |
|
80 | return list(obj) | |
|
81 | elif isinstance(obj, OrderedDict): | |
|
82 | return obj.as_dict() | |
|
83 | elif hasattr(obj, '__json__'): | |
|
84 | if callable(obj.__json__): | |
|
85 | return obj.__json__() | |
|
86 | else: | |
|
87 | return obj.__json__ | |
|
88 | else: | |
|
89 | raise NotImplementedError | |
|
90 | ||
|
33 | 91 | try: |
|
34 | 92 | import json |
|
93 | ||
|
94 | # extended JSON encoder for json | |
|
95 | class ExtendedEncoder(json.JSONEncoder): | |
|
96 | def default(self, obj): | |
|
97 | try: | |
|
98 | return _obj_dump(obj) | |
|
99 | except NotImplementedError: | |
|
100 | pass | |
|
101 | return json.JSONEncoder.default(self, obj) | |
|
102 | # monkey-patch JSON encoder to use extended version | |
|
103 | json.dumps = functools.partial(json.dumps, cls=ExtendedEncoder) | |
|
35 | 104 | except ImportError: |
|
36 | 105 | import simplejson as json |
|
37 | 106 | |
|
107 | def extended_encode(obj): | |
|
108 | try: | |
|
109 | return _obj_dump(obj) | |
|
110 | except NotImplementedError: | |
|
111 | pass | |
|
112 | raise TypeError("%r is not JSON serializable" % (obj,)) | |
|
113 | json.dumps = functools.partial(json.dumps, default=extended_encode) | |
|
114 | ||
|
38 | 115 | |
|
39 | 116 | #============================================================================== |
|
40 | 117 | # izip_longest |
@@ -44,11 +121,11 b' try:' | |||
|
44 | 121 | except ImportError: |
|
45 | 122 | import itertools |
|
46 | 123 | |
|
47 | def izip_longest(*args, **kwds): | |
|
|
124 | def izip_longest(*args, **kwds): | |
|
48 | 125 | fillvalue = kwds.get("fillvalue") |
|
49 | 126 | |
|
50 | 127 | def sentinel(counter=([fillvalue] * (len(args) - 1)).pop): |
|
51 | yield counter() # yields the fillvalue, or raises IndexError | |
|
128 | yield counter() # yields the fillvalue, or raises IndexError | |
|
52 | 129 | |
|
53 | 130 | fillers = itertools.repeat(fillvalue) |
|
54 | 131 | iters = [itertools.chain(it, sentinel(), fillers) |
@@ -376,7 +376,7 b' class DbManage(object):' | |||
|
376 | 376 | |
|
377 | 377 | if not self.tests and not test_repo_path: |
|
378 | 378 | path = raw_input( |
|
379 | 'Enter a valid path to store repositories. ' | |
|
379 | 'Enter a valid absolute path to store repositories. ' | |
|
380 | 380 | 'All repositories in that path will be added automatically:' |
|
381 | 381 | ) |
|
382 | 382 | else: |
@@ -388,8 +388,12 b' class DbManage(object):' | |||
|
388 | 388 | path_ok = False |
|
389 | 389 | log.error('Given path %s is not a valid directory' % path) |
|
390 | 390 | |
|
391 | elif not os.path.isabs(path): | |
|
392 | path_ok = False | |
|
393 | log.error('Given path %s is not an absolute path' % path) | |
|
394 | ||
|
391 | 395 | # check write access |
|
392 | if not os.access(path, os.W_OK) and path_ok: | |
|
396 | elif not os.access(path, os.W_OK) and path_ok: | |
|
393 | 397 | path_ok = False |
|
394 | 398 | log.error('No write permission to given path %s' % path) |
|
395 | 399 |
@@ -71,9 +71,6 b' class InvalidScriptError(ScriptError):' | |||
|
71 | 71 | """Invalid script error.""" |
|
72 | 72 | |
|
73 | 73 | |
|
74 | class InvalidVersionError(Error): | |
|
75 | """Invalid version error.""" | |
|
76 | ||
|
77 | 74 | # migrate.changeset |
|
78 | 75 | |
|
79 | 76 | class NotSupportedError(Error): |
@@ -39,7 +39,7 b' from rhodecode.lib.vcs.utils.helpers imp' | |||
|
39 | 39 | from rhodecode.lib.vcs.exceptions import VCSError |
|
40 | 40 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
41 | 41 | |
|
42 | from rhodecode.lib import str2bool, safe_str, get_changeset_safe, \ | |
|
42 | from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \ | |
|
43 | 43 | generate_api_key, safe_unicode |
|
44 | 44 | from rhodecode.lib.exceptions import UsersGroupsAssignedException |
|
45 | 45 | from rhodecode.lib.compat import json |
@@ -717,7 +717,7 b' class Repository(Base, BaseModel):' | |||
|
717 | 717 | return repo |
|
718 | 718 | |
|
719 | 719 | |
|
720 | class | |
|
|
720 | class Group(Base, BaseModel): | |
|
721 | 721 | __tablename__ = 'groups' |
|
722 | 722 | __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'), |
|
723 | 723 | CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},) |
@@ -728,8 +728,7 b' class RepoGroup(Base, BaseModel):' | |||
|
728 | 728 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
729 | 729 | group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
730 | 730 | |
|
731 | parent_group = relationship(' | |
|
|
732 | ||
|
731 | parent_group = relationship('Group', remote_side=group_id) | |
|
733 | 732 | |
|
734 | 733 | def __init__(self, group_name='', parent_group=None): |
|
735 | 734 | self.group_name = group_name |
@@ -39,12 +39,20 b' from webhelpers.html.tags import _set_in' | |||
|
39 | 39 | |
|
40 | 40 | from rhodecode.lib.annotate import annotate_highlight |
|
41 | 41 | from rhodecode.lib.utils import repo_name_slug |
|
42 | from rhodecode.lib import str2bool, safe_unicode, safe_str, get_changeset_safe | |
|
|
42 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ | |
|
43 | get_changeset_safe | |
|
43 | 44 | from rhodecode.lib.markup_renderer import MarkupRenderer |
|
44 | 45 | |
|
45 | 46 | log = logging.getLogger(__name__) |
|
46 | 47 | |
|
47 | 48 | |
|
49 | def shorter(text, size=20): | |
|
50 | postfix = '...' | |
|
51 | if len(text) > size: | |
|
52 | return text[:size - len(postfix)] + postfix | |
|
53 | return text | |
|
54 | ||
|
55 | ||
|
48 | 56 | def _reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
49 | 57 | """ |
|
50 | 58 | Reset button |
@@ -67,7 +75,7 b' def FID(raw_id, path):' | |||
|
67 | 75 | :param path: |
|
68 | 76 | """ |
|
69 | 77 | |
|
70 | return 'C-%s-%s' % (short_id(raw_id), md5(path).hexdigest()[:12]) | |
|
78 | return 'C-%s-%s' % (short_id(raw_id), md5(safe_str(path)).hexdigest()[:12]) | |
|
71 | 79 | |
|
72 | 80 | |
|
73 | 81 | def get_token(): |
@@ -86,6 +94,7 b' def get_token():' | |||
|
86 | 94 | session.save() |
|
87 | 95 | return session[token_key] |
|
88 | 96 | |
|
97 | ||
|
89 | 98 | class _GetError(object): |
|
90 | 99 | """Get error from form_errors, and represent it as span wrapped error |
|
91 | 100 | message |
@@ -101,6 +110,7 b' class _GetError(object):' | |||
|
101 | 110 | |
|
102 | 111 | get_error = _GetError() |
|
103 | 112 | |
|
113 | ||
|
104 | 114 | class _ToolTip(object): |
|
105 | 115 | |
|
106 | 116 | def __call__(self, tooltip_title, trim_at=50): |
@@ -112,6 +122,7 b' class _ToolTip(object):' | |||
|
112 | 122 | return escape(tooltip_title) |
|
113 | 123 | tooltip = _ToolTip() |
|
114 | 124 | |
|
125 | ||
|
115 | 126 | class _FilesBreadCrumbs(object): |
|
116 | 127 | |
|
117 | 128 | def __call__(self, repo_name, rev, paths): |
@@ -136,8 +147,10 b' class _FilesBreadCrumbs(object):' | |||
|
136 | 147 | |
|
137 | 148 | files_breadcrumbs = _FilesBreadCrumbs() |
|
138 | 149 | |
|
150 | ||
|
139 | 151 | class CodeHtmlFormatter(HtmlFormatter): |
|
140 | """My code Html Formatter for source codes | |
152 | """ | |
|
153 | My code Html Formatter for source codes | |
|
141 | 154 | """ |
|
142 | 155 | |
|
143 | 156 | def wrap(self, source, outfile): |
@@ -319,7 +332,7 b' flash = _Flash()' | |||
|
319 | 332 | # SCM FILTERS available via h. |
|
320 | 333 | #============================================================================== |
|
321 | 334 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
322 | from rhodecode.lib import credentials_filter, age as _age | |
|
335 | from rhodecode.lib.utils2 import credentials_filter, age as _age | |
|
323 | 336 | from rhodecode.model.db import User |
|
324 | 337 | |
|
325 | 338 | age = lambda x: _age(x) |
@@ -759,10 +772,10 b' def fancy_file_stats(stats):' | |||
|
759 | 772 | d_v = d if d > 0 else '' |
|
760 | 773 | |
|
761 | 774 | def cgen(l_type): |
|
762 | mapping = {'tr': 'top-right-rounded-corner', | |
|
763 | 'tl': 'top-left-rounded-corner', | |
|
764 | 'br': 'bottom-right-rounded-corner', | |
|
765 | 'bl': 'bottom-left-rounded-corner'} | |
|
775 | mapping = {'tr': 'top-right-rounded-corner-mid', | |
|
776 | 'tl': 'top-left-rounded-corner-mid', | |
|
777 | 'br': 'bottom-right-rounded-corner-mid', | |
|
778 | 'bl': 'bottom-left-rounded-corner-mid'} | |
|
766 | 779 | map_getter = lambda x: mapping[x] |
|
767 | 780 | |
|
768 | 781 | if l_type == 'a' and d_v: |
@@ -801,6 +814,12 b' def urlify_text(text_):' | |||
|
801 | 814 | |
|
802 | 815 | |
|
803 | 816 | def urlify_changesets(text_, repository): |
|
817 | """ | |
|
818 | Extract revision ids from changeset and make link from them | |
|
819 | ||
|
820 | :param text_: | |
|
821 | :param repository: | |
|
822 | """ | |
|
804 | 823 | import re |
|
805 | 824 | URL_PAT = re.compile(r'([0-9a-fA-F]{12,})') |
|
806 | 825 | |
@@ -839,8 +858,8 b' def urlify_commit(text_, repository=None' | |||
|
839 | 858 | import re |
|
840 | 859 | import traceback |
|
841 | 860 | |
|
842 | # urlify changesets | |
|
843 | text_ = urlify_changesets(text_, repository) | |
|
861 | def escaper(string): | |
|
862 | return string.replace('<', '<').replace('>', '>') | |
|
844 | 863 | |
|
845 | 864 | def linkify_others(t, l): |
|
846 | 865 | urls = re.compile(r'(\<a.*?\<\/a\>)',) |
@@ -852,6 +871,11 b' def urlify_commit(text_, repository=None' | |||
|
852 | 871 | links.append(e) |
|
853 | 872 | |
|
854 | 873 | return ''.join(links) |
|
874 | ||
|
875 | ||
|
876 | # urlify changesets - extrac revisions and make link out of them | |
|
877 | text_ = urlify_changesets(escaper(text_), repository) | |
|
878 | ||
|
855 | 879 | try: |
|
856 | 880 | conf = config['app_conf'] |
|
857 | 881 |
@@ -27,9 +27,10 b' import sys' | |||
|
27 | 27 | |
|
28 | 28 | from mercurial.scmutil import revrange |
|
29 | 29 | from mercurial.node import nullrev |
|
30 | ||
|
30 | from rhodecode import EXTENSIONS | |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib.utils import action_logger |
|
33 | from inspect import isfunction | |
|
33 | 34 | |
|
34 | 35 | |
|
35 | 36 | def repo_size(ui, repo, hooktype=None, **kwargs): |
@@ -78,14 +79,19 b' def log_pull_action(ui, repo, **kwargs):' | |||
|
78 | 79 | :param repo: |
|
79 | 80 | """ |
|
80 | 81 | |
|
81 | extra = dict(repo.ui.configitems('rhodecode_extras')) | |
|
|
82 | username = extra['username'] | |
|
|
83 | repository = extra['repository'] | |
|
|
82 | extras = dict(repo.ui.configitems('rhodecode_extras')) | |
|
83 | username = extras['username'] | |
|
84 | repository = extras['repository'] | |
|
84 | 85 | action = 'pull' |
|
85 | 86 | |
|
86 | action_logger(username, action, repository, extra['ip'], | |
|
|
87 | commit=True) | |
|
87 | action_logger(username, action, repository, extras['ip'], commit=True) | |
|
88 | # extension hook call | |
|
89 | callback = getattr(EXTENSIONS, 'PULL_HOOK', None) | |
|
88 | 90 | |
|
91 | if isfunction(callback): | |
|
92 | kw = {} | |
|
93 | kw.update(extras) | |
|
94 | callback(**kw) | |
|
89 | 95 | return 0 |
|
90 | 96 | |
|
91 | 97 | |
@@ -97,10 +103,10 b' def log_push_action(ui, repo, **kwargs):' | |||
|
97 | 103 | :param repo: |
|
98 | 104 | """ |
|
99 | 105 | |
|
100 | extra_params = dict(repo.ui.configitems('rhodecode_extras')) |
|
|
101 | username = extra_params['username'] |
|
|
102 | repository = extra_params['repository'] |
|
|
103 | action = extra_params['action'] + ':%s' |
|
|
106 | extras = dict(repo.ui.configitems('rhodecode_extras')) | |
|
107 | username = extras['username'] | |
|
108 | repository = extras['repository'] | |
|
109 | action = extras['action'] + ':%s' | |
|
104 | 110 | node = kwargs['node'] |
|
105 | 111 | |
|
106 | 112 | def get_revs(repo, rev_opt): |
@@ -119,16 +125,22 b' def log_push_action(ui, repo, **kwargs):' | |||
|
119 | 125 | |
|
120 | 126 | action = action % ','.join(revs) |
|
121 | 127 | |
|
122 | action_logger(username, action, repository, extra_params['ip'], |
|
|
123 | commit=True) | |
|
128 | action_logger(username, action, repository, extras['ip'], commit=True) | |
|
124 | 129 | |
|
130 | # extension hook call | |
|
131 | callback = getattr(EXTENSIONS, 'PUSH_HOOK', None) | |
|
132 | if isfunction(callback): | |
|
133 | kw = {'pushed_revs': revs} | |
|
134 | kw.update(extras) | |
|
135 | callback(**kw) | |
|
125 | 136 | return 0 |
|
126 | 137 | |
|
127 | 138 | |
|
128 | 139 | def log_create_repository(repository_dict, created_by, **kwargs): |
|
129 | 140 | """ |
|
130 | 141 | Post create repository Hook. This is a dummy function for admins to re-use |
|
131 | if needed | |
|
142 | if needed. It's taken from rhodecode-extensions module and executed | |
|
143 | if present | |
|
132 | 144 | |
|
133 | 145 | :param repository: dict dump of repository object |
|
134 | 146 | :param created_by: username who created repository |
@@ -151,5 +163,12 b' def log_create_repository(repository_dic' | |||
|
151 | 163 | |
|
152 | 164 | """ |
|
153 | 165 | |
|
166 | callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None) | |
|
167 | if isfunction(callback): | |
|
168 | kw = {} | |
|
169 | kw.update(repository_dict) | |
|
170 | kw.update({'created_by': created_by}) | |
|
171 | kw.update(kwargs) | |
|
172 | return callback(**kw) | |
|
154 | 173 | |
|
155 | 174 | return 0 |
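The three hooks above (log_pull_action, log_push_action, log_create_repository) only call into rhodecode.EXTENSIONS when a matching attribute exists and is a function, so a plain module is enough to plug in custom behaviour. Below is a minimal, illustrative sketch of such an rcextensions module; the keyword names mirror the extras dict built in the hooks, while everything else (the print calls, the exact set of keys) is an assumption, not part of this changeset:

    # rcextensions/__init__.py -- illustrative sketch only
    def PULL_HOOK(username=None, repository=None, **kwargs):
        # invoked by log_pull_action() after the pull has been logged
        print('pull from %s by %s' % (repository, username))

    def PUSH_HOOK(username=None, repository=None, pushed_revs=None, **kwargs):
        # invoked by log_push_action(); pushed_revs is added on top of extras
        print('push to %s by %s: %s revision(s)' % (
            repository, username, len(pushed_revs or [])))

    def CREATE_REPO_HOOK(created_by=None, **kwargs):
        # invoked by log_create_repository(); kwargs carries the dumped repository dict
        print('repository created by %s' % created_by)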
@@ -25,6 +25,7 b'' | |||
|
25 | 25 | import os |
|
26 | 26 | import sys |
|
27 | 27 | import traceback |
|
28 | import logging | |
|
28 | 29 | from os.path import dirname as dn, join as jn |
|
29 | 30 | |
|
30 | 31 | #to get the rhodecode import |
@@ -46,11 +47,9 b' from rhodecode.model import init_model' | |||
|
46 | 47 | from rhodecode.model.scm import ScmModel |
|
47 | 48 | from rhodecode.model.repo import RepoModel |
|
48 | 49 | from rhodecode.config.environment import load_environment |
|
49 | from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, LazyProperty |
|
|
50 | from rhodecode.lib.utils import BasePasterCommand, Command, add_cache | |
|
51 | ||
|
52 | # EXTENSIONS WE WANT TO INDEX CONTENT OFF | |
|
53 | INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys() | |
|
50 | from rhodecode.lib.utils2 import LazyProperty | |
|
51 | from rhodecode.lib.utils import BasePasterCommand, Command, add_cache,\ | |
|
52 | load_rcextensions | |
|
54 | 53 | |
|
55 | 54 | # CUSTOM ANALYZER wordsplit + lowercase filter |
|
56 | 55 | ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter() |
@@ -84,18 +83,17 b' class MakeIndex(BasePasterCommand):' | |||
|
84 | 83 | parser = Command.standard_parser(verbose=True) |
|
85 | 84 | |
|
86 | 85 | def command(self): |
|
87 | ||
|
86 | logging.config.fileConfig(self.path_to_ini_file) | |
|
88 | 87 | from pylons import config |
|
89 | 88 | add_cache(config) |
|
90 | 89 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
91 | 90 | init_model(engine) |
|
92 | ||
|
93 | 91 | index_location = config['index_dir'] |
|
94 | 92 | repo_location = self.options.repo_location \ |
|
95 | 93 | if self.options.repo_location else RepoModel().repos_path |
|
96 | 94 | repo_list = map(strip, self.options.repo_list.split(',')) \ |
|
97 | 95 | if self.options.repo_list else None |
|
98 | ||
|
96 | load_rcextensions(config['here']) | |
|
99 | 97 | #====================================================================== |
|
100 | 98 | # WHOOSH DAEMON |
|
101 | 99 | #====================================================================== |
@@ -105,7 +103,7 b' class MakeIndex(BasePasterCommand):' | |||
|
105 | 103 | l = DaemonLock(file_=jn(dn(dn(index_location)), 'make_index.lock')) |
|
106 | 104 | WhooshIndexingDaemon(index_location=index_location, |
|
107 | 105 | repo_location=repo_location, |
|
108 | repo_list=repo_list)\ | |
|
106 | repo_list=repo_list,)\ | |
|
109 | 107 | .run(full_index=self.options.full_index) |
|
110 | 108 | l.release() |
|
111 | 109 | except LockHeld: |
@@ -38,34 +38,17 b' from os.path import join as jn' | |||
|
38 | 38 | project_path = dn(dn(dn(dn(os.path.realpath(__file__))))) |
|
39 | 39 | sys.path.append(project_path) |
|
40 | 40 | |
|
41 | ||
|
41 | from rhodecode.config.conf import INDEX_EXTENSIONS | |
|
42 | 42 | from rhodecode.model.scm import ScmModel |
|
43 | from rhodecode.lib import safe_unicode | |
|
44 | from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME |
|
|
43 | from rhodecode.lib.utils2 import safe_unicode | |
|
44 | from rhodecode.lib.indexers import SCHEMA, IDX_NAME | |
|
45 | 45 | |
|
46 | 46 | from rhodecode.lib.vcs.exceptions import ChangesetError, RepositoryError, \ |
|
47 | 47 | NodeDoesNotExistError |
|
48 | 48 | |
|
49 | 49 | from whoosh.index import create_in, open_dir |
|
50 | 50 | |
|
51 | ||
|
52 | log = logging.getLogger('whooshIndexer') | |
|
53 | # create logger | |
|
54 | log.setLevel(logging.DEBUG) | |
|
55 | log.propagate = False | |
|
56 | # create console handler and set level to debug | |
|
57 | ch = logging.StreamHandler() | |
|
58 | ch.setLevel(logging.DEBUG) | |
|
59 | ||
|
60 | # create formatter | |
|
61 | formatter = logging.Formatter("%(asctime)s - %(name)s -" | |
|
62 | " %(levelname)s - %(message)s") | |
|
63 | ||
|
64 | # add formatter to ch | |
|
65 | ch.setFormatter(formatter) | |
|
66 | ||
|
67 | # add ch to logger | |
|
68 | log.addHandler(ch) | |
|
51 | log = logging.getLogger('whoosh_indexer') | |
|
69 | 52 | |
|
70 | 53 | |
|
71 | 54 | class WhooshIndexingDaemon(object): |
@@ -103,7 +86,8 b' class WhooshIndexingDaemon(object):' | |||
|
103 | 86 | self.initial = True |
|
104 | 87 | |
|
105 | 88 | def get_paths(self, repo): |
|
106 | """recursive walk in root dir and return a set of all path in that dir | |
|
89 | """ | |
|
90 | recursive walk in root dir and return a set of all path in that dir | |
|
107 | 91 | based on repository walk function |
|
108 | 92 | """ |
|
109 | 93 | index_paths_ = set() |
@@ -127,32 +111,39 b' class WhooshIndexingDaemon(object):' | |||
|
127 | 111 | return mktime(node.last_changeset.date.timetuple()) |
|
128 | 112 | |
|
129 | 113 | def add_doc(self, writer, path, repo, repo_name): |
|
130 | """Adding doc to writer this function itself fetches data from | |
|
131 | the instance of vcs backend""" | |
|
132 | node = self.get_node(repo, path) | |
|
114 | """ | |
|
115 | Adding doc to writer this function itself fetches data from | |
|
116 | the instance of vcs backend | |
|
117 | """ | |
|
133 | 118 | |
|
134 | #we just index the content of chosen files, and skip binary files | |
|
119 | node = self.get_node(repo, path) | |
|
120 | indexed = indexed_w_content = 0 | |
|
121 | # we just index the content of chosen files, and skip binary files | |
|
135 | 122 | if node.extension in INDEX_EXTENSIONS and not node.is_binary: |
|
136 | ||
|
137 | 123 | u_content = node.content |
|
138 | 124 | if not isinstance(u_content, unicode): |
|
139 | 125 | log.warning(' >> %s Could not get this content as unicode ' |
|
140 | 'replacing with empty content') |
|
|
126 | 'replacing with empty content' % path) | |
|
141 | 127 | u_content = u'' |
|
142 | 128 | else: |
|
143 | 129 | log.debug(' >> %s [WITH CONTENT]' % path) |
|
130 | indexed_w_content += 1 | |
|
144 | 131 | |
|
145 | 132 | else: |
|
146 | 133 | log.debug(' >> %s' % path) |
|
147 | #just index file name without it's content | |
|
134 | # just index file name without its content |
|
148 | 135 | u_content = u'' |
|
136 | indexed += 1 | |
|
149 | 137 | |
|
150 | writer.add_document(owner=unicode(repo.contact), |
|
|
151 | repository=safe_unicode(repo_name), |
|
|
|
152 | path=safe_unicode(path), |
|
|
|
153 | content=u_content, | |
|
154 | modtime=self.get_node_mtime(node), | |
|
155 | extension=node.extension) | |
|
138 | writer.add_document( | |
|
139 | owner=unicode(repo.contact), | |
|
140 | repository=safe_unicode(repo_name), | |
|
141 | path=safe_unicode(path), | |
|
142 | content=u_content, | |
|
143 | modtime=self.get_node_mtime(node), | |
|
144 | extension=node.extension | |
|
145 | ) | |
|
146 | return indexed, indexed_w_content | |
|
156 | 147 | |
|
157 | 148 | def build_index(self): |
|
158 | 149 | if os.path.exists(self.index_location): |
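The indexing flow that add_doc() and build_index() implement is the standard Whoosh pattern: create the index once, call writer.add_document() for every file, then commit with merge. A condensed, self-contained sketch of that pattern follows; the schema is simplified here, while RhodeCode's real SCHEMA and IDX_NAME come from rhodecode.lib.indexers:

    import tempfile
    from whoosh.index import create_in
    from whoosh.fields import Schema, ID, TEXT

    # simplified stand-in for rhodecode.lib.indexers.SCHEMA
    schema = Schema(path=ID(stored=True), repository=ID(stored=True),
                    content=TEXT())

    index_location = tempfile.mkdtemp()
    idx = create_in(index_location, schema)
    writer = idx.writer()
    # one add_document() call per indexed file, as in add_doc() above
    writer.add_document(path=u'docs/index.rst', repository=u'rhodecode',
                        content=u'RhodeCode documentation')
    writer.add_document(path=u'logo.png', repository=u'rhodecode',
                        content=u'')  # binary file: name only, empty content
    writer.commit(merge=True)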
@@ -164,19 +155,25 b' class WhooshIndexingDaemon(object):' | |||
|
164 | 155 | |
|
165 | 156 | idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME) |
|
166 | 157 | writer = idx.writer() |
|
167 | ||
|
158 | log.debug('BUILDING INDEX FOR EXTENSIONS %s' % INDEX_EXTENSIONS) |
|
168 | 159 | for repo_name, repo in self.repo_paths.items(): |
|
169 | 160 | log.debug('building index @ %s' % repo.path) |
|
170 | ||
|
161 | i_cnt = iwc_cnt = 0 | |
|
171 | 162 | for idx_path in self.get_paths(repo): |
|
172 | self.add_doc(writer, idx_path, repo, repo_name) | |
|
163 | i, iwc = self.add_doc(writer, idx_path, repo, repo_name) | |
|
164 | i_cnt += i | |
|
165 | iwc_cnt += iwc | |
|
166 | log.debug('added %s files %s with content for repo %s' % ( | |
|
167 | i_cnt + iwc_cnt, iwc_cnt, repo.path) | |
|
168 | ) | |
|
173 | 169 | |
|
174 | 170 | log.debug('>> COMMITING CHANGES <<') |
|
175 | 171 | writer.commit(merge=True) |
|
176 | 172 | log.debug('>>> FINISHED BUILDING INDEX <<<') |
|
177 | 173 | |
|
178 | 174 | def update_index(self): |
|
179 | log.debug('STARTING INCREMENTAL INDEXING UPDATE') |
|
|
175 | log.debug('STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s' % | |
|
176 | INDEX_EXTENSIONS) | |
|
180 | 177 | |
|
181 | 178 | idx = open_dir(self.index_location, indexname=self.indexname) |
|
182 | 179 | # The set of all paths in the index |
@@ -215,14 +212,19 b' class WhooshIndexingDaemon(object):' | |||
|
215 | 212 | # Loop over the files in the filesystem |
|
216 | 213 | # Assume we have a function that gathers the filenames of the |
|
217 | 214 | # documents to be indexed |
|
215 | ri_cnt = riwc_cnt = 0 | |
|
218 | 216 | for repo_name, repo in self.repo_paths.items(): |
|
219 | 217 | for path in self.get_paths(repo): |
|
220 | 218 | if path in to_index or path not in indexed_paths: |
|
221 | 219 | # This is either a file that's changed, or a new file |
|
222 | 220 | # that wasn't indexed before. So index it! |
|
223 | self.add_doc(writer, path, repo, repo_name) | |
|
221 | i, iwc = self.add_doc(writer, path, repo, repo_name) | |
|
224 | 222 | log.debug('re indexing %s' % path) |
|
225 | ||
|
223 | ri_cnt += i | |
|
224 | riwc_cnt += iwc | |
|
225 | log.debug('added %s files %s with content for repo %s' % ( | |
|
226 | ri_cnt + riwc_cnt, riwc_cnt, repo.path) | |
|
227 | ) | |
|
226 | 228 | log.debug('>> COMMITING CHANGES <<') |
|
227 | 229 | writer.commit(merge=True) |
|
228 | 230 | log.debug('>>> FINISHED REBUILDING INDEX <<<') |
@@ -27,7 +27,7 b'' | |||
|
27 | 27 | import re |
|
28 | 28 | import logging |
|
29 | 29 | |
|
30 | from rhodecode.lib import safe_unicode | |
|
30 | from rhodecode.lib.utils2 import safe_unicode | |
|
31 | 31 | |
|
32 | 32 | log = logging.getLogger(__name__) |
|
33 | 33 |
@@ -23,7 +23,7 b'' | |||
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | from rhodecode.lib import str2bool | |
|
26 | from rhodecode.lib.utils2 import str2bool | |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class HttpsFixup(object): |
@@ -65,11 +65,11 b' dulserver.DEFAULT_HANDLERS = {' | |||
|
65 | 65 | } |
|
66 | 66 | |
|
67 | 67 | from dulwich.repo import Repo |
|
68 | from dulwich.web import HTTPGitApplication |
|
|
68 | from dulwich.web import make_wsgi_chain | |
|
69 | 69 | |
|
70 | 70 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
71 | 71 | |
|
72 | from rhodecode.lib import safe_str | |
|
72 | from rhodecode.lib.utils2 import safe_str | |
|
73 | 73 | from rhodecode.lib.base import BaseVCSController |
|
74 | 74 | from rhodecode.lib.auth import get_container_username |
|
75 | 75 | from rhodecode.lib.utils import is_valid_repo |
@@ -86,7 +86,9 b" GIT_PROTO_PAT = re.compile(r'^/(.+)/(inf" | |||
|
86 | 86 | def is_git(environ): |
|
87 | 87 | path_info = environ['PATH_INFO'] |
|
88 | 88 | isgit_path = GIT_PROTO_PAT.match(path_info) |
|
89 | log.debug('is a git path %s pathinfo : %s' % (isgit_path, path_info)) | |
|
89 | log.debug('pathinfo: %s detected as GIT %s' % ( | |
|
90 | path_info, isgit_path != None) | |
|
91 | ) | |
|
90 | 92 | return isgit_path |
|
91 | 93 | |
|
92 | 94 | |
@@ -113,6 +115,10 b' class SimpleGit(BaseVCSController):' | |||
|
113 | 115 | except: |
|
114 | 116 | return HTTPInternalServerError()(environ, start_response) |
|
115 | 117 | |
|
118 | # quick check if that dir exists... | |
|
119 | if is_valid_repo(repo_name, self.basepath) is False: | |
|
120 | return HTTPNotFound()(environ, start_response) | |
|
121 | ||
|
116 | 122 | #====================================================================== |
|
117 | 123 | # GET ACTION PULL or PUSH |
|
118 | 124 | #====================================================================== |
@@ -121,7 +127,6 b' class SimpleGit(BaseVCSController):' | |||
|
121 | 127 | #====================================================================== |
|
122 | 128 | # CHECK ANONYMOUS PERMISSION |
|
123 | 129 | #====================================================================== |
|
124 | ||
|
125 | 130 | if action in ['pull', 'push']: |
|
126 | 131 | anonymous_user = self.__get_user('default') |
|
127 | 132 | username = anonymous_user.username |
@@ -177,13 +182,9 b' class SimpleGit(BaseVCSController):' | |||
|
177 | 182 | #=================================================================== |
|
178 | 183 | # GIT REQUEST HANDLING |
|
179 | 184 | #=================================================================== |
|
180 | repo_path = safe_str(os.path.join(self.basepath, repo_name)) |
|
|
185 | repo_path = os.path.join(safe_str(self.basepath), safe_str(repo_name)) | |
|
181 | 186 | log.debug('Repository path is %s' % repo_path) |
|
182 | 187 | |
|
183 | # quick check if that dir exists... | |
|
184 | if is_valid_repo(repo_name, self.basepath) is False: | |
|
185 | return HTTPNotFound()(environ, start_response) | |
|
186 | ||
|
187 | 188 | try: |
|
188 | 189 | #invalidate cache on push |
|
189 | 190 | if action == 'push': |
@@ -204,7 +205,7 b' class SimpleGit(BaseVCSController):' | |||
|
204 | 205 | """ |
|
205 | 206 | _d = {'/' + repo_name: Repo(repo_path)} |
|
206 | 207 | backend = dulserver.DictBackend(_d) |
|
207 | gitserve = HTTPGitApplication(backend) |
|
|
208 | gitserve = make_wsgi_chain(backend) | |
|
208 | 209 | |
|
209 | 210 | return gitserve |
|
210 | 211 |
@@ -27,13 +27,14 b'' | |||
|
27 | 27 | import os |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | import urllib | |
|
30 | 31 | |
|
31 | 32 | from mercurial.error import RepoError |
|
32 | 33 | from mercurial.hgweb import hgweb_mod |
|
33 | 34 | |
|
34 | 35 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
35 | 36 | |
|
36 | from rhodecode.lib import safe_str | |
|
37 | from rhodecode.lib.utils2 import safe_str | |
|
37 | 38 | from rhodecode.lib.base import BaseVCSController |
|
38 | 39 | from rhodecode.lib.auth import get_container_username |
|
39 | 40 | from rhodecode.lib.utils import make_ui, is_valid_repo, ui_sections |
@@ -45,13 +46,21 b' log = logging.getLogger(__name__)' | |||
|
45 | 46 | |
|
46 | 47 | |
|
47 | 48 | def is_mercurial(environ): |
|
48 | """Returns True if request's target is mercurial server - header | |
|
49 | """ | |
|
50 | Returns True if request's target is mercurial server - header | |
|
49 | 51 | ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. |
|
50 | 52 | """ |
|
51 | 53 | http_accept = environ.get('HTTP_ACCEPT') |
|
54 | path_info = environ['PATH_INFO'] | |
|
52 | 55 | if http_accept and http_accept.startswith('application/mercurial'): |
|
53 | return True |
|
|
|
54 | return False |
|
|
|
56 | ishg_path = True | |
|
57 | else: | |
|
58 | ishg_path = False | |
|
59 | ||
|
60 | log.debug('pathinfo: %s detected as HG %s' % ( | |
|
61 | path_info, ishg_path) | |
|
62 | ) | |
|
63 | return ishg_path | |
|
55 | 64 | |
|
56 | 65 | |
|
57 | 66 | class SimpleHg(BaseVCSController): |
@@ -76,16 +85,20 b' class SimpleHg(BaseVCSController):' | |||
|
76 | 85 | except: |
|
77 | 86 | return HTTPInternalServerError()(environ, start_response) |
|
78 | 87 | |
|
88 | # quick check if that dir exists... | |
|
89 | if is_valid_repo(repo_name, self.basepath) is False: | |
|
90 | return HTTPNotFound()(environ, start_response) | |
|
91 | ||
|
79 | 92 | #====================================================================== |
|
80 | 93 | # GET ACTION PULL or PUSH |
|
81 | 94 | #====================================================================== |
|
82 | 95 | action = self.__get_action(environ) |
|
96 | ||
|
83 | 97 | #====================================================================== |
|
84 | 98 | # CHECK ANONYMOUS PERMISSION |
|
85 | 99 | #====================================================================== |
|
86 | 100 | if action in ['pull', 'push']: |
|
87 | 101 | anonymous_user = self.__get_user('default') |
|
88 | ||
|
89 | 102 | username = anonymous_user.username |
|
90 | 103 | anonymous_perm = self._check_permission(action, anonymous_user, |
|
91 | 104 | repo_name) |
@@ -132,30 +145,28 b' class SimpleHg(BaseVCSController):' | |||
|
132 | 145 | start_response) |
|
133 | 146 | |
|
134 | 147 | #check permissions for this repository |
|
135 | perm = self._check_permission(action, user, | |
|
136 | repo_name) | |
|
148 | perm = self._check_permission(action, user, repo_name) | |
|
137 | 149 | if perm is not True: |
|
138 | 150 | return HTTPForbidden()(environ, start_response) |
|
139 | 151 | |
|
140 | extras = {'ip': ipaddr, | |
|
141 | 'username': username, | |
|
142 | 'action': action, | |
|
143 | 'repository': repo_name} | |
|
152 | # extras are injected into mercurial UI object and later available | |
|
153 | # in hg hooks executed by rhodecode | |
|
154 | extras = { | |
|
155 | 'ip': ipaddr, | |
|
156 | 'username': username, | |
|
157 | 'action': action, | |
|
158 | 'repository': repo_name | |
|
159 | } | |
|
144 | 160 | |
|
145 | 161 | #====================================================================== |
|
146 | 162 | # MERCURIAL REQUEST HANDLING |
|
147 | 163 | #====================================================================== |
|
148 | ||
|
149 | repo_path = safe_str(os.path.join(self.basepath, repo_name)) | |
|
164 | repo_path = os.path.join(safe_str(self.basepath), safe_str(repo_name)) | |
|
150 | 165 | log.debug('Repository path is %s' % repo_path) |
|
151 | 166 | |
|
152 | 167 | baseui = make_ui('db') |
|
153 | 168 | self.__inject_extras(repo_path, baseui, extras) |
|
154 | 169 | |
|
155 | # quick check if that dir exists... | |
|
156 | if is_valid_repo(repo_name, self.basepath) is False: | |
|
157 | return HTTPNotFound()(environ, start_response) | |
|
158 | ||
|
159 | 170 | try: |
|
160 | 171 | # invalidate cache on push |
|
161 | 172 | if action == 'push': |
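The comment added to SimpleHg spells out the contract between the WSGI layer and the hooks: whatever ends up in extras is written into the repository's ui under the rhodecode_extras section and read back in hooks.py via repo.ui.configitems('rhodecode_extras'). A stripped-down sketch of that round trip with a bare mercurial ui object; the values are invented and the real __inject_extras() sets additional ui options as well:

    from mercurial import ui as uimod

    extras = {
        'ip': '127.0.0.1',
        'username': 'marcink',
        'action': 'push',
        'repository': 'rhodecode',
    }

    baseui = uimod.ui()
    # roughly what SimpleHg.__inject_extras() does with the dict above
    for key, value in extras.items():
        baseui.setconfig('rhodecode_extras', key, value)

    # and what log_push_action()/log_pull_action() do on the other side
    recovered = dict(baseui.configitems('rhodecode_extras'))
    assert recovered['username'] == 'marcink'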
@@ -51,9 +51,12 b' from rhodecode.lib.caching_query import ' | |||
|
51 | 51 | |
|
52 | 52 | from rhodecode.model import meta |
|
53 | 53 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ |
|
54 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm | |
|
54 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm,\ | |
|
55 | CacheInvalidation | |
|
55 | 56 | from rhodecode.model.meta import Session |
|
56 | 57 | from rhodecode.model.repos_group import ReposGroupModel |
|
58 | from rhodecode.lib.utils2 import safe_str, safe_unicode | |
|
59 | from rhodecode.lib.vcs.utils.fakemod import create_module | |
|
57 | 60 | |
|
58 | 61 | log = logging.getLogger(__name__) |
|
59 | 62 | |
@@ -61,7 +64,8 b" REMOVED_REPO_PAT = re.compile(r'rm__\\d{8" | |||
|
61 | 64 | |
|
62 | 65 | |
|
63 | 66 | def recursive_replace(str_, replace=' '): |
|
64 | """Recursive replace of given sign to just one instance | |
|
67 | """ | |
|
68 | Recursive replace of given sign to just one instance | |
|
65 | 69 | |
|
66 | 70 | :param str_: given string |
|
67 | 71 | :param replace: char to find and replace multiple instances |
@@ -79,7 +83,8 b" def recursive_replace(str_, replace=' ')" | |||
|
79 | 83 | |
|
80 | 84 | |
|
81 | 85 | def repo_name_slug(value): |
|
82 | """Return slug of name of repository | |
|
86 | """ | |
|
87 | Return slug of name of repository | |
|
83 | 88 | This function is called on each creation/modification |
|
84 | 89 | of repository to prevent bad names in repo |
|
85 | 90 | """ |
@@ -154,7 +159,10 b' def action_logger(user, action, repo, ip' | |||
|
154 | 159 | user_log.user_ip = ipaddr |
|
155 | 160 | sa.add(user_log) |
|
156 | 161 | |
|
157 | log.info('Adding user %s, action %s on %s' % (user_obj, action, repo)) | |
|
162 | log.info( | |
|
163 | 'Adding user %s, action %s on %s' % (user_obj, action, | |
|
164 | safe_unicode(repo)) | |
|
165 | ) | |
|
158 | 166 | if commit: |
|
159 | 167 | sa.commit() |
|
160 | 168 | except: |
@@ -198,12 +206,13 b' def get_repos(path, recursive=False):' | |||
|
198 | 206 | def is_valid_repo(repo_name, base_path): |
|
199 | 207 | """ |
|
200 | 208 | Returns True if given path is a valid repository False otherwise |
|
209 | ||
|
201 | 210 | :param repo_name: |
|
202 | 211 | :param base_path: |
|
203 | 212 | |
|
204 | 213 | :return True: if given path is a valid repository |
|
205 | 214 | """ |
|
206 | full_path = os.path.join(base_path, repo_name) | |
|
215 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) | |
|
207 | 216 | |
|
208 | 217 | try: |
|
209 | 218 | get_scm(full_path) |
@@ -219,7 +228,7 b' def is_valid_repos_group(repos_group_nam' | |||
|
219 | 228 | :param repo_name: |
|
220 | 229 | :param base_path: |
|
221 | 230 | """ |
|
222 | full_path = os.path.join(base_path, repos_group_name) | |
|
231 | full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name)) | |
|
223 | 232 | |
|
224 | 233 | # check if it's not a repo |
|
225 | 234 | if is_valid_repo(repos_group_name, base_path): |
@@ -258,7 +267,8 b" ui_sections = ['alias', 'auth'," | |||
|
258 | 267 | |
|
259 | 268 | |
|
260 | 269 | def make_ui(read_from='file', path=None, checkpaths=True): |
|
261 | """A function that will read python rc files or database | |
|
270 | """ | |
|
271 | A function that will read python rc files or database | |
|
262 | 272 | and make an mercurial ui object from read options |
|
263 | 273 | |
|
264 | 274 | :param path: path to mercurial config file |
@@ -371,15 +381,16 b' class EmptyChangeset(BaseChangeset):' | |||
|
371 | 381 | return 0 |
|
372 | 382 | |
|
373 | 383 | |
|
374 | def map_groups(groups): |
|
|
384 | def map_groups(path): | |
|
375 | 385 | """ |
|
376 | Checks for groups existence, and creates groups structures. | |
|
377 | It returns last group in structure | |
|
386 | Given a full path to a repository, create all nested groups that this | |
|
387 | repo is inside. This function creates parent-child relationships between | |
|
388 | groups and creates default perms for all new groups. | |
|
378 | 389 | |
|
379 | :param groups: list of groups structure | |
|
390 | :param path: full path to repository |
|
380 | 391 | """ |
|
381 | 392 | sa = meta.Session |
|
382 | ||
|
393 | groups = path.split(Repository.url_sep()) | |
|
383 | 394 | parent = None |
|
384 | 395 | group = None |
|
385 | 396 | |
@@ -391,22 +402,18 b' def map_groups(groups):' | |||
|
391 | 402 | group = RepoGroup.get_by_group_name(group_name) |
|
392 | 403 | desc = '%s group' % group_name |
|
393 | 404 | |
|
394 | # # WTF that doesn't work !? | |
|
395 | # if group is None: | |
|
396 | # group = rgm.create(group_name, desc, parent, just_db=True) | |
|
397 | # sa.commit() | |
|
398 | ||
|
399 | 405 | # skip folders that are now removed repos |
|
400 | 406 | if REMOVED_REPO_PAT.match(group_name): |
|
401 | 407 | break |
|
402 | 408 | |
|
403 | 409 | if group is None: |
|
404 | log.debug('creating group level: %s group_name: %s' % (lvl, group_name)) |
|
|
410 | log.debug('creating group level: %s group_name: %s' % (lvl, | |
|
411 | group_name)) | |
|
405 | 412 | group = RepoGroup(group_name, parent) |
|
406 | 413 | group.group_description = desc |
|
407 | 414 | sa.add(group) |
|
408 | 415 | rgm._create_default_perms(group) |
|
409 | sa.commit() |
|
|
416 | sa.flush() | |
|
410 | 417 | parent = group |
|
411 | 418 | return group |
|
412 | 419 | |
@@ -429,7 +436,7 b' def repo2db_mapper(initial_repo_list, re' | |||
|
429 | 436 | added = [] |
|
430 | 437 | |
|
431 | 438 | for name, repo in initial_repo_list.items(): |
|
432 | group = map_groups(name.split(Repository.url_sep())) |
|
|
439 | group = map_groups(name) | |
|
433 | 440 | if not rm.get_by_repo_name(name, cache=False): |
|
434 | 441 | log.info('repository %s not found creating default' % name) |
|
435 | 442 | added.append(name) |
@@ -446,13 +453,19 b' def repo2db_mapper(initial_repo_list, re' | |||
|
446 | 453 | sa.commit() |
|
447 | 454 | removed = [] |
|
448 | 455 | if remove_obsolete: |
|
449 | #remove from database those repositories that are not in the filesystem | |
|
456 | # remove from database those repositories that are not in the filesystem | |
|
450 | 457 | for repo in sa.query(Repository).all(): |
|
451 | 458 | if repo.repo_name not in initial_repo_list.keys(): |
|
459 | log.debug("Removing non existing repository found in db %s" % | |
|
460 | repo.repo_name) | |
|
452 | 461 | removed.append(repo.repo_name) |
|
453 | 462 | sa.delete(repo) |
|
454 | 463 | sa.commit() |
|
455 | 464 | |
|
465 | # clear cache keys | |
|
466 | log.debug("Clearing cache keys now...") | |
|
467 | CacheInvalidation.clear_cache() | |
|
468 | sa.commit() | |
|
456 | 469 | return added, removed |
|
457 | 470 | |
|
458 | 471 | |
@@ -484,6 +497,30 b' def add_cache(settings):' | |||
|
484 | 497 | beaker.cache.cache_regions[region] = region_settings |
|
485 | 498 | |
|
486 | 499 | |
|
500 | def load_rcextensions(root_path): | |
|
501 | import rhodecode | |
|
502 | from rhodecode.config import conf | |
|
503 | ||
|
504 | path = os.path.join(root_path, 'rcextensions', '__init__.py') | |
|
505 | if os.path.isfile(path): | |
|
506 | rcext = create_module('rc', path) | |
|
507 | EXT = rhodecode.EXTENSIONS = rcext | |
|
508 | log.debug('Found rcextensions now loading %s...' % rcext) | |
|
509 | ||
|
510 | # Additional mappings that are not present in the pygments lexers | |
|
511 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) | |
|
512 | ||
|
513 | #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) | |
|
514 | ||
|
515 | if getattr(EXT, 'INDEX_EXTENSIONS', []) != []: | |
|
516 | log.debug('settings custom INDEX_EXTENSIONS') | |
|
517 | conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) | |
|
518 | ||
|
519 | #ADDITIONAL MAPPINGS | |
|
520 | log.debug('adding extra into INDEX_EXTENSIONS') | |
|
521 | conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) | |
|
522 | ||
|
523 | ||
|
487 | 524 | #============================================================================== |
|
488 | 525 | # TEST FUNCTIONS AND CREATORS |
|
489 | 526 | #============================================================================== |
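load_rcextensions() treats the rcextensions module as plain configuration on top of the hook callables: a handful of optional module-level names override or extend the defaults in rhodecode.config.conf. A sketch of the names the function looks for, with purely illustrative values:

    # rcextensions/__init__.py -- configuration side, illustrative values only

    # when non-empty, replaces conf.INDEX_EXTENSIONS entirely
    INDEX_EXTENSIONS = ['py', 'rst', 'ini']

    # always appended to the resulting INDEX_EXTENSIONS list
    EXTRA_INDEX_EXTENSIONS = ['xyz']

    # merged into conf.LANGUAGES_EXTENSIONS_MAP (value format assumed for illustration)
    EXTRA_MAPPINGS = {'xyz': 'Python'}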
@@ -624,6 +661,6 b' class BasePasterCommand(Command):' | |||
|
624 | 661 | """ |
|
625 | 662 | from pylons import config as pylonsconfig |
|
626 | 663 | |
|
627 | path_to_ini_file = os.path.realpath(conf) | |
|
628 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) | |
|
664 | self.path_to_ini_file = os.path.realpath(conf) | |
|
665 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) | |
|
629 | 666 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
@@ -68,19 +68,24 b' class GitChangeset(BaseChangeset):' | |||
|
68 | 68 | def branch(self): |
|
69 | 69 | # TODO: Cache as we walk (id <-> branch name mapping) |
|
70 | 70 | refs = self.repository._repo.get_refs() |
|
71 | heads = [(key[len('refs/heads/'):], val) for key, val in refs.items() | |
|
72 | if key.startswith('refs/heads/')] | |
|
71 | heads = {} | |
|
72 | for key, val in refs.items(): | |
|
73 | for ref_key in ['refs/heads/', 'refs/remotes/origin/']: | |
|
74 | if key.startswith(ref_key): | |
|
75 | n = key[len(ref_key):] | |
|
76 | if n not in ['HEAD']: | |
|
77 | heads[n] = val | |
|
73 | 78 | |
|
74 | for name, id in heads: | |
|
79 | for name, id in heads.iteritems(): | |
|
75 | 80 | walker = self.repository._repo.object_store.get_graph_walker([id]) |
|
76 | 81 | while True: |
|
77 | id = walker.next() | |
|
78 | if not id: | |
|
82 | id_ = walker.next() | |
|
83 | if not id_: | |
|
79 | 84 | break |
|
80 | if id == self.id: | |
|
85 | if id_ == self.id: | |
|
81 | 86 | return safe_unicode(name) |
|
82 | 87 | raise ChangesetError("This should not happen... Have you manually " |
|
83 | "change id of the changeset?") | |
|
88 | "change id of the changeset?") | |
|
84 | 89 | |
|
85 | 90 | def _fix_path(self, path): |
|
86 | 91 | """ |
@@ -92,6 +97,7 b' class GitChangeset(BaseChangeset):' | |||
|
92 | 97 | return path |
|
93 | 98 | |
|
94 | 99 | def _get_id_for_path(self, path): |
|
100 | ||
|
95 | 101 | # FIXME: Please, spare a couple of minutes and make those codes cleaner; |
|
96 | 102 | if not path in self._paths: |
|
97 | 103 | path = path.strip('/') |
@@ -103,24 +109,23 b' class GitChangeset(BaseChangeset):' | |||
|
103 | 109 | splitted = path.split('/') |
|
104 | 110 | dirs, name = splitted[:-1], splitted[-1] |
|
105 | 111 | curdir = '' |
|
112 | ||
|
113 | # initially extract things from root dir | |
|
114 | for item, stat, id in tree.iteritems(): | |
|
115 | if curdir: | |
|
116 | name = '/'.join((curdir, item)) | |
|
117 | else: | |
|
118 | name = item | |
|
119 | self._paths[name] = id | |
|
120 | self._stat_modes[name] = stat | |
|
121 | ||
|
106 | 122 | for dir in dirs: |
|
107 | 123 | if curdir: |
|
108 | 124 | curdir = '/'.join((curdir, dir)) |
|
109 | 125 | else: |
|
110 | 126 | curdir = dir |
|
111 | #if curdir in self._paths: | |
|
112 | ## This path have been already traversed | |
|
113 | ## Update tree and continue | |
|
114 | #tree = self.repository._repo[self._paths[curdir]] | |
|
115 | #continue | |
|
116 | 127 | dir_id = None |
|
117 | 128 | for item, stat, id in tree.iteritems(): |
|
118 | if curdir: | |
|
119 | item_path = '/'.join((curdir, item)) | |
|
120 | else: | |
|
121 | item_path = item | |
|
122 | self._paths[item_path] = id | |
|
123 | self._stat_modes[item_path] = stat | |
|
124 | 129 | if dir == item: |
|
125 | 130 | dir_id = id |
|
126 | 131 | if dir_id: |
@@ -130,13 +135,16 b' class GitChangeset(BaseChangeset):' | |||
|
130 | 135 | raise ChangesetError('%s is not a directory' % curdir) |
|
131 | 136 | else: |
|
132 | 137 | raise ChangesetError('%s have not been found' % curdir) |
|
133 | for item, stat, id in tree.iteritems(): | |
|
134 | if curdir: | |
|
135 | name = '/'.join((curdir, item)) | |
|
136 | else: |
|
|
|
137 | name = item | |
|
138 | self._paths[name] = id | |
|
139 | self._stat_modes[name] = stat |
|
|
|
138 | ||
|
139 | # cache all items from the given traversed tree | |
|
140 | for item, stat, id in tree.iteritems(): | |
|
141 | if curdir: | |
|
142 | name = '/'.join((curdir, item)) | |
|
143 | else: | |
|
144 | name = item | |
|
145 | self._paths[name] = id | |
|
146 | self._stat_modes[name] = stat | |
|
147 | ||
|
140 | 148 | if not path in self._paths: |
|
141 | 149 | raise NodeDoesNotExistError("There is no file nor directory " |
|
142 | 150 | "at the given path %r at revision %r" |
@@ -85,14 +85,14 b' class BaseModel(object):' | |||
|
85 | 85 | |
|
86 | 86 | if isinstance(instance, cls): |
|
87 | 87 | return instance |
|
88 | elif isinstance(instance, int) or str(instance).isdigit(): | |
|
88 | elif isinstance(instance, (int, long)) or str(instance).isdigit(): | |
|
89 | 89 | return cls.get(instance) |
|
90 | 90 | else: |
|
91 | 91 | if instance: |
|
92 | 92 | if callback is None: |
|
93 | 93 | raise Exception( |
|
94 | 'given object must be int or Instance of %s got %s, ' |
|
|
95 | 'no callback provided' % (cls, type(instance)) | |
|
94 | 'given object must be int, long or Instance of %s ' | |
|
95 | 'got %s, no callback provided' % (cls, type(instance)) | |
|
96 | 96 | ) |
|
97 | 97 | else: |
|
98 | 98 | return callback(instance) |
@@ -29,7 +29,7 b' import traceback' | |||
|
29 | 29 | from pylons.i18n.translation import _ |
|
30 | 30 | from sqlalchemy.util.compat import defaultdict |
|
31 | 31 | |
|
32 | from rhodecode.lib import extract_mentioned_users | |
|
32 | from rhodecode.lib.utils2 import extract_mentioned_users | |
|
33 | 33 | from rhodecode.lib import helpers as h |
|
34 | 34 | from rhodecode.model import BaseModel |
|
35 | 35 | from rhodecode.model.db import ChangesetComment, User, Repository, Notification |
@@ -63,6 +63,7 b' class ChangesetCommentsModel(BaseModel):' | |||
|
63 | 63 | :param f_path: |
|
64 | 64 | :param line_no: |
|
65 | 65 | """ |
|
66 | ||
|
66 | 67 | if text: |
|
67 | 68 | repo = Repository.get(repo_id) |
|
68 | 69 | cs = repo.scm_instance.get_changeset(revision) |
@@ -78,7 +79,6 b' class ChangesetCommentsModel(BaseModel):' | |||
|
78 | 79 | |
|
79 | 80 | self.sa.add(comment) |
|
80 | 81 | self.sa.flush() |
|
81 | ||
|
82 | 82 | # make notification |
|
83 | 83 | line = '' |
|
84 | 84 | if line_no: |
@@ -39,7 +39,8 b' from rhodecode.lib.vcs.utils.helpers imp' | |||
|
39 | 39 | from rhodecode.lib.vcs.exceptions import VCSError |
|
40 | 40 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
41 | 41 | |
|
42 | from rhodecode.lib import str2bool, safe_str, get_changeset_safe, safe_unicode |
|
|
42 | from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \ | |
|
43 | safe_unicode | |
|
43 | 44 | from rhodecode.lib.compat import json |
|
44 | 45 | from rhodecode.lib.caching_query import FromCache |
|
45 | 46 | |
@@ -145,12 +146,18 b' class BaseModel(object):' | |||
|
145 | 146 | obj = cls.query().get(id_) |
|
146 | 147 | Session.delete(obj) |
|
147 | 148 | |
|
149 | def __repr__(self): | |
|
150 | if hasattr(self, '__unicode__'): | |
|
151 | # python repr needs to return str | |
|
152 | return safe_str(self.__unicode__()) | |
|
153 | return '<DB:%s>' % (self.__class__.__name__) | |
|
148 | 154 | |
|
149 | 155 | class RhodeCodeSetting(Base, BaseModel): |
|
150 | 156 | __tablename__ = 'rhodecode_settings' |
|
151 | 157 | __table_args__ = ( |
|
152 | 158 | UniqueConstraint('app_settings_name'), |
|
153 | {'extend_existing': True} |
|
|
159 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
160 | 'mysql_charset': 'utf8'} | |
|
154 | 161 | ) |
|
155 | 162 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
156 | 163 | app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
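The new BaseModel.__repr__ lets every model define only __unicode__: repr() stays a byte string (as Python 2 requires) and falls back to a generic <DB:ClassName> tag when no __unicode__ is present. A tiny standalone illustration of the same pattern, with safe_str simplified to plain UTF-8 encoding:

    # -*- coding: utf-8 -*-
    # Python 2 sketch of the __repr__/__unicode__ split used above

    def safe_str(value, to_encoding='utf8'):
        # simplified stand-in for rhodecode.lib.utils2.safe_str
        if isinstance(value, unicode):
            return value.encode(to_encoding)
        return str(value)

    class BaseModelSketch(object):
        def __repr__(self):
            if hasattr(self, '__unicode__'):
                # python repr() must return str, not unicode
                return safe_str(self.__unicode__())
            return '<DB:%s>' % (self.__class__.__name__)

    class UserSketch(BaseModelSketch):
        def __unicode__(self):
            return u"<UserSketch('id:1:marcink')>"

    print(repr(UserSketch()))  # <UserSketch('id:1:marcink')>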
@@ -181,8 +188,8 b' class RhodeCodeSetting(Base, BaseModel):' | |||
|
181 | 188 | """ |
|
182 | 189 | self._app_settings_value = safe_unicode(val) |
|
183 | 190 | |
|
184 | def __repr__(self): |
|
|
185 | return "<%s('%s:%s')>" % ( | |
|
191 | def __unicode__(self): | |
|
192 | return u"<%s('%s:%s')>" % ( | |
|
186 | 193 | self.__class__.__name__, |
|
187 | 194 | self.app_settings_name, self.app_settings_value |
|
188 | 195 | ) |
@@ -224,7 +231,8 b' class RhodeCodeUi(Base, BaseModel):' | |||
|
224 | 231 | __tablename__ = 'rhodecode_ui' |
|
225 | 232 | __table_args__ = ( |
|
226 | 233 | UniqueConstraint('ui_key'), |
|
227 | {'extend_existing': True} |
|
|
234 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
235 | 'mysql_charset': 'utf8'} | |
|
228 | 236 | ) |
|
229 | 237 | |
|
230 | 238 | HOOK_UPDATE = 'changegroup.update' |
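Every table in this changeset gains the same MySQL-oriented arguments, so data lands in InnoDB tables with a utf8 charset instead of the server default. For reference, a minimal declarative model using the same convention; the table and columns here are hypothetical, not part of RhodeCode:

    from sqlalchemy import Column, Integer, String, UniqueConstraint
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class ExampleSetting(Base):
        # hypothetical table, shown only to illustrate the shared __table_args__
        __tablename__ = 'example_settings'
        __table_args__ = (
            UniqueConstraint('setting_name'),
            {'extend_existing': True, 'mysql_engine': 'InnoDB',
             'mysql_charset': 'utf8'}
        )
        setting_id = Column(Integer, primary_key=True)
        setting_name = Column(String(255), nullable=False)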
@@ -274,7 +282,8 b' class User(Base, BaseModel):' | |||
|
274 | 282 | __tablename__ = 'users' |
|
275 | 283 | __table_args__ = ( |
|
276 | 284 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
277 | {'extend_existing': True} |
|
|
285 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
286 | 'mysql_charset': 'utf8'} | |
|
278 | 287 | ) |
|
279 | 288 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
280 | 289 | username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
@@ -294,10 +303,15 b' class User(Base, BaseModel):' | |||
|
294 | 303 | repositories = relationship('Repository') |
|
295 | 304 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
296 | 305 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
306 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') | |
|
297 | 307 | |
|
298 | 308 | group_member = relationship('UsersGroupMember', cascade='all') |
|
299 | 309 | |
|
300 | notifications = relationship('UserNotification',) | |
|
310 | notifications = relationship('UserNotification', cascade='all') | |
|
311 | # notifications assigned to this user | |
|
312 | user_created_notifications = relationship('Notification', cascade='all') | |
|
313 | # comments created by this user | |
|
314 | user_comments = relationship('ChangesetComment', cascade='all') | |
|
301 | 315 | |
|
302 | 316 | @hybrid_property |
|
303 | 317 | def email(self): |
@@ -328,8 +342,8 b' class User(Base, BaseModel):' | |||
|
328 | 342 | def is_admin(self): |
|
329 | 343 | return self.admin |
|
330 | 344 | |
|
331 | def __repr__(self): |
|
|
332 | return "<%s('id:%s:%s')>" % (self.__class__.__name__, | |
|
345 | def __unicode__(self): | |
|
346 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |
|
333 | 347 | self.user_id, self.username) |
|
334 | 348 | |
|
335 | 349 | @classmethod |
@@ -376,6 +390,9 b' class User(Base, BaseModel):' | |||
|
376 | 390 | |
|
377 | 391 | def __json__(self): |
|
378 | 392 | return dict( |
|
393 | user_id=self.user_id, | |
|
394 | first_name=self.name, | |
|
395 | last_name=self.lastname, | |
|
379 | 396 | email=self.email, |
|
380 | 397 | full_name=self.full_name, |
|
381 | 398 | full_name_or_username=self.full_name_or_username, |
@@ -386,7 +403,10 b' class User(Base, BaseModel):' | |||
|
386 | 403 | |
|
387 | 404 | class UserLog(Base, BaseModel): |
|
388 | 405 | __tablename__ = 'user_logs' |
|
389 | __table_args__ = {'extend_existing': True} |
|
|
406 | __table_args__ = ( | |
|
407 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
408 | 'mysql_charset': 'utf8'}, | |
|
409 | ) | |
|
390 | 410 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
391 | 411 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
392 | 412 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
@@ -405,7 +425,10 b' class UserLog(Base, BaseModel):' | |||
|
405 | 425 | |
|
406 | 426 | class UsersGroup(Base, BaseModel): |
|
407 | 427 | __tablename__ = 'users_groups' |
|
408 | __table_args__ = {'extend_existing': True} |
|
|
428 | __table_args__ = ( | |
|
429 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
430 | 'mysql_charset': 'utf8'}, | |
|
431 | ) | |
|
409 | 432 | |
|
410 | 433 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
411 | 434 | users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) |
@@ -413,9 +436,10 b' class UsersGroup(Base, BaseModel):' | |||
|
413 | 436 | |
|
414 | 437 | members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
415 | 438 | users_group_to_perm = relationship('UsersGroupToPerm', cascade='all') |
|
439 | users_group_repo_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') | |
|
416 | 440 | |
|
417 | def __repr__(self): |
|
|
418 | return '<userGroup(%s)>' % (self.users_group_name) | |
|
441 | def __unicode__(self): | |
|
442 | return u'<userGroup(%s)>' % (self.users_group_name) | |
|
419 | 443 | |
|
420 | 444 | @classmethod |
|
421 | 445 | def get_by_group_name(cls, group_name, cache=False, |
@@ -443,7 +467,10 b' class UsersGroup(Base, BaseModel):' | |||
|
443 | 467 | |
|
444 | 468 | class UsersGroupMember(Base, BaseModel): |
|
445 | 469 | __tablename__ = 'users_groups_members' |
|
446 | __table_args__ = {'extend_existing': True} |
|
|
470 | __table_args__ = ( | |
|
471 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
472 | 'mysql_charset': 'utf8'}, | |
|
473 | ) | |
|
447 | 474 | |
|
448 | 475 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
449 | 476 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
@@ -461,7 +488,8 b' class Repository(Base, BaseModel):' | |||
|
461 | 488 | __tablename__ = 'repositories' |
|
462 | 489 | __table_args__ = ( |
|
463 | 490 | UniqueConstraint('repo_name'), |
|
464 | {'extend_existing': True}, |
|
|
491 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
492 | 'mysql_charset': 'utf8'}, | |
|
465 | 493 | ) |
|
466 | 494 | |
|
467 | 495 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
@@ -489,9 +517,9 b' class Repository(Base, BaseModel):' | |||
|
489 | 517 | |
|
490 | 518 | logs = relationship('UserLog') |
|
491 | 519 | |
|
492 | def __repr__(self): |
|
|
493 | return "<%s('%s:%s')>" % (self.__class__.__name__, | |
|
494 | self.repo_id, self.repo_name) |
|
|
|
520 | def __unicode__(self): | |
|
521 | return u"<%s('%s:%s')>" % (self.__class__.__name__,self.repo_id, | |
|
522 | self.repo_name) | |
|
495 | 523 | |
|
496 | 524 | @classmethod |
|
497 | 525 | def url_sep(cls): |
@@ -710,7 +738,8 b' class RepoGroup(Base, BaseModel):' | |||
|
710 | 738 | __table_args__ = ( |
|
711 | 739 | UniqueConstraint('group_name', 'group_parent_id'), |
|
712 | 740 | CheckConstraint('group_id != group_parent_id'), |
|
713 | {'extend_existing': True}, |
|
|
741 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
742 | 'mysql_charset': 'utf8'}, | |
|
714 | 743 | ) |
|
715 | 744 | __mapper_args__ = {'order_by': 'group_name'} |
|
716 | 745 | |
@@ -728,8 +757,8 b' class RepoGroup(Base, BaseModel):' | |||
|
728 | 757 | self.group_name = group_name |
|
729 | 758 | self.parent_group = parent_group |
|
730 | 759 | |
|
731 | def __repr__(self): |
|
|
732 | return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, | |
|
760 | def __unicode__(self): | |
|
761 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, | |
|
733 | 762 | self.group_name) |
|
734 | 763 | |
|
735 | 764 | @classmethod |
@@ -837,13 +866,16 b' class RepoGroup(Base, BaseModel):' | |||
|
837 | 866 | |
|
838 | 867 | class Permission(Base, BaseModel): |
|
839 | 868 | __tablename__ = 'permissions' |
|
840 | __table_args__ = {'extend_existing': True} |
|
|
869 | __table_args__ = ( | |
|
870 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
871 | 'mysql_charset': 'utf8'}, | |
|
872 | ) | |
|
841 | 873 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
842 | 874 | permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
843 | 875 | permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
844 | 876 | |
|
845 | def __repr__(self): |
|
|
846 | return "<%s('%s:%s')>" % ( | |
|
877 | def __unicode__(self): | |
|
878 | return u"<%s('%s:%s')>" % ( | |
|
847 | 879 | self.__class__.__name__, self.permission_id, self.permission_name |
|
848 | 880 | ) |
|
849 | 881 | |
@@ -874,7 +906,8 b' class UserRepoToPerm(Base, BaseModel):' | |||
|
874 | 906 | __tablename__ = 'repo_to_perm' |
|
875 | 907 | __table_args__ = ( |
|
876 | 908 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), |
|
877 | {'extend_existing': True} |
|
|
909 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
910 | 'mysql_charset': 'utf8'} | |
|
878 | 911 | ) |
|
879 | 912 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
880 | 913 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
@@ -894,15 +927,16 b' class UserRepoToPerm(Base, BaseModel):' | |||
|
894 | 927 | Session.add(n) |
|
895 | 928 | return n |
|
896 | 929 | |
|
897 | def __repr__(self): |
|
|
898 | return '<user:%s => %s >' % (self.user, self.repository) | |
|
930 | def __unicode__(self): | |
|
931 | return u'<user:%s => %s >' % (self.user, self.repository) | |
|
899 | 932 | |
|
900 | 933 | |
|
901 | 934 | class UserToPerm(Base, BaseModel): |
|
902 | 935 | __tablename__ = 'user_to_perm' |
|
903 | 936 | __table_args__ = ( |
|
904 | 937 | UniqueConstraint('user_id', 'permission_id'), |
|
905 | {'extend_existing': True} |
|
|
938 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
939 | 'mysql_charset': 'utf8'} | |
|
906 | 940 | ) |
|
907 | 941 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
908 | 942 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
@@ -916,7 +950,8 b' class UsersGroupRepoToPerm(Base, BaseMod' | |||
|
916 | 950 | __tablename__ = 'users_group_repo_to_perm' |
|
917 | 951 | __table_args__ = ( |
|
918 | 952 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), |
|
919 | {'extend_existing': True} |
|
|
953 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
954 | 'mysql_charset': 'utf8'} | |
|
920 | 955 | ) |
|
921 | 956 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
922 | 957 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
@@ -936,15 +971,16 b' class UsersGroupRepoToPerm(Base, BaseMod' | |||
|
936 | 971 | Session.add(n) |
|
937 | 972 | return n |
|
938 | 973 | |
|
939 | def __repr__(self): |
|
|
940 | return '<userGroup:%s => %s >' % (self.users_group, self.repository) | |
|
974 | def __unicode__(self): | |
|
975 | return u'<userGroup:%s => %s >' % (self.users_group, self.repository) | |
|
941 | 976 | |
|
942 | 977 | |
|
943 | 978 | class UsersGroupToPerm(Base, BaseModel): |
|
944 | 979 | __tablename__ = 'users_group_to_perm' |
|
945 | 980 | __table_args__ = ( |
|
946 | 981 | UniqueConstraint('users_group_id', 'permission_id',), |
|
947 | {'extend_existing': True} |
|
|
982 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
983 | 'mysql_charset': 'utf8'} | |
|
948 | 984 | ) |
|
949 | 985 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
950 | 986 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
@@ -958,7 +994,8 b' class UserRepoGroupToPerm(Base, BaseMode' | |||
|
958 | 994 | __tablename__ = 'user_repo_group_to_perm' |
|
959 | 995 | __table_args__ = ( |
|
960 | 996 | UniqueConstraint('user_id', 'group_id', 'permission_id'), |
|
961 | {'extend_existing': True} |
|
|
997 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
998 | 'mysql_charset': 'utf8'} | |
|
962 | 999 | ) |
|
963 | 1000 | |
|
964 | 1001 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
@@ -975,7 +1012,8 b' class UsersGroupRepoGroupToPerm(Base, Ba' | |||
|
975 | 1012 | __tablename__ = 'users_group_repo_group_to_perm' |
|
976 | 1013 | __table_args__ = ( |
|
977 | 1014 | UniqueConstraint('users_group_id', 'group_id'), |
|
978 | {'extend_existing': True} |
|
|
1015 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1016 | 'mysql_charset': 'utf8'} | |
|
979 | 1017 | ) |
|
980 | 1018 | |
|
981 | 1019 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
@@ -990,7 +1028,11 b' class UsersGroupRepoGroupToPerm(Base, Ba' | |||
|
990 | 1028 | |
|
991 | 1029 | class Statistics(Base, BaseModel): |
|
992 | 1030 | __tablename__ = 'statistics' |
|
993 | __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing': True}) | |
|
1031 | __table_args__ = ( | |
|
1032 | UniqueConstraint('repository_id'), | |
|
1033 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1034 | 'mysql_charset': 'utf8'} | |
|
1035 | ) | |
|
994 | 1036 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
995 | 1037 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
996 | 1038 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
@@ -1006,7 +1048,8 b' class UserFollowing(Base, BaseModel):' | |||
|
1006 | 1048 | __table_args__ = ( |
|
1007 | 1049 | UniqueConstraint('user_id', 'follows_repository_id'), |
|
1008 | 1050 | UniqueConstraint('user_id', 'follows_user_id'), |
|
1009 | {'extend_existing': True} |
|
|
1051 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1052 | 'mysql_charset': 'utf8'} | |
|
1010 | 1053 | ) |
|
1011 | 1054 | |
|
1012 | 1055 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
@@ -1027,7 +1070,11 b' class UserFollowing(Base, BaseModel):' | |||
|
1027 | 1070 | |
|
1028 | 1071 | class CacheInvalidation(Base, BaseModel): |
|
1029 | 1072 | __tablename__ = 'cache_invalidation' |
|
1030 | __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing': True}) | |
|
1073 | __table_args__ = ( | |
|
1074 | UniqueConstraint('cache_key'), | |
|
1075 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1076 | 'mysql_charset': 'utf8'}, | |
|
1077 | ) | |
|
1031 | 1078 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1032 | 1079 | cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
1033 | 1080 | cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
@@ -1038,14 +1085,17 b' class CacheInvalidation(Base, BaseModel)' | |||
|
1038 | 1085 | self.cache_args = cache_args |
|
1039 | 1086 | self.cache_active = False |
|
1040 | 1087 | |
|
1041 | def __repr__(self): |
|
|
1042 | return "<%s('%s:%s')>" % (self.__class__.__name__, | |
|
1088 | def __unicode__(self): | |
|
1089 | return u"<%s('%s:%s')>" % (self.__class__.__name__, | |
|
1043 | 1090 | self.cache_id, self.cache_key) |
|
1091 | @classmethod | |
|
1092 | def clear_cache(cls): | |
|
1093 | cls.query().delete() | |
|
1044 | 1094 | |
|
1045 | 1095 | @classmethod |
|
1046 | 1096 | def _get_key(cls, key): |
|
1047 | 1097 | """ |
|
1048 | Wrapper for generating a key | |
|
1098 | Wrapper for generating a key, together with a prefix | |
|
1049 | 1099 | |
|
1050 | 1100 | :param key: |
|
1051 | 1101 | """ |
@@ -1054,13 +1104,26 b' class CacheInvalidation(Base, BaseModel)' | |||
|
1054 | 1104 | iid = rhodecode.CONFIG.get('instance_id') |
|
1055 | 1105 | if iid: |
|
1056 | 1106 | prefix = iid |
|
1057 | return "%s%s" % (prefix, key) | |
|
1107 | return "%s%s" % (prefix, key), prefix, key.rstrip('_README') | |
|
1058 | 1108 | |
|
1059 | 1109 | @classmethod |
|
1060 | 1110 | def get_by_key(cls, key): |
|
1061 | 1111 | return cls.query().filter(cls.cache_key == key).scalar() |
|
1062 | 1112 | |
|
1063 | 1113 | @classmethod |
|
1114 | def _get_or_create_key(cls, key, prefix, org_key): | |
|
1115 | inv_obj = Session.query(cls).filter(cls.cache_key == key).scalar() | |
|
1116 | if not inv_obj: | |
|
1117 | try: | |
|
1118 | inv_obj = CacheInvalidation(key, org_key) | |
|
1119 | Session.add(inv_obj) | |
|
1120 | Session.commit() | |
|
1121 | except Exception: | |
|
1122 | log.error(traceback.format_exc()) | |
|
1123 | Session.rollback() | |
|
1124 | return inv_obj | |
|
1125 | ||
|
1126 | @classmethod | |
|
1064 | 1127 | def invalidate(cls, key): |
|
1065 | 1128 | """ |
|
1066 | 1129 | Returns Invalidation object if this given key should be invalidated |
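The subtle part of the cache rework is _get_key(): it now returns a three-element tuple of the instance-prefixed key, the prefix itself, and the original key with its trailing '_README' characters stripped, which is what _get_or_create_key() above expects and what the reworked invalidate()/set_invalidate() classmethods in the following hunks consume. A standalone sketch of just that key handling, assuming instance_id plays the role of the prefix as in the hunk:

    # standalone sketch of the prefixed cache-key handling introduced above
    def get_key(key, instance_id=None):
        """Return (prefixed_key, prefix, org_key) like CacheInvalidation._get_key."""
        prefix = instance_id or ''
        return '%s%s' % (prefix, key), prefix, key.rstrip('_README')

    prefixed, prefix, org_key = get_key('rhodecode_README', instance_id='instance1-')
    print(prefixed)  # instance1-rhodecode_README
    print(org_key)   # rhodecode -- the value matched against cache_args later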
@@ -1069,10 +1132,12 b' class CacheInvalidation(Base, BaseModel)' | |||
|
1069 | 1132 | |
|
1070 | 1133 | :param key: |
|
1071 | 1134 | """ |
|
1072 | return cls.query()\ | |
|
1073 | .filter(CacheInvalidation.cache_key == key)\ | |
|
1074 | .filter(CacheInvalidation.cache_active == False)\ | |
|
1075 | .scalar() | |
|
1135 | ||
|
1136 | key, _prefix, _org_key = cls._get_key(key) | |
|
1137 | inv = cls._get_or_create_key(key, _prefix, _org_key) | |
|
1138 | ||
|
1139 | if inv and inv.cache_active is False: | |
|
1140 | return inv | |
|
1076 | 1141 | |
|
1077 | 1142 | @classmethod |
|
1078 | 1143 | def set_invalidate(cls, key): |
@@ -1082,17 +1147,16 b' class CacheInvalidation(Base, BaseModel)' | |||
|
1082 | 1147 | :param key: |
|
1083 | 1148 | """ |
|
1084 | 1149 | |
|
1085 | log.debug('marking %s for invalidation' % key) | |
|
1086 | inv_obj = Session.query(cls)\ |
|
|
1087 | .filter(cls.cache_key == key).scalar() | |
|
1088 | if inv_obj: | |
|
1089 | inv_obj.cache_active = False | |
|
1090 | else: | |
|
1091 | log.debug('cache key not found in invalidation db -> creating one') | |
|
1092 | inv_obj = CacheInvalidation(key) | |
|
1150 | key, _prefix, _org_key = cls._get_key(key) | |
|
1151 | inv_objs = Session.query(cls).filter(cls.cache_args == _org_key).all() | |
|
1152 | log.debug('marking %s key[s] %s for invalidation' % (len(inv_objs), | |
|
1153 | _org_key)) | |
|
1154 | try: | |
|
1155 | for inv_obj in inv_objs: | |
|
1156 | if inv_obj: | |
|
1157 | inv_obj.cache_active = False | |
|
1093 | 1158 | |
|
1094 | try: | |
|
1095 | Session.add(inv_obj) | |
|
1159 | Session.add(inv_obj) | |
|
1096 | 1160 | Session.commit() |
|
1097 | 1161 | except Exception: |
|
1098 | 1162 | log.error(traceback.format_exc()) |
@@ -1113,7 +1177,10 b' class CacheInvalidation(Base, BaseModel)' | |||
|
1113 | 1177 | |
|
1114 | 1178 | class ChangesetComment(Base, BaseModel): |
|
1115 | 1179 | __tablename__ = 'changeset_comments' |
|
1116 | __table_args__ = ({'extend_existing': True},) |
|
|
1180 | __table_args__ = ( | |
|
1181 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1182 | 'mysql_charset': 'utf8'}, | |
|
1183 | ) | |
|
1117 | 1184 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) |
|
1118 | 1185 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1119 | 1186 | revision = Column('revision', String(40), nullable=False) |
@@ -1142,7 +1209,10 b' class ChangesetComment(Base, BaseModel):' | |||
|
1142 | 1209 | |
|
1143 | 1210 | class Notification(Base, BaseModel): |
|
1144 | 1211 | __tablename__ = 'notifications' |
|
1145 | __table_args__ = ({'extend_existing': True},) |
|
|
1212 | __table_args__ = ( | |
|
1213 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1214 | 'mysql_charset': 'utf8'}, | |
|
1215 | ) | |
|
1146 | 1216 | |
|
1147 | 1217 | TYPE_CHANGESET_COMMENT = u'cs_comment' |
|
1148 | 1218 | TYPE_MESSAGE = u'message' |
@@ -1194,7 +1264,8 b' class UserNotification(Base, BaseModel):' | |||
|
1194 | 1264 | __tablename__ = 'user_to_notification' |
|
1195 | 1265 | __table_args__ = ( |
|
1196 | 1266 | UniqueConstraint('user_id', 'notification_id'), |
|
1197 | {'extend_existing': True} |
|
|
1267 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1268 | 'mysql_charset': 'utf8'} | |
|
1198 | 1269 | ) |
|
1199 | 1270 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) |
|
1200 | 1271 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) |
@@ -1212,7 +1283,10 b' class UserNotification(Base, BaseModel):' | |||
|
1212 | 1283 | |
|
1213 | 1284 | class DbMigrateVersion(Base, BaseModel): |
|
1214 | 1285 | __tablename__ = 'db_migrate_version' |
|
1215 | __table_args__ = {'extend_existing': True} |
|
|
1286 | __table_args__ = ( | |
|
1287 | {'extend_existing': True, 'mysql_engine':'InnoDB', | |
|
1288 | 'mysql_charset': 'utf8'}, | |
|
1289 | ) | |
|
1216 | 1290 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1217 | 1291 | repository_path = Column('repository_path', Text) |
|
1218 | 1292 | version = Column('version', Integer) |
@@ -32,6 +32,7 b' import datetime' | |||
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | |
|
34 | 34 | import rhodecode |
|
35 | from rhodecode.config.conf import DATETIME_FORMAT | |
|
35 | 36 | from rhodecode.lib import helpers as h |
|
36 | 37 | from rhodecode.model import BaseModel |
|
37 | 38 | from rhodecode.model.db import Notification, User, UserNotification |
@@ -47,11 +48,11 b' class NotificationModel(BaseModel):' | |||
|
47 | 48 | def __get_notification(self, notification): |
|
48 | 49 | if isinstance(notification, Notification): |
|
49 | 50 | return notification |
|
50 | elif isinstance(notification, int): | |
|
51 | elif isinstance(notification, (int, long)): | |
|
51 | 52 | return Notification.get(notification) |
|
52 | 53 | else: |
|
53 | 54 | if notification: |
|
54 | raise Exception('notification must be int or Instance' | |
|
55 | raise Exception('notification must be int, long or Instance' | |
|
55 | 56 | ' of Notification got %s' % type(notification)) |
|
56 | 57 | |
|
57 | 58 | def create(self, created_by, subject, body, recipients=None, |
@@ -111,6 +112,7 b' class NotificationModel(BaseModel):' | |||
|
111 | 112 | kwargs.update(email_kwargs) |
|
112 | 113 | email_body_html = EmailNotificationModel()\ |
|
113 | 114 | .get_email_tmpl(type_, **kwargs) |
|
115 | ||
|
114 | 116 | run_task(tasks.send_email, rec.email, email_subject, email_body, |
|
115 | 117 | email_body_html) |
|
116 | 118 | |
@@ -176,14 +178,13 b' class NotificationModel(BaseModel):' | |||
|
176 | 178 | notification.TYPE_REGISTRATION: _('registered in RhodeCode') |
|
177 | 179 | } |
|
178 | 180 | |
|
179 | DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" | |
|
180 | ||
|
181 | 181 | tmpl = "%(user)s %(action)s %(when)s" |
|
182 | 182 | if show_age: |
|
183 | 183 | when = h.age(notification.created_on) |
|
184 | 184 | else: |
|
185 | 185 | DTF = lambda d: datetime.datetime.strftime(d, DATETIME_FORMAT) |
|
186 | 186 | when = DTF(notification.created_on) |
|
187 | ||
|
187 | 188 | data = dict( |
|
188 | 189 | user=notification.created_by_user.username, |
|
189 | 190 | action=_map[notification.type_], when=when, |
@@ -29,15 +29,15 b' import traceback' | |||
|
29 | 29 | from datetime import datetime |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.vcs.backends import get_backend |
|
32 | ||
|
33 | from rhodecode.lib import LazyProperty | |
|
34 | from rhodecode.lib import safe_str, safe_unicode | |
|
32 | from rhodecode.lib.compat import json | |
|
33 | from rhodecode.lib.utils2 import LazyProperty, safe_str, safe_unicode | |
|
35 | 34 | from rhodecode.lib.caching_query import FromCache |
|
36 | 35 | from rhodecode.lib.hooks import log_create_repository |
|
37 | 36 | |
|
38 | 37 | from rhodecode.model import BaseModel |
|
39 | 38 | from rhodecode.model.db import Repository, UserRepoToPerm, User, Permission, \ |
|
40 | 39 | Statistics, UsersGroup, UsersGroupRepoToPerm, RhodeCodeUi, RepoGroup |
|
40 | from rhodecode.lib import helpers as h | |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | log = logging.getLogger(__name__) |
@@ -95,25 +95,28 b' class RepoModel(BaseModel):' | |||
|
95 | 95 | return repo.scalar() |
|
96 | 96 | |
|
97 | 97 | def get_users_js(self): |
|
98 | ||
|
99 | 98 | users = self.sa.query(User).filter(User.active == True).all() |
|
100 | u_tmpl = '''{id:%s, fname:"%s", lname:"%s", nname:"%s"},''' | |
|
101 | users_array = '[%s]' % '\n'.join([u_tmpl % (u.user_id, u.name, | |
|
102 | u.lastname, u.username) | |
|
103 | for u in users]) | |
|
104 | return users_array | |
|
99 | return json.dumps([ | |
|
100 | { | |
|
101 | 'id': u.user_id, | |
|
102 | 'fname': u.name, | |
|
103 | 'lname': u.lastname, | |
|
104 | 'nname': u.username, | |
|
105 | 'gravatar_lnk': h.gravatar_url(u.email, 14) | |
|
106 | } for u in users] | |
|
107 | ) | |
|
105 | 108 | |
|
106 | 109 | def get_users_groups_js(self): |
|
107 | 110 | users_groups = self.sa.query(UsersGroup)\ |
|
108 | 111 | .filter(UsersGroup.users_group_active == True).all() |
|
109 | 112 | |
|
110 | g_tmpl = '''{id:%s, grname:"%s",grmembers:"%s"},''' | |
|
111 | ||
|
112 | users_groups_array = '[%s]' % '\n'.join([g_tmpl % \ | |
|
113 |
|
114 |
|
115 |
|
116 | return users_groups_array | |
|
113 | return json.dumps([ | |
|
114 | { | |
|
115 | 'id': gr.users_group_id, | |
|
116 | 'grname': gr.users_group_name, | |
|
117 | 'grmembers': len(gr.members), | |
|
118 | } for gr in users_groups] | |
|
119 | ) | |
|
117 | 120 | |
|
118 | 121 | def _get_defaults(self, repo_name): |
|
119 | 122 | """ |
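get_users_js and get_users_groups_js above now build their payload with json.dumps instead of hand-assembled string templates, which keeps quoting and escaping correct and lets a gravatar link travel with each user record; the autocomplete widget on the permissions page consumes this JSON directly. A rough sketch of the payload shape follows; the sample user data and the avatar URL are invented for illustration (in RhodeCode the link comes from h.gravatar_url(u.email, 14)).

    import json

    # stand-ins for User rows returned by the query above
    users = [
        {'user_id': 1, 'name': 'John', 'lastname': 'Doe', 'username': 'jdoe'},
    ]

    def get_users_js(users):
        # mirrors the structure produced in the hunk above
        return json.dumps([
            {
                'id': u['user_id'],
                'fname': u['name'],
                'lname': u['lastname'],
                'nname': u['username'],
                'gravatar_lnk': 'https://example.com/avatar/%s' % u['username'],
            } for u in users
        ])

    print(get_users_js(users))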
@@ -346,6 +349,7 b' class RepoModel(BaseModel):' | |||
|
346 | 349 | :param repo: Instance of Repository, repository_id, or repository name |
|
347 | 350 | :param user: Instance of User, user_id or username |
|
348 | 351 | """ |
|
352 | ||
|
349 | 353 | user = self.__get_user(user) |
|
350 | 354 | repo = self.__get_repo(repo) |
|
351 | 355 |
@@ -28,7 +28,7 b' import logging' | |||
|
28 | 28 | import traceback |
|
29 | 29 | import shutil |
|
30 | 30 | |
|
31 | from rhodecode.lib import LazyProperty | |
|
31 | from rhodecode.lib.utils2 import LazyProperty | |
|
32 | 32 | |
|
33 | 33 | from rhodecode.model import BaseModel |
|
34 | 34 | from rhodecode.model.db import RepoGroup, RhodeCodeUi, UserRepoGroupToPerm, \ |
@@ -35,7 +35,7 b' from rhodecode.lib.vcs.nodes import File' | |||
|
35 | 35 | |
|
36 | 36 | from rhodecode import BACKENDS |
|
37 | 37 | from rhodecode.lib import helpers as h |
|
38 | from rhodecode.lib import safe_str | |
|
38 | from rhodecode.lib.utils2 import safe_str | |
|
39 | 39 | from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny |
|
40 | 40 | from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \ |
|
41 | 41 | action_logger, EmptyChangeset, REMOVED_REPO_PAT |
@@ -235,13 +235,13 b' class ScmModel(BaseModel):' | |||
|
235 | 235 | return group_iter |
|
236 | 236 | |
|
237 | 237 | def mark_for_invalidation(self, repo_name): |
|
238 | """Puts cache invalidation task into db for | |
|
238 | """ | |
|
239 | Puts cache invalidation task into db for | |
|
239 | 240 | further global cache invalidation |
|
240 | 241 | |
|
241 | 242 | :param repo_name: this repo that should invalidation take place |
|
242 | 243 | """ |
|
243 | 244 | CacheInvalidation.set_invalidate(repo_name) |
|
244 | CacheInvalidation.set_invalidate(repo_name + "_README") | |
|
245 | 245 | |
|
246 | 246 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
247 | 247 |
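With the change above, mark_for_invalidation makes a single call: the explicit "_README" key is gone because CacheInvalidation.set_invalidate now matches every cache entry registered under the repository name. A typical call site looks roughly like this (a sketch, not copied from the codebase; 'myrepo' is a placeholder):

    from rhodecode.model.scm import ScmModel

    # after a push or a settings change, flag all cached data for this repository
    ScmModel().mark_for_invalidation('myrepo')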
@@ -29,18 +29,19 b' import traceback' | |||
|
29 | 29 | from pylons import url |
|
30 | 30 | from pylons.i18n.translation import _ |
|
31 | 31 | |
|
32 | from rhodecode.lib import safe_unicode | |
|
32 | from rhodecode.lib.utils2 import safe_unicode, generate_api_key | |
|
33 | 33 | from rhodecode.lib.caching_query import FromCache |
|
34 | 34 | |
|
35 | 35 | from rhodecode.model import BaseModel |
|
36 | 36 | from rhodecode.model.db import User, UserRepoToPerm, Repository, Permission, \ |
|
37 | 37 | UserToPerm, UsersGroupRepoToPerm, UsersGroupToPerm, UsersGroupMember, \ |
|
38 | Notification, RepoGroup, UserRepoGroupToPerm, UsersGroup | |
|
38 | Notification, RepoGroup, UserRepoGroupToPerm, UsersGroup,\ | |
|
39 | UsersGroupRepoGroupToPerm | |
|
39 | 40 | from rhodecode.lib.exceptions import DefaultUserException, \ |
|
40 | 41 | UserOwnsReposException |
|
41 | 42 | |
|
42 | 43 | from sqlalchemy.exc import DatabaseError |
|
43 | from rhodecode.lib import generate_api_key | |
|
44 | ||
|
44 | 45 | from sqlalchemy.orm import joinedload |
|
45 | 46 | |
|
46 | 47 | log = logging.getLogger(__name__) |
@@ -298,14 +299,16 b' class UserModel(BaseModel):' | |||
|
298 | 299 | try: |
|
299 | 300 | if user.username == 'default': |
|
300 | 301 | raise DefaultUserException( |
|
301 |
|
302 |
|
302 | _(u"You can't remove this user since it's" | |
|
303 | " crucial for entire application") | |
|
304 | ) | |
|
303 | 305 | if user.repositories: |
|
304 | raise UserOwnsReposException(_('This user still owns %s ' | |
|
305 | 'repositories and cannot be ' | |
|
306 | 'removed. Switch owners or ' | |
|
307 |
|
308 | % user.repositories) | |
|
306 | repos = [x.repo_name for x in user.repositories] | |
|
307 | raise UserOwnsReposException( | |
|
308 | _(u'user "%s" still owns %s repositories and cannot be ' | |
|
309 | 'removed. Switch owners or remove those repositories. %s') | |
|
310 | % (user.username, len(repos), ', '.join(repos)) | |
|
311 | ) | |
|
309 | 312 | self.sa.delete(user) |
|
310 | 313 | except: |
|
311 | 314 | log.error(traceback.format_exc()) |
@@ -409,7 +412,7 b' class UserModel(BaseModel):' | |||
|
409 | 412 | for perm in default_global_perms: |
|
410 | 413 | user.permissions[GLOBAL].add(perm.permission.permission_name) |
|
411 | 414 | |
|
412 | # default for repositories | |
|
415 | # defaults for repositories, taken from default user | |
|
413 | 416 | for perm in default_repo_perms: |
|
414 | 417 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
415 | 418 | if perm.Repository.private and not (perm.Repository.user_id == uid): |
@@ -423,17 +426,18 b' class UserModel(BaseModel):' | |||
|
423 | 426 | |
|
424 | 427 | user.permissions[RK][r_k] = p |
|
425 | 428 | |
|
426 | # default for repositories groups | |
|
429 | # defaults for repositories groups taken from default user permission | |
|
430 | # on given group | |
|
427 | 431 | for perm in default_repo_groups_perms: |
|
428 | 432 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
429 | 433 | p = perm.Permission.permission_name |
|
430 | 434 | user.permissions[GK][rg_k] = p |
|
431 | 435 | |
|
432 | 436 | #================================================================== |
|
433 | # overwrite default with user permissions if any | |
|
437 | # overwrite defaults with user permissions if any found | |
|
434 | 438 | #================================================================== |
|
435 | 439 | |
|
436 | # user global | |
|
440 | # user global permissions | |
|
437 | 441 | user_perms = self.sa.query(UserToPerm)\ |
|
438 | 442 | .options(joinedload(UserToPerm.permission))\ |
|
439 | 443 | .filter(UserToPerm.user_id == uid).all() |
@@ -441,7 +445,7 b' class UserModel(BaseModel):' | |||
|
441 | 445 | for perm in user_perms: |
|
442 | 446 | user.permissions[GLOBAL].add(perm.permission.permission_name) |
|
443 | 447 | |
|
444 | # user repositories | |
|
448 | # user explicit permissions for repositories | |
|
445 | 449 | user_repo_perms = \ |
|
446 | 450 | self.sa.query(UserRepoToPerm, Permission, Repository)\ |
|
447 | 451 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
@@ -459,8 +463,8 b' class UserModel(BaseModel):' | |||
|
459 | 463 | user.permissions[RK][r_k] = p |
|
460 | 464 | |
|
461 | 465 | #================================================================== |
|
462 | # check if user is part of groups for this repository and fill in |
|
463 | # (or replace with higher) permissions | |
|
466 | # check if user is part of user groups for this repository and | |
|
467 | # fill in (or replace with higher) permissions | |
|
464 | 468 | #================================================================== |
|
465 | 469 | |
|
466 | 470 | # users group global |
@@ -473,7 +477,7 b' class UserModel(BaseModel):' | |||
|
473 | 477 | for perm in user_perms_from_users_groups: |
|
474 | 478 | user.permissions[GLOBAL].add(perm.permission.permission_name) |
|
475 | 479 | |
|
476 | # users group repositories | |
|
480 | # users group for repositories permissions | |
|
477 | 481 | user_repo_perms_from_users_groups = \ |
|
478 | 482 | self.sa.query(UsersGroupRepoToPerm, Permission, Repository,)\ |
|
479 | 483 | .join((Repository, UsersGroupRepoToPerm.repository_id == Repository.repo_id))\ |
@@ -495,12 +499,12 b' class UserModel(BaseModel):' | |||
|
495 | 499 | # get access for this user for repos group and override defaults |
|
496 | 500 | #================================================================== |
|
497 | 501 | |
|
498 | # user |
|
502 | # user explicit permissions for repository | |
|
499 | 503 | user_repo_groups_perms = \ |
|
500 | 504 | self.sa.query(UserRepoGroupToPerm, Permission, RepoGroup)\ |
|
501 | 505 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
502 | 506 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ |
|
503 | .filter(UserRepoToPerm.user_id == uid)\ | |
|
507 | .filter(UserRepoGroupToPerm.user_id == uid)\ | |
|
504 | 508 | .all() |
|
505 | 509 | |
|
506 | 510 | for perm in user_repo_groups_perms: |
@@ -510,6 +514,30 b' class UserModel(BaseModel):' | |||
|
510 | 514 | if PERM_WEIGHTS[p] > PERM_WEIGHTS[cur_perm]: |
|
511 | 515 | user.permissions[GK][rg_k] = p |
|
512 | 516 | |
|
517 | #================================================================== | |
|
518 | # check if user is part of user groups for this repo group and | |
|
519 | # fill in (or replace with higher) permissions | |
|
520 | #================================================================== | |
|
521 | ||
|
522 | # users group for repositories permissions | |
|
523 | user_repo_group_perms_from_users_groups = \ | |
|
524 | self.sa.query(UsersGroupRepoGroupToPerm, Permission, RepoGroup)\ | |
|
525 | .join((RepoGroup, UsersGroupRepoGroupToPerm.group_id == RepoGroup.group_id))\ | |
|
526 | .join((Permission, UsersGroupRepoGroupToPerm.permission_id == Permission.permission_id))\ | |
|
527 | .join((UsersGroupMember, UsersGroupRepoGroupToPerm.users_group_id == UsersGroupMember.users_group_id))\ | |
|
528 | .filter(UsersGroupMember.user_id == uid)\ | |
|
529 | .all() | |
|
530 | ||
|
531 | for perm in user_repo_group_perms_from_users_groups: | |
|
532 | g_k = perm.UsersGroupRepoGroupToPerm.group.group_name | |
|
533 | print perm, g_k | |
|
534 | p = perm.Permission.permission_name | |
|
535 | cur_perm = user.permissions[GK][g_k] | |
|
536 | # overwrite permission only if it's greater than permission | |
|
537 | # given from other sources | |
|
538 | if PERM_WEIGHTS[p] > PERM_WEIGHTS[cur_perm]: | |
|
539 | user.permissions[GK][g_k] = p | |
|
540 | ||
|
513 | 541 | return user |
|
514 | 542 | |
|
515 | 543 | def has_perm(self, user, perm): |
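The block added above applies to repository groups the same overlay rule already used for repositories: a permission contributed by one of the user's groups only replaces the current value when its weight is higher. A compact sketch of that comparison (the PERM_WEIGHTS values here are illustrative; RhodeCode defines its own mapping):

    # Illustrative weights -- a higher number wins.
    PERM_WEIGHTS = {
        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,
    }

    def overlay(current, candidates):
        """Return the strongest permission among ``current`` and ``candidates``."""
        best = current
        for perm in candidates:
            if PERM_WEIGHTS[perm] > PERM_WEIGHTS[best]:
                best = perm
        return best

    # defaults grant read, one of the user's groups grants admin -> admin wins
    assert overlay('group.read', ['group.write', 'group.admin']) == 'group.admin'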
@@ -185,6 +185,41 b' div.options a {' | |||
|
185 | 185 | border-bottom-right-radius: 8px; |
|
186 | 186 | } |
|
187 | 187 | |
|
188 | .top-left-rounded-corner-mid { | |
|
189 | -webkit-border-top-left-radius: 4px; | |
|
190 | -khtml-border-radius-topleft: 4px; | |
|
191 | -moz-border-radius-topleft: 4px; | |
|
192 | border-top-left-radius: 4px; | |
|
193 | } | |
|
194 | ||
|
195 | .top-right-rounded-corner-mid { | |
|
196 | -webkit-border-top-right-radius: 4px; | |
|
197 | -khtml-border-radius-topright: 4px; | |
|
198 | -moz-border-radius-topright: 4px; | |
|
199 | border-top-right-radius: 4px; | |
|
200 | } | |
|
201 | ||
|
202 | .bottom-left-rounded-corner-mid { | |
|
203 | -webkit-border-bottom-left-radius: 4px; | |
|
204 | -khtml-border-radius-bottomleft: 4px; | |
|
205 | -moz-border-radius-bottomleft: 4px; | |
|
206 | border-bottom-left-radius: 4px; | |
|
207 | } | |
|
208 | ||
|
209 | .bottom-right-rounded-corner-mid { | |
|
210 | -webkit-border-bottom-right-radius: 4px; | |
|
211 | -khtml-border-radius-bottomright: 4px; | |
|
212 | -moz-border-radius-bottomright: 4px; | |
|
213 | border-bottom-right-radius: 4px; | |
|
214 | } | |
|
215 | ||
|
216 | .help-block { | |
|
217 | color: #999999; | |
|
218 | display: block; | |
|
219 | margin-bottom: 0; | |
|
220 | margin-top: 5px; | |
|
221 | } | |
|
222 | ||
|
188 | 223 | #header { |
|
189 | 224 | margin: 0; |
|
190 | 225 | padding: 0 10px; |
@@ -197,18 +232,16 b' div.options a {' | |||
|
197 | 232 | -moz-border-radius: 0px 0px 8px 8px; |
|
198 | 233 | border-radius: 0px 0px 8px 8px; |
|
199 | 234 | height: 37px; |
|
200 | background-color: # |
|
235 | background-color: #003B76; | |
|
201 | 236 | background-repeat: repeat-x; |
|
202 | background-image: -khtml-gradient(linear, left top, left bottom, from(# |
|
203 | to(#eedc94) ); | |
|
237 | background-image: -khtml-gradient(linear, left top, left bottom, from(#003B76), to(#00376E) ); | |
|
204 | 238 | background-image: -moz-linear-gradient(top, #003b76, #00376e); |
|
205 | 239 | background-image: -ms-linear-gradient(top, #003b76, #00376e); |
|
206 | 240 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76), color-stop(100%, #00376e) ); |
|
207 | 241 | background-image: -webkit-linear-gradient(top, #003b76, #00376e); |
|
208 | 242 | background-image: -o-linear-gradient(top, #003b76, #00376e); |
|
209 | 243 | background-image: linear-gradient(top, #003b76, #00376e); |
|
210 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#003b76', | |
|
211 | endColorstr='#00376e', GradientType=0 ); | |
|
244 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#003b76',endColorstr='#00376e', GradientType=0 ); | |
|
212 | 245 | box-shadow: 0 2px 2px rgba(0, 0, 0, 0.6); |
|
213 | 246 | } |
|
214 | 247 | |
@@ -260,9 +293,9 b' div.options a {' | |||
|
260 | 293 | min-height: 44px; |
|
261 | 294 | clear: both; |
|
262 | 295 | position: relative; |
|
263 | background-color: # |
|
296 | background-color: #003B76; | |
|
264 | 297 | background-repeat: repeat-x; |
|
265 | background-image: -khtml-gradient(linear, left top, left bottom, from(# |
|
298 | background-image: -khtml-gradient(linear, left top, left bottom, from(#003B76), to(#00376E) ); | |
|
266 | 299 | background-image: -moz-linear-gradient(top, #003b76, #00376e); |
|
267 | 300 | background-image: -ms-linear-gradient(top, #003b76, #00376e); |
|
268 | 301 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76),color-stop(100%, #00376e) ); |
@@ -289,6 +322,14 b' div.options a {' | |||
|
289 | 322 | -moz-border-radius: 0px 0px 0px 0px; |
|
290 | 323 | border-radius: 0px 0px 0px 0px; |
|
291 | 324 | } |
|
325 | ||
|
326 | .ie7 #header #header-inner.hover, | |
|
327 | .ie8 #header #header-inner.hover, | |
|
328 | .ie9 #header #header-inner.hover | |
|
329 | { | |
|
330 | z-index: auto !important; | |
|
331 | } | |
|
332 | ||
|
292 | 333 | #header #header-inner #home a { |
|
293 | 334 | height: 40px; |
|
294 | 335 | width: 46px; |
@@ -997,9 +1038,9 b' tbody .yui-dt-editable { cursor: pointer' | |||
|
997 | 1038 | #content div.box div.title { |
|
998 | 1039 | clear: both; |
|
999 | 1040 | overflow: hidden; |
|
1000 | background-color: # |
|
1041 | background-color: #003B76; | |
|
1001 | 1042 | background-repeat: repeat-x; |
|
1002 | background-image: -khtml-gradient(linear, left top, left bottom, from(# |
|
1043 | background-image: -khtml-gradient(linear, left top, left bottom, from(#003B76), to(#00376E) ); | |
|
1003 | 1044 | background-image: -moz-linear-gradient(top, #003b76, #00376e); |
|
1004 | 1045 | background-image: -ms-linear-gradient(top, #003b76, #00376e); |
|
1005 | 1046 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76), color-stop(100%, #00376e) ); |
@@ -1758,33 +1799,21 b' div.form div.fields div.field div.button' | |||
|
1758 | 1799 | } |
|
1759 | 1800 | |
|
1760 | 1801 | #footer div#footer-inner { |
|
1761 | background-color: #eedc94; background-repeat : repeat-x; | |
|
1762 | background-image : -khtml-gradient( linear, left top, left bottom, | |
|
1763 | from( #fceec1), to( #eedc94)); background-image : -moz-linear-gradient( | |
|
1764 | top, #003b76, #00376e); background-image : -ms-linear-gradient( top, | |
|
1765 | #003b76, #00376e); background-image : -webkit-gradient( linear, left | |
|
1766 | top, left bottom, color-stop( 0%, #003b76), color-stop( 100%, #00376e)); | |
|
1802 | background-color: #003B76; | |
|
1803 | background-repeat : repeat-x; | |
|
1804 | background-image : -khtml-gradient( linear, left top, left bottom, from(#003B76), to(#00376E)); | |
|
1805 | background-image : -moz-linear-gradient(top, #003b76, #00376e); | |
|
1806 | background-image : -ms-linear-gradient( top, #003b76, #00376e); | |
|
1807 | background-image : -webkit-gradient( linear, left top, left bottom, color-stop( 0%, #003b76), color-stop( 100%, #00376e)); | |
|
1767 | 1808 | background-image : -webkit-linear-gradient( top, #003b76, #00376e)); |
|
1768 | 1809 | background-image : -o-linear-gradient( top, #003b76, #00376e)); |
|
1769 | background-image : linear-gradient( top, #003b76, #00376e); filter : |
|
1770 | progid : DXImageTransform.Microsoft.gradient ( startColorstr = | |
|
1771 | '#003b76', endColorstr = '#00376e', GradientType = 0); | |
|
1810 | background-image : linear-gradient( top, #003b76, #00376e); | |
|
1811 | filter :progid : DXImageTransform.Microsoft.gradient ( startColorstr = '#003b76', endColorstr = '#00376e', GradientType = 0); | |
|
1772 | 1812 | box-shadow: 0 2px 2px rgba(0, 0, 0, 0.6); |
|
1773 | 1813 | -webkit-border-radius: 4px 4px 4px 4px; |
|
1774 | 1814 | -khtml-border-radius: 4px 4px 4px 4px; |
|
1775 | 1815 | -moz-border-radius: 4px 4px 4px 4px; |
|
1776 | 1816 | border-radius: 4px 4px 4px 4px; |
|
1777 | background-repeat: repeat-x; | |
|
1778 | background-image: -khtml-gradient(linear, left top, left bottom, from(#fceec1), | |
|
1779 | to(#eedc94) ); | |
|
1780 | background-image: -moz-linear-gradient(top, #003b76, #00376e); | |
|
1781 | background-image: -ms-linear-gradient(top, #003b76, #00376e); | |
|
1782 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76), color-stop(100%, #00376e) ); | |
|
1783 | background-image: -webkit-linear-gradient(top, #003b76, #00376e); | |
|
1784 | background-image: -o-linear-gradient(top, #003b76, #00376e); | |
|
1785 | background-image: linear-gradient(top, #003b76, #00376e); | |
|
1786 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#003b76', | |
|
1787 | endColorstr='#00376e', GradientType=0 ); | |
|
1788 | 1817 | } |
|
1789 | 1818 | |
|
1790 | 1819 | #footer div#footer-inner p { |
@@ -1808,30 +1837,18 b' div.form div.fields div.field div.button' | |||
|
1808 | 1837 | clear: both; |
|
1809 | 1838 | overflow: hidden; |
|
1810 | 1839 | position: relative; |
|
1811 | background-color: #eedc94; background-repeat : repeat-x; | |
|
1812 | background-image : -khtml-gradient( linear, left top, left bottom, | |
|
1813 | from( #fceec1), to( #eedc94)); background-image : -moz-linear-gradient( | |
|
1814 | top, #003b76, #00376e); background-image : -ms-linear-gradient( top, | |
|
1815 | #003b76, #00376e); background-image : -webkit-gradient( linear, left | |
|
1816 | top, left bottom, color-stop( 0%, #003b76), color-stop( 100%, #00376e)); | |
|
1840 | background-color: #003B76; | |
|
1841 | background-repeat : repeat-x; | |
|
1842 | background-image : -khtml-gradient( linear, left top, left bottom, from(#003B76), to(#00376E)); | |
|
1843 | background-image : -moz-linear-gradient( top, #003b76, #00376e); | |
|
1844 | background-image : -ms-linear-gradient( top, #003b76, #00376e); | |
|
1845 | background-image : -webkit-gradient( linear, left top, left bottom, color-stop( 0%, #003b76), color-stop( 100%, #00376e)); | |
|
1817 | 1846 | background-image : -webkit-linear-gradient( top, #003b76, #00376e)); |
|
1818 | 1847 | background-image : -o-linear-gradient( top, #003b76, #00376e)); |
|
1819 | background-image : linear-gradient( top, #003b76, #00376e); filter : |
|
1820 | progid : DXImageTransform.Microsoft.gradient ( startColorstr = | |
|
1821 | '#003b76', endColorstr = '#00376e', GradientType = 0); | |
|
1848 | background-image : linear-gradient( top, #003b76, #00376e); | |
|
1849 | filter : progid : DXImageTransform.Microsoft.gradient ( startColorstr = '#003b76', endColorstr = '#00376e', GradientType = 0); | |
|
1822 | 1850 | margin: 0 auto; |
|
1823 | 1851 | padding: 0; |
|
1824 | background-repeat: repeat-x; | |
|
1825 | background-image: -khtml-gradient(linear, left top, left bottom, from(#fceec1), | |
|
1826 | to(#eedc94) ); | |
|
1827 | background-image: -moz-linear-gradient(top, #003b76, #00376e); | |
|
1828 | background-image: -ms-linear-gradient(top, #003b76, #00376e); | |
|
1829 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76), color-stop(100%, #00376e) ); | |
|
1830 | background-image: -webkit-linear-gradient(top, #003b76, #00376e); | |
|
1831 | background-image: -o-linear-gradient(top, #003b76, #00376e); | |
|
1832 | background-image: linear-gradient(top, #003b76, #00376e); | |
|
1833 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#003b76', | |
|
1834 | endColorstr='#00376e', GradientType=0 ); | |
|
1835 | 1852 | } |
|
1836 | 1853 | |
|
1837 | 1854 | #login div.inner { |
@@ -1908,16 +1925,14 b' div.form div.fields div.field div.button' | |||
|
1908 | 1925 | width: 278px; |
|
1909 | 1926 | |
|
1910 | 1927 | background-repeat: repeat-x; |
|
1911 | background-image: -khtml-gradient(linear, left top, left bottom, from(# |
|
1912 | to(#eedc94) ); | |
|
1928 | background-image: -khtml-gradient(linear, left top, left bottom, from(#003B76), to(#00376E) ); | |
|
1913 | 1929 | background-image: -moz-linear-gradient(top, #003b76, #00376e); |
|
1914 | 1930 | background-image: -ms-linear-gradient(top, #003b76, #00376e); |
|
1915 | 1931 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76), color-stop(100%, #00376e) ); |
|
1916 | 1932 | background-image: -webkit-linear-gradient(top, #003b76, #00376e); |
|
1917 | 1933 | background-image: -o-linear-gradient(top, #003b76, #00376e); |
|
1918 | 1934 | background-image: linear-gradient(top, #003b76, #00376e); |
|
1919 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#003b76', | |
|
1920 | endColorstr='#00376e', GradientType=0 ); | |
|
1935 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#003b76', endColorstr='#00376e', GradientType=0 ); | |
|
1921 | 1936 | |
|
1922 | 1937 | z-index: 999; |
|
1923 | 1938 | -webkit-border-radius: 0px 0px 4px 4px; |
@@ -2060,10 +2075,9 b' div.form div.fields div.field div.button' | |||
|
2060 | 2075 | clear: both; |
|
2061 | 2076 | overflow: hidden; |
|
2062 | 2077 | position: relative; |
|
2063 | background-color: # |
|
2078 | background-color: #003B76; | |
|
2064 | 2079 | background-repeat: repeat-x; |
|
2065 | background-image: -khtml-gradient(linear, left top, left bottom, from(# |
|
2066 | to(#eedc94) ); | |
|
2080 | background-image: -khtml-gradient(linear, left top, left bottom, from(#003B76), to(#00376E) ); | |
|
2067 | 2081 | background-image: -moz-linear-gradient(top, #003b76, #00376e); |
|
2068 | 2082 | background-image: -ms-linear-gradient(top, #003b76, #00376e); |
|
2069 | 2083 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #003b76), color-stop(100%, #00376e) ); |
@@ -2794,12 +2808,12 b' table.code-browser .browser-dir {' | |||
|
2794 | 2808 | } |
|
2795 | 2809 | |
|
2796 | 2810 | .ac .yui-ac { |
|
2797 | position: |
|
2811 | position: inherit; | |
|
2798 | 2812 | font-size: 100%; |
|
2799 | 2813 | } |
|
2800 | 2814 | |
|
2801 | 2815 | .ac .perm_ac { |
|
2802 | width: |
|
2816 | width: 20em; | |
|
2803 | 2817 | } |
|
2804 | 2818 | |
|
2805 | 2819 | .ac .yui-ac-input { |
@@ -2809,16 +2823,15 b' table.code-browser .browser-dir {' | |||
|
2809 | 2823 | .ac .yui-ac-container { |
|
2810 | 2824 | position: absolute; |
|
2811 | 2825 | top: 1.6em; |
|
2812 | width: |
|
2826 | width: auto; | |
|
2813 | 2827 | } |
|
2814 | 2828 | |
|
2815 | 2829 | .ac .yui-ac-content { |
|
2816 | 2830 | position: absolute; |
|
2817 | width: 100%; | |
|
2818 | 2831 | border: 1px solid gray; |
|
2819 | 2832 | background: #fff; |
|
2820 | overflow: hidden; | |
|
2821 | 2833 | z-index: 9050; |
|
2834 | ||
|
2822 | 2835 | } |
|
2823 | 2836 | |
|
2824 | 2837 | .ac .yui-ac-shadow { |
@@ -2827,7 +2840,7 b' table.code-browser .browser-dir {' | |||
|
2827 | 2840 | background: #000; |
|
2828 | 2841 | -moz-opacity: 0.1px; |
|
2829 | 2842 | opacity: .10; |
|
2830 | filter: alpha(opacity = |
|
2843 | filter: alpha(opacity = 10); | |
|
2831 | 2844 | z-index: 9049; |
|
2832 | 2845 | margin: .3em; |
|
2833 | 2846 | } |
@@ -2836,6 +2849,7 b' table.code-browser .browser-dir {' | |||
|
2836 | 2849 | width: 100%; |
|
2837 | 2850 | margin: 0; |
|
2838 | 2851 | padding: 0; |
|
2852 | z-index: 9050; | |
|
2839 | 2853 | } |
|
2840 | 2854 | |
|
2841 | 2855 | .ac .yui-ac-content li { |
@@ -2843,15 +2857,28 b' table.code-browser .browser-dir {' | |||
|
2843 | 2857 | white-space: nowrap; |
|
2844 | 2858 | margin: 0; |
|
2845 | 2859 | padding: 2px 5px; |
|
2860 | height: 18px; | |
|
2861 | z-index: 9050; | |
|
2862 | display: block; | |
|
2863 | width: auto !important; | |
|
2864 | } | |
|
2865 | ||
|
2866 | .ac .yui-ac-content li .ac-container-wrap{ | |
|
2867 | width: auto; | |
|
2846 | 2868 | } |
|
2847 | 2869 | |
|
2848 | 2870 | .ac .yui-ac-content li.yui-ac-prehighlight { |
|
2849 | 2871 | background: #B3D4FF; |
|
2872 | z-index: 9050; | |
|
2850 | 2873 | } |
|
2851 | 2874 | |
|
2852 | 2875 | .ac .yui-ac-content li.yui-ac-highlight { |
|
2853 | 2876 | background: #556CB5; |
|
2854 | 2877 | color: #FFF; |
|
2878 | z-index: 9050; | |
|
2879 | } | |
|
2880 | .ac .yui-ac-bd{ | |
|
2881 | z-index: 9050; | |
|
2855 | 2882 | } |
|
2856 | 2883 | |
|
2857 | 2884 | .follow { |
@@ -3006,17 +3033,14 b' table.code-browser .browser-dir {' | |||
|
3006 | 3033 | .error_msg { |
|
3007 | 3034 | background-color: #c43c35; |
|
3008 | 3035 | background-repeat: repeat-x; |
|
3009 | background-image: -khtml-gradient(linear, left top, left bottom, from(#ee5f5b), | |
|
3010 | to(#c43c35) ); | |
|
3036 | background-image: -khtml-gradient(linear, left top, left bottom, from(#ee5f5b), to(#c43c35) ); | |
|
3011 | 3037 | background-image: -moz-linear-gradient(top, #ee5f5b, #c43c35); |
|
3012 | 3038 | background-image: -ms-linear-gradient(top, #ee5f5b, #c43c35); |
|
3013 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #ee5f5b), | |
|
3014 | color-stop(100%, #c43c35) ); | |
|
3039 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #ee5f5b), color-stop(100%, #c43c35) ); | |
|
3015 | 3040 | background-image: -webkit-linear-gradient(top, #ee5f5b, #c43c35); |
|
3016 | 3041 | background-image: -o-linear-gradient(top, #ee5f5b, #c43c35); |
|
3017 | 3042 | background-image: linear-gradient(top, #ee5f5b, #c43c35); |
|
3018 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ee5f5b', | |
|
3019 | endColorstr='#c43c35', GradientType=0 ); | |
|
3043 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ee5f5b',endColorstr='#c43c35', GradientType=0 ); | |
|
3020 | 3044 | border-color: #c43c35 #c43c35 #882a25; |
|
3021 | 3045 | } |
|
3022 | 3046 | |
@@ -3024,51 +3048,42 b' table.code-browser .browser-dir {' | |||
|
3024 | 3048 | color: #404040 !important; |
|
3025 | 3049 | background-color: #eedc94; |
|
3026 | 3050 | background-repeat: repeat-x; |
|
3027 | background-image: -khtml-gradient(linear, left top, left bottom, from(#fceec1), | |
|
3028 | to(#eedc94) ); | |
|
3051 | background-image: -khtml-gradient(linear, left top, left bottom, from(#fceec1), to(#eedc94) ); | |
|
3029 | 3052 | background-image: -moz-linear-gradient(top, #fceec1, #eedc94); |
|
3030 | 3053 | background-image: -ms-linear-gradient(top, #fceec1, #eedc94); |
|
3031 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #fceec1), | |
|
3032 | color-stop(100%, #eedc94) ); | |
|
3054 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #fceec1), color-stop(100%, #eedc94) ); | |
|
3033 | 3055 | background-image: -webkit-linear-gradient(top, #fceec1, #eedc94); |
|
3034 | 3056 | background-image: -o-linear-gradient(top, #fceec1, #eedc94); |
|
3035 | 3057 | background-image: linear-gradient(top, #fceec1, #eedc94); |
|
3036 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fceec1', | |
|
3037 | endColorstr='#eedc94', GradientType=0 ); | |
|
3058 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fceec1', endColorstr='#eedc94', GradientType=0 ); | |
|
3038 | 3059 | border-color: #eedc94 #eedc94 #e4c652; |
|
3039 | 3060 | } |
|
3040 | 3061 | |
|
3041 | 3062 | .success_msg { |
|
3042 | 3063 | background-color: #57a957; |
|
3043 | 3064 | background-repeat: repeat-x !important; |
|
3044 | background-image: -khtml-gradient(linear, left top, left bottom, from(#62c462), | |
|
3045 | to(#57a957) ); | |
|
3065 | background-image: -khtml-gradient(linear, left top, left bottom, from(#62c462), to(#57a957) ); | |
|
3046 | 3066 | background-image: -moz-linear-gradient(top, #62c462, #57a957); |
|
3047 | 3067 | background-image: -ms-linear-gradient(top, #62c462, #57a957); |
|
3048 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #62c462), | |
|
3049 | color-stop(100%, #57a957) ); | |
|
3068 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #62c462), color-stop(100%, #57a957) ); | |
|
3050 | 3069 | background-image: -webkit-linear-gradient(top, #62c462, #57a957); |
|
3051 | 3070 | background-image: -o-linear-gradient(top, #62c462, #57a957); |
|
3052 | 3071 | background-image: linear-gradient(top, #62c462, #57a957); |
|
3053 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#62c462', | |
|
3054 | endColorstr='#57a957', GradientType=0 ); | |
|
3072 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#62c462', endColorstr='#57a957', GradientType=0 ); | |
|
3055 | 3073 | border-color: #57a957 #57a957 #3d773d; |
|
3056 | 3074 | } |
|
3057 | 3075 | |
|
3058 | 3076 | .notice_msg { |
|
3059 | 3077 | background-color: #339bb9; |
|
3060 | 3078 | background-repeat: repeat-x; |
|
3061 | background-image: -khtml-gradient(linear, left top, left bottom, from(#5bc0de), | |
|
3062 | to(#339bb9) ); | |
|
3079 | background-image: -khtml-gradient(linear, left top, left bottom, from(#5bc0de), to(#339bb9) ); | |
|
3063 | 3080 | background-image: -moz-linear-gradient(top, #5bc0de, #339bb9); |
|
3064 | 3081 | background-image: -ms-linear-gradient(top, #5bc0de, #339bb9); |
|
3065 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #5bc0de), | |
|
3066 | color-stop(100%, #339bb9) ); | |
|
3082 | background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #5bc0de), color-stop(100%, #339bb9) ); | |
|
3067 | 3083 | background-image: -webkit-linear-gradient(top, #5bc0de, #339bb9); |
|
3068 | 3084 | background-image: -o-linear-gradient(top, #5bc0de, #339bb9); |
|
3069 | 3085 | background-image: linear-gradient(top, #5bc0de, #339bb9); |
|
3070 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#5bc0de', | |
|
3071 | endColorstr='#339bb9', GradientType=0 ); | |
|
3086 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#5bc0de', endColorstr='#339bb9', GradientType=0 ); | |
|
3072 | 3087 | border-color: #339bb9 #339bb9 #22697d; |
|
3073 | 3088 | } |
|
3074 | 3089 | |
@@ -3096,8 +3111,7 b' table.code-browser .browser-dir {' | |||
|
3096 | 3111 | } |
|
3097 | 3112 | |
|
3098 | 3113 | #msg_close { |
|
3099 | background: transparent url("../icons/cross_grey_small.png") no-repeat | |
|
3100 | scroll 0 0; | |
|
3114 | background: transparent url("../icons/cross_grey_small.png") no-repeat scroll 0 0; | |
|
3101 | 3115 | cursor: pointer; |
|
3102 | 3116 | height: 16px; |
|
3103 | 3117 | position: absolute; |
@@ -3105,7 +3119,12 b' table.code-browser .browser-dir {' | |||
|
3105 | 3119 | top: 5px; |
|
3106 | 3120 | width: 16px; |
|
3107 | 3121 | } |
|
3108 | ||
|
3122 | div#legend_data{ | |
|
3123 | padding-left:10px; | |
|
3124 | } | |
|
3125 | div#legend_container table{ | |
|
3126 | border: none !important; | |
|
3127 | } | |
|
3109 | 3128 | div#legend_container table,div#legend_choices table { |
|
3110 | 3129 | width: auto !important; |
|
3111 | 3130 | } |
@@ -4115,6 +4134,56 b' form.comment-inline-form {' | |||
|
4115 | 4134 | padding:5px 0px 5px 38px; |
|
4116 | 4135 | } |
|
4117 | 4136 | |
|
4137 | /**** | |
|
4138 | PERMS | |
|
4139 | *****/ | |
|
4140 | #perms .perms_section_head { | |
|
4141 | padding:10px 10px 10px 0px; | |
|
4142 | font-size:16px; | |
|
4143 | font-weight: bold; | |
|
4144 | } | |
|
4145 | ||
|
4146 | #perms .perm_tag{ | |
|
4147 | padding: 1px 3px 1px 3px; | |
|
4148 | font-size: 10px; | |
|
4149 | font-weight: bold; | |
|
4150 | text-transform: uppercase; | |
|
4151 | white-space: nowrap; | |
|
4152 | -webkit-border-radius: 3px; | |
|
4153 | -moz-border-radius: 3px; | |
|
4154 | border-radius: 3px; | |
|
4155 | } | |
|
4156 | ||
|
4157 | #perms .perm_tag.admin{ | |
|
4158 | background-color: #B94A48; | |
|
4159 | color: #ffffff; | |
|
4160 | } | |
|
4161 | ||
|
4162 | #perms .perm_tag.write{ | |
|
4163 | background-color: #B94A48; | |
|
4164 | color: #ffffff; | |
|
4165 | } | |
|
4166 | ||
|
4167 | #perms .perm_tag.read{ | |
|
4168 | background-color: #468847; | |
|
4169 | color: #ffffff; | |
|
4170 | } | |
|
4171 | ||
|
4172 | #perms .perm_tag.none{ | |
|
4173 | background-color: #bfbfbf; | |
|
4174 | color: #ffffff; | |
|
4175 | } | |
|
4176 | ||
|
4177 | .perm-gravatar{ | |
|
4178 | vertical-align:middle; | |
|
4179 | padding:2px; | |
|
4180 | } | |
|
4181 | .perm-gravatar-ac{ | |
|
4182 | vertical-align:middle; | |
|
4183 | padding:2px; | |
|
4184 | width: 14px; | |
|
4185 | height: 14px; | |
|
4186 | } | |
|
4118 | 4187 | |
|
4119 | 4188 | /***************************************************************************** |
|
4120 | 4189 | DIFFS CSS |
@@ -609,6 +609,178 b' var deleteNotification = function(url, n' | |||
|
609 | 609 | }; |
|
610 | 610 | |
|
611 | 611 | |
|
612 | /** MEMBERS AUTOCOMPLETE WIDGET **/ | |
|
613 | ||
|
614 | var MembersAutoComplete = function (users_list, groups_list, group_lbl, members_lbl) { | |
|
615 | var myUsers = users_list; | |
|
616 | var myGroups = groups_list; | |
|
617 | ||
|
618 | // Define a custom search function for the DataSource of users | |
|
619 | var matchUsers = function (sQuery) { | |
|
620 | // Case insensitive matching | |
|
621 | var query = sQuery.toLowerCase(); | |
|
622 | var i = 0; | |
|
623 | var l = myUsers.length; | |
|
624 | var matches = []; | |
|
625 | ||
|
626 | // Match against each name of each contact | |
|
627 | for (; i < l; i++) { | |
|
628 | contact = myUsers[i]; | |
|
629 | if ((contact.fname.toLowerCase().indexOf(query) > -1) || (contact.lname.toLowerCase().indexOf(query) > -1) || (contact.nname && (contact.nname.toLowerCase().indexOf(query) > -1))) { | |
|
630 | matches[matches.length] = contact; | |
|
631 | } | |
|
632 | } | |
|
633 | return matches; | |
|
634 | }; | |
|
635 | ||
|
636 | // Define a custom search function for the DataSource of usersGroups | |
|
637 | var matchGroups = function (sQuery) { | |
|
638 | // Case insensitive matching | |
|
639 | var query = sQuery.toLowerCase(); | |
|
640 | var i = 0; | |
|
641 | var l = myGroups.length; | |
|
642 | var matches = []; | |
|
643 | ||
|
644 | // Match against each name of each contact | |
|
645 | for (; i < l; i++) { | |
|
646 | matched_group = myGroups[i]; | |
|
647 | if (matched_group.grname.toLowerCase().indexOf(query) > -1) { | |
|
648 | matches[matches.length] = matched_group; | |
|
649 | } | |
|
650 | } | |
|
651 | return matches; | |
|
652 | }; | |
|
653 | ||
|
654 | //match all | |
|
655 | var matchAll = function (sQuery) { | |
|
656 | u = matchUsers(sQuery); | |
|
657 | g = matchGroups(sQuery); | |
|
658 | return u.concat(g); | |
|
659 | }; | |
|
660 | ||
|
661 | // DataScheme for members | |
|
662 | var memberDS = new YAHOO.util.FunctionDataSource(matchAll); | |
|
663 | memberDS.responseSchema = { | |
|
664 | fields: ["id", "fname", "lname", "nname", "grname", "grmembers", "gravatar_lnk"] | |
|
665 | }; | |
|
666 | ||
|
667 | // DataScheme for owner | |
|
668 | var ownerDS = new YAHOO.util.FunctionDataSource(matchUsers); | |
|
669 | ownerDS.responseSchema = { | |
|
670 | fields: ["id", "fname", "lname", "nname", "gravatar_lnk"] | |
|
671 | }; | |
|
672 | ||
|
673 | // Instantiate AutoComplete for perms | |
|
674 | var membersAC = new YAHOO.widget.AutoComplete("perm_new_member_name", "perm_container", memberDS); | |
|
675 | membersAC.useShadow = false; | |
|
676 | membersAC.resultTypeList = false; | |
|
677 | ||
|
678 | // Instantiate AutoComplete for owner | |
|
679 | var ownerAC = new YAHOO.widget.AutoComplete("user", "owner_container", ownerDS); | |
|
680 | ownerAC.useShadow = false; | |
|
681 | ownerAC.resultTypeList = false; | |
|
682 | ||
|
683 | ||
|
684 | // Helper highlight function for the formatter | |
|
685 | var highlightMatch = function (full, snippet, matchindex) { | |
|
686 | return full.substring(0, matchindex) | |
|
687 | + "<span class='match'>" | |
|
688 | + full.substr(matchindex, snippet.length) | |
|
689 | + "</span>" + full.substring(matchindex + snippet.length); | |
|
690 | }; | |
|
691 | ||
|
692 | // Custom formatter to highlight the matching letters | |
|
693 | var custom_formatter = function (oResultData, sQuery, sResultMatch) { | |
|
694 | var query = sQuery.toLowerCase(); | |
|
695 | var _gravatar = function(res, em, group){ | |
|
696 | if (group !== undefined){ | |
|
697 | em = '/images/icons/group.png' | |
|
698 | } | |
|
699 | tmpl = '<div class="ac-container-wrap"><img class="perm-gravatar-ac" src="{0}"/>{1}</div>' | |
|
700 | return tmpl.format(em,res) | |
|
701 | } | |
|
702 | // group | |
|
703 | if (oResultData.grname != undefined) { | |
|
704 | var grname = oResultData.grname; | |
|
705 | var grmembers = oResultData.grmembers; | |
|
706 | var grnameMatchIndex = grname.toLowerCase().indexOf(query); | |
|
707 | var grprefix = "{0}: ".format(group_lbl); | |
|
708 | var grsuffix = " (" + grmembers + " )"; | |
|
709 | var grsuffix = " ({0} {1})".format(grmembers, members_lbl); | |
|
710 | ||
|
711 | if (grnameMatchIndex > -1) { | |
|
712 | return _gravatar(grprefix + highlightMatch(grname, query, grnameMatchIndex) + grsuffix,null,true); | |
|
713 | } | |
|
714 | return _gravatar(grprefix + oResultData.grname + grsuffix, null,true); | |
|
715 | // Users | |
|
716 | } else if (oResultData.fname != undefined) { | |
|
717 | var fname = oResultData.fname, | |
|
718 | lname = oResultData.lname, | |
|
719 | nname = oResultData.nname || "", | |
|
720 | // Guard against null value | |
|
721 | fnameMatchIndex = fname.toLowerCase().indexOf(query), | |
|
722 | lnameMatchIndex = lname.toLowerCase().indexOf(query), | |
|
723 | nnameMatchIndex = nname.toLowerCase().indexOf(query), | |
|
724 | displayfname, displaylname, displaynname; | |
|
725 | ||
|
726 | if (fnameMatchIndex > -1) { | |
|
727 | displayfname = highlightMatch(fname, query, fnameMatchIndex); | |
|
728 | } else { | |
|
729 | displayfname = fname; | |
|
730 | } | |
|
731 | ||
|
732 | if (lnameMatchIndex > -1) { | |
|
733 | displaylname = highlightMatch(lname, query, lnameMatchIndex); | |
|
734 | } else { | |
|
735 | displaylname = lname; | |
|
736 | } | |
|
737 | ||
|
738 | if (nnameMatchIndex > -1) { | |
|
739 | displaynname = "(" + highlightMatch(nname, query, nnameMatchIndex) + ")"; | |
|
740 | } else { | |
|
741 | displaynname = nname ? "(" + nname + ")" : ""; | |
|
742 | } | |
|
743 | ||
|
744 | return _gravatar(displayfname + " " + displaylname + " " + displaynname, oResultData.gravatar_lnk); | |
|
745 | } else { | |
|
746 | return ''; | |
|
747 | } | |
|
748 | }; | |
|
749 | membersAC.formatResult = custom_formatter; | |
|
750 | ownerAC.formatResult = custom_formatter; | |
|
751 | ||
|
752 | var myHandler = function (sType, aArgs) { | |
|
753 | ||
|
754 | var myAC = aArgs[0]; // reference back to the AC instance | |
|
755 | var elLI = aArgs[1]; // reference to the selected LI element | |
|
756 | var oData = aArgs[2]; // object literal of selected item's result data | |
|
757 | //fill the autocomplete with value | |
|
758 | if (oData.nname != undefined) { | |
|
759 | //users | |
|
760 | myAC.getInputEl().value = oData.nname; | |
|
761 | YUD.get('perm_new_member_type').value = 'user'; | |
|
762 | } else { | |
|
763 | //groups | |
|
764 | myAC.getInputEl().value = oData.grname; | |
|
765 | YUD.get('perm_new_member_type').value = 'users_group'; | |
|
766 | } | |
|
767 | }; | |
|
768 | ||
|
769 | membersAC.itemSelectEvent.subscribe(myHandler); | |
|
770 | if(ownerAC.itemSelectEvent){ | |
|
771 | ownerAC.itemSelectEvent.subscribe(myHandler); | |
|
772 | } | |
|
773 | ||
|
774 | return { | |
|
775 | memberDS: memberDS, | |
|
776 | ownerDS: ownerDS, | |
|
777 | membersAC: membersAC, | |
|
778 | ownerAC: ownerAC, | |
|
779 | }; | |
|
780 | } | |
|
781 | ||
|
782 | ||
|
783 | ||
|
612 | 784 | /** |
|
613 | 785 | * QUICK REPO MENU |
|
614 | 786 | */ |
@@ -700,6 +872,19 b' var nameSort = function(a, b, desc, fiel' | |||
|
700 | 872 | return compState; |
|
701 | 873 | }; |
|
702 | 874 | |
|
875 | var permNameSort = function(a, b, desc, field) { | |
|
876 | var a_ = fromHTML(a.getData(field)); | |
|
877 | var b_ = fromHTML(b.getData(field)); | |
|
878 | // extract name from table | |
|
879 | ||
|
880 | a_ = a_.children[0].innerHTML; | |
|
881 | b_ = b_.children[0].innerHTML; | |
|
882 | ||
|
883 | var comp = YAHOO.util.Sort.compare; | |
|
884 | var compState = comp(a_, b_, desc); | |
|
885 | return compState; | |
|
886 | }; | |
|
887 | ||
|
703 | 888 | var groupNameSort = function(a, b, desc, field) { |
|
704 | 889 | var a_ = fromHTML(a.getData(field)); |
|
705 | 890 | var b_ = fromHTML(b.getData(field)); |
@@ -40,7 +40,14 b'' | |||
|
40 | 40 | </ul> |
|
41 | 41 | </%def> |
|
42 | 42 | |
|
43 | <%def name="repo_name(name,rtype,private,fork_of)"> | |
|
43 | <%def name="repo_name(name,rtype,private,fork_of,short_name=False, admin=False)"> | |
|
44 | <% | |
|
45 | def get_name(name,short_name=short_name): | |
|
46 | if short_name: | |
|
47 | return name.split('/')[-1] | |
|
48 | else: | |
|
49 | return name | |
|
50 | %> | |
|
44 | 51 | <div style="white-space: nowrap"> |
|
45 | 52 | ##TYPE OF REPO |
|
46 | 53 | %if h.is_hg(rtype): |
@@ -57,7 +64,11 b'' | |||
|
57 | 64 | %endif |
|
58 | 65 | |
|
59 | 66 | ##NAME |
|
60 | ${h.link_to(name,h.url('summary_home',repo_name=name),class_="repo_name")} | |
|
67 | %if admin: | |
|
68 | ${h.link_to(get_name(name),h.url('edit_repo',repo_name=name),class_="repo_name")} | |
|
69 | %else: | |
|
70 | ${h.link_to(get_name(name),h.url('summary_home',repo_name=name),class_="repo_name")} | |
|
71 | %endif | |
|
61 | 72 | %if fork_of: |
|
62 | 73 | <a href="${h.url('summary_home',repo_name=fork_of)}"> |
|
63 | 74 | <img class="icon" alt="${_('fork')}" title="${_('Fork of')} ${fork_of}" src="${h.url('/images/icons/arrow_divide.png')}"/></a> |
@@ -21,6 +21,7 b'' | |||
|
21 | 21 | </div> |
|
22 | 22 | <div class="input"> |
|
23 | 23 | ${h.text('clone_uri',class_="small")} |
|
24 | <span class="help-block">${_('Optional http[s] url from which repository should be cloned.')}</span> | |
|
24 | 25 | </div> |
|
25 | 26 | </div> |
|
26 | 27 | <div class="field"> |
@@ -28,7 +29,8 b'' | |||
|
28 | 29 | <label for="repo_group">${_('Repository group')}:</label> |
|
29 | 30 | </div> |
|
30 | 31 | <div class="input"> |
|
31 | ${h.select('repo_group','',c.repo_groups,class_="medium")} | |
|
32 | ${h.select('repo_group',request.GET.get('parent_group'),c.repo_groups,class_="medium")} | |
|
33 | <span class="help-block">${_('Optional select a group to put this repository into.')}</span> | |
|
32 | 34 | </div> |
|
33 | 35 | </div> |
|
34 | 36 | <div class="field"> |
@@ -37,6 +39,7 b'' | |||
|
37 | 39 | </div> |
|
38 | 40 | <div class="input"> |
|
39 | 41 | ${h.select('repo_type','hg',c.backends,class_="small")} |
|
42 | <span class="help-block">${_('Type of repository to create.')}</span> | |
|
40 | 43 | </div> |
|
41 | 44 | </div> |
|
42 | 45 | <div class="field"> |
@@ -44,15 +47,17 b'' | |||
|
44 | 47 | <label for="description">${_('Description')}:</label> |
|
45 | 48 | </div> |
|
46 | 49 | <div class="textarea text-area editor"> |
|
47 | ${h.textarea('description' |
|
50 | ${h.textarea('description')} | |
|
51 | <span class="help-block">${_('Keep it short and to the point. Use a README file for longer descriptions.')}</span> | |
|
48 | 52 | </div> |
|
49 | 53 | </div> |
|
50 | 54 | <div class="field"> |
|
51 | 55 | <div class="label label-checkbox"> |
|
52 | <label for="private">${_('Private')}:</label> | |
|
56 | <label for="private">${_('Private repository')}:</label> | |
|
53 | 57 | </div> |
|
54 | 58 | <div class="checkboxes"> |
|
55 | 59 | ${h.checkbox('private',value="True")} |
|
60 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> | |
|
56 | 61 | </div> |
|
57 | 62 | </div> |
|
58 | 63 | <div class="buttons"> |
@@ -41,6 +41,7 b'' | |||
|
41 | 41 | </div> |
|
42 | 42 | <div class="input"> |
|
43 | 43 | ${h.text('clone_uri',class_="medium")} |
|
44 | <span class="help-block">${_('Optional http[s] url from which repository should be cloned.')}</span> | |
|
44 | 45 | </div> |
|
45 | 46 | </div> |
|
46 | 47 | <div class="field"> |
@@ -49,6 +50,7 b'' | |||
|
49 | 50 | </div> |
|
50 | 51 | <div class="input"> |
|
51 | 52 | ${h.select('repo_group','',c.repo_groups,class_="medium")} |
|
53 | <span class="help-block">${_('Optional select a group to put this repository into.')}</span> | |
|
52 | 54 | </div> |
|
53 | 55 | </div> |
|
54 | 56 | <div class="field"> |
@@ -64,16 +66,18 b'' | |||
|
64 | 66 | <label for="description">${_('Description')}:</label> |
|
65 | 67 | </div> |
|
66 | 68 | <div class="textarea text-area editor"> |
|
67 | ${h.textarea('description' |
|
69 | ${h.textarea('description')} | |
|
70 | <span class="help-block">${_('Keep it short and to the point. Use a README file for longer descriptions.')}</span> | |
|
68 | 71 | </div> |
|
69 | 72 | </div> |
|
70 | 73 | |
|
71 | 74 | <div class="field"> |
|
72 | 75 | <div class="label label-checkbox"> |
|
73 | <label for="private">${_('Private')}:</label> | |
|
76 | <label for="private">${_('Private repository')}:</label> | |
|
74 | 77 | </div> |
|
75 | 78 | <div class="checkboxes"> |
|
76 | 79 | ${h.checkbox('private',value="True")} |
|
80 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> | |
|
77 | 81 | </div> |
|
78 | 82 | </div> |
|
79 | 83 | <div class="field"> |
@@ -82,6 +86,7 b'' | |||
|
82 | 86 | </div> |
|
83 | 87 | <div class="checkboxes"> |
|
84 | 88 | ${h.checkbox('enable_statistics',value="True")} |
|
89 | <span class="help-block">${_('Enable statistics window on summary page.')}</span> | |
|
85 | 90 | </div> |
|
86 | 91 | </div> |
|
87 | 92 | <div class="field"> |
@@ -90,15 +95,17 b'' | |||
|
90 | 95 | </div> |
|
91 | 96 | <div class="checkboxes"> |
|
92 | 97 | ${h.checkbox('enable_downloads',value="True")} |
|
98 | <span class="help-block">${_('Enable download menu on summary page.')}</span> | |
|
93 | 99 | </div> |
|
94 | 100 | </div> |
|
95 | 101 | <div class="field"> |
|
96 | 102 | <div class="label"> |
|
97 | 103 | <label for="user">${_('Owner')}:</label> |
|
98 | 104 | </div> |
|
99 | <div class="input input- |
|
105 | <div class="input input-medium ac"> | |
|
100 | 106 | <div class="perm_ac"> |
|
101 | 107 | ${h.text('user',class_='yui-ac-input')} |
|
108 | <span class="help-block">${_('Change owner of this repository.')}</span> | |
|
102 | 109 | <div id="owner_container"></div> |
|
103 | 110 | </div> |
|
104 | 111 | </div> |
@@ -25,7 +25,7 b'' | |||
|
25 | 25 | <td>${h.radio('u_perm_%s' % r2p.user.username,'repository.write')}</td> |
|
26 | 26 | <td>${h.radio('u_perm_%s' % r2p.user.username,'repository.admin')}</td> |
|
27 | 27 | <td style="white-space: nowrap;"> |
|
28 | <img |
|
28 | <img class="perm-gravatar" src="${h.gravatar_url(r2p.user.email,14)}"/>${r2p.user.username} | |
|
29 | 29 | </td> |
|
30 | 30 | <td> |
|
31 | 31 | %if r2p.user.username !='default': |
@@ -46,7 +46,7 b'' | |||
|
46 | 46 | <td>${h.radio('g_perm_%s' % g2p.users_group.users_group_name,'repository.write')}</td> |
|
47 | 47 | <td>${h.radio('g_perm_%s' % g2p.users_group.users_group_name,'repository.admin')}</td> |
|
48 | 48 | <td style="white-space: nowrap;"> |
|
49 | <img |
|
49 | <img class="perm-gravatar" src="${h.url('/images/icons/group.png')}"/>${g2p.users_group.users_group_name} | |
|
50 | 50 | </td> |
|
51 | 51 | <td> |
|
52 | 52 | <span class="delete_icon action_button" onclick="ajaxActionUsersGroup(${g2p.users_group.users_group_id},'${'id%s'%id(g2p.users_group.users_group_name)}')"> |
@@ -117,165 +117,12 b' YUE.onDOMReady(function () {' | |||
|
117 | 117 | YUD.setStyle('add_perm', 'opacity', '0.6'); |
|
118 | 118 | YUD.setStyle('add_perm', 'cursor', 'default'); |
|
119 | 119 | }); |
|
120 | MembersAutoComplete( | |
|
121 | ${c.users_array|n}, | |
|
122 | ${c.users_groups_array|n}, | |
|
123 | "${_('Group')}", | |
|
124 | "${_('members')}" | |
|
125 | ); | |
|
120 | 126 | }); |
|
121 | 127 | |
|
122 | YAHOO.example.FnMultipleFields = function () { | |
|
123 | var myUsers = ${c.users_array|n}; | |
|
124 | var myGroups = ${c.users_groups_array|n}; | |
|
125 | ||
|
126 | // Define a custom search function for the DataSource of users | |
|
127 | var matchUsers = function (sQuery) { | |
|
128 | // Case insensitive matching | |
|
129 | var query = sQuery.toLowerCase(); | |
|
130 | var i = 0; | |
|
131 | var l = myUsers.length; | |
|
132 | var matches = []; | |
|
133 | ||
|
134 | // Match against each name of each contact | |
|
135 | for (; i < l; i++) { | |
|
136 | contact = myUsers[i]; | |
|
137 | if ((contact.fname.toLowerCase().indexOf(query) > -1) || (contact.lname.toLowerCase().indexOf(query) > -1) || (contact.nname && (contact.nname.toLowerCase().indexOf(query) > -1))) { | |
|
138 | matches[matches.length] = contact; | |
|
139 | } | |
|
140 | } | |
|
141 | return matches; | |
|
142 | }; | |
|
143 | ||
|
144 | // Define a custom search function for the DataSource of usersGroups | |
|
145 | var matchGroups = function (sQuery) { | |
|
146 | // Case insensitive matching | |
|
147 | var query = sQuery.toLowerCase(); | |
|
148 | var i = 0; | |
|
149 | var l = myGroups.length; | |
|
150 | var matches = []; | |
|
151 | ||
|
152 | // Match against each name of each contact | |
|
153 | for (; i < l; i++) { | |
|
154 | matched_group = myGroups[i]; | |
|
155 | if (matched_group.grname.toLowerCase().indexOf(query) > -1) { | |
|
156 | matches[matches.length] = matched_group; | |
|
157 | } | |
|
158 | } | |
|
159 | return matches; | |
|
160 | }; | |
|
161 | ||
|
162 | //match all | |
|
163 | var matchAll = function (sQuery) { | |
|
164 | u = matchUsers(sQuery); | |
|
165 | g = matchGroups(sQuery); | |
|
166 | return u.concat(g); | |
|
167 | }; | |
|
168 | ||
|
169 | // DataScheme for members | |
|
170 | var memberDS = new YAHOO.util.FunctionDataSource(matchAll); | |
|
171 | memberDS.responseSchema = { | |
|
172 | fields: ["id", "fname", "lname", "nname", "grname", "grmembers"] | |
|
173 | }; | |
|
174 | ||
|
175 | // DataScheme for owner | |
|
176 | var ownerDS = new YAHOO.util.FunctionDataSource(matchUsers); | |
|
177 | ownerDS.responseSchema = { | |
|
178 | fields: ["id", "fname", "lname", "nname"] | |
|
179 | }; | |
|
180 | ||
|
181 | // Instantiate AutoComplete for perms | |
|
182 | var membersAC = new YAHOO.widget.AutoComplete("perm_new_member_name", "perm_container", memberDS); | |
|
183 | membersAC.useShadow = false; | |
|
184 | membersAC.resultTypeList = false; | |
|
185 | ||
|
186 | // Instantiate AutoComplete for owner | |
|
187 | var ownerAC = new YAHOO.widget.AutoComplete("user", "owner_container", ownerDS); | |
|
188 | ownerAC.useShadow = false; | |
|
189 | ownerAC.resultTypeList = false; | |
|
190 | ||
|
191 | ||
|
192 | // Helper highlight function for the formatter | |
|
193 | var highlightMatch = function (full, snippet, matchindex) { | |
|
194 | return full.substring(0, matchindex) + "<span class='match'>" + full.substr(matchindex, snippet.length) + "</span>" + full.substring(matchindex + snippet.length); | |
|
195 | }; | |
|
196 | ||
|
197 | // Custom formatter to highlight the matching letters | |
|
198 | var custom_formatter = function (oResultData, sQuery, sResultMatch) { | |
|
199 | var query = sQuery.toLowerCase(); | |
|
200 | ||
|
201 | if (oResultData.grname != undefined) { | |
|
202 | var grname = oResultData.grname; | |
|
203 | var grmembers = oResultData.grmembers; | |
|
204 | var grnameMatchIndex = grname.toLowerCase().indexOf(query); | |
|
205 | var grprefix = "${_('Group')}: "; | |
|
206 | var grsuffix = " (" + grmembers + " ${_('members')})"; | |
|
207 | ||
|
208 | if (grnameMatchIndex > -1) { | |
|
209 | return grprefix + highlightMatch(grname, query, grnameMatchIndex) + grsuffix; | |
|
210 | } | |
|
211 | ||
|
212 | return grprefix + oResultData.grname + grsuffix; | |
|
213 | } else if (oResultData.fname != undefined) { | |
|
214 | ||
|
215 | var fname = oResultData.fname, | |
|
216 | lname = oResultData.lname, | |
|
217 | nname = oResultData.nname || "", | |
|
218 | // Guard against null value | |
|
219 | fnameMatchIndex = fname.toLowerCase().indexOf(query), | |
|
220 | lnameMatchIndex = lname.toLowerCase().indexOf(query), | |
|
221 | nnameMatchIndex = nname.toLowerCase().indexOf(query), | |
|
222 | displayfname, displaylname, displaynname; | |
|
223 | ||
|
224 | if (fnameMatchIndex > -1) { | |
|
225 | displayfname = highlightMatch(fname, query, fnameMatchIndex); | |
|
226 | } else { | |
|
227 | displayfname = fname; | |
|
228 | } | |
|
229 | ||
|
230 | if (lnameMatchIndex > -1) { | |
|
231 | displaylname = highlightMatch(lname, query, lnameMatchIndex); | |
|
232 | } else { | |
|
233 | displaylname = lname; | |
|
234 | } | |
|
235 | ||
|
236 | if (nnameMatchIndex > -1) { | |
|
237 | displaynname = "(" + highlightMatch(nname, query, nnameMatchIndex) + ")"; | |
|
238 | } else { | |
|
239 | displaynname = nname ? "(" + nname + ")" : ""; | |
|
240 | } | |
|
241 | ||
|
242 | return displayfname + " " + displaylname + " " + displaynname; | |
|
243 | } else { | |
|
244 | return ''; | |
|
245 | } | |
|
246 | }; | |
|
247 | membersAC.formatResult = custom_formatter; | |
|
248 | ownerAC.formatResult = custom_formatter; | |
|
249 | ||
|
250 | var myHandler = function (sType, aArgs) { | |
|
251 | ||
|
252 | var myAC = aArgs[0]; // reference back to the AC instance | |
|
253 | var elLI = aArgs[1]; // reference to the selected LI element | |
|
254 | var oData = aArgs[2]; // object literal of selected item's result data | |
|
255 | //fill the autocomplete with value | |
|
256 | if (oData.nname != undefined) { | |
|
257 | //users | |
|
258 | myAC.getInputEl().value = oData.nname; | |
|
259 | YUD.get('perm_new_member_type').value = 'user'; | |
|
260 | } else { | |
|
261 | //groups | |
|
262 | myAC.getInputEl().value = oData.grname; | |
|
263 | YUD.get('perm_new_member_type').value = 'users_group'; | |
|
264 | } | |
|
265 | ||
|
266 | }; | |
|
267 | ||
|
268 | membersAC.itemSelectEvent.subscribe(myHandler); | |
|
269 | if(ownerAC.itemSelectEvent){ | |
|
270 | ownerAC.itemSelectEvent.subscribe(myHandler); | |
|
271 | } | |
|
272 | ||
|
273 | return { | |
|
274 | memberDS: memberDS, | |
|
275 | ownerDS: ownerDS, | |
|
276 | membersAC: membersAC, | |
|
277 | ownerAC: ownerAC, | |
|
278 | }; | |
|
279 | }(); | |
|
280 | ||
|
281 | 128 | </script> |
@@ -48,7 +48,7 b'' | |||
|
48 | 48 | ${dt.quick_menu(repo['name'])} |
|
49 | 49 | </td> |
|
50 | 50 | <td class="reponame"> |
|
51 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'))} | |
|
51 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'), admin=True)} | |
|
52 | 52 | </td> |
|
53 | 53 | ##DESCRIPTION |
|
54 | 54 | <td><span class="tooltip" title="${h.tooltip(repo['description'])}"> |
@@ -15,7 +15,7 b'' | |||
|
15 | 15 | <td>${h.radio('u_perm_%s' % r2p.user.username,'group.write')}</td> |
|
16 | 16 | <td>${h.radio('u_perm_%s' % r2p.user.username,'group.admin')}</td> |
|
17 | 17 | <td style="white-space: nowrap;"> |
|
18 | <img | |
|
18 | <img class="perm-gravatar" src="${h.gravatar_url(r2p.user.email,14)}"/>${r2p.user.username} | |
|
19 | 19 | </td> |
|
20 | 20 | <td> |
|
21 | 21 | %if r2p.user.username !='default': |
@@ -35,7 +35,7 b'' | |||
|
35 | 35 | <td>${h.radio('g_perm_%s' % g2p.users_group.users_group_name,'group.write')}</td> |
|
36 | 36 | <td>${h.radio('g_perm_%s' % g2p.users_group.users_group_name,'group.admin')}</td> |
|
37 | 37 | <td style="white-space: nowrap;"> |
|
38 | <img | |
|
38 | <img class="perm-gravatar" src="${h.url('/images/icons/group.png')}"/>${g2p.users_group.users_group_name} | |
|
39 | 39 | </td> |
|
40 | 40 | <td> |
|
41 | 41 | <span class="delete_icon action_button" onclick="ajaxActionUsersGroup(${g2p.users_group.users_group_id},'${'id%s'%id(g2p.users_group.users_group_name)}')"> |
@@ -68,7 +68,7 b'' | |||
|
68 | 68 | </table> |
|
69 | 69 | <script type="text/javascript"> |
|
70 | 70 | function ajaxActionUser(user_id, field_id) { |
|
71 | var sUrl = "${h.url('delete_repos_group_user_perm',group_name=c.repos_group.name)}"; | |
|
71 | var sUrl = "${h.url('delete_repos_group_user_perm',group_name=c.repos_group.group_name)}"; | |
|
72 | 72 | var callback = { |
|
73 | 73 | success: function (o) { |
|
74 | 74 | var tr = YUD.get(String(field_id)); |
@@ -83,7 +83,7 b' function ajaxActionUser(user_id, field_i' | |||
|
83 | 83 | }; |
|
84 | 84 | |
|
85 | 85 | function ajaxActionUsersGroup(users_group_id,field_id){ |
|
86 | var sUrl = "${h.url('delete_repos_group_users_group_perm',group_name=c.repos_group.name)}"; | |
|
86 | var sUrl = "${h.url('delete_repos_group_users_group_perm',group_name=c.repos_group.group_name)}"; | |
|
87 | 87 | var callback = { |
|
88 | 88 | success:function(o){ |
|
89 | 89 | var tr = YUD.get(String(field_id)); |
@@ -106,165 +106,12 b' YUE.onDOMReady(function () {' | |||
|
106 | 106 | YUD.setStyle('add_perm', 'opacity', '0.6'); |
|
107 | 107 | YUD.setStyle('add_perm', 'cursor', 'default'); |
|
108 | 108 | }); |
|
109 | MembersAutoComplete( | |
|
110 | ${c.users_array|n}, | |
|
111 | ${c.users_groups_array|n}, | |
|
112 | "${_('Group')}", | |
|
113 | "${_('members')}" | |
|
114 | ); | |
|
109 | 115 | }); |
|
110 | 116 | |
|
111 | YAHOO.example.FnMultipleFields = function () { | |
|
112 | var myUsers = ${c.users_array|n}; | |
|
113 | var myGroups = ${c.users_groups_array|n}; | |
|
114 | ||
|
115 | // Define a custom search function for the DataSource of users | |
|
116 | var matchUsers = function (sQuery) { | |
|
117 | // Case insensitive matching | |
|
118 | var query = sQuery.toLowerCase(); | |
|
119 | var i = 0; | |
|
120 | var l = myUsers.length; | |
|
121 | var matches = []; | |
|
122 | ||
|
123 | // Match against each name of each contact | |
|
124 | for (; i < l; i++) { | |
|
125 | contact = myUsers[i]; | |
|
126 | if ((contact.fname.toLowerCase().indexOf(query) > -1) || (contact.lname.toLowerCase().indexOf(query) > -1) || (contact.nname && (contact.nname.toLowerCase().indexOf(query) > -1))) { | |
|
127 | matches[matches.length] = contact; | |
|
128 | } | |
|
129 | } | |
|
130 | return matches; | |
|
131 | }; | |
|
132 | ||
|
133 | // Define a custom search function for the DataSource of usersGroups | |
|
134 | var matchGroups = function (sQuery) { | |
|
135 | // Case insensitive matching | |
|
136 | var query = sQuery.toLowerCase(); | |
|
137 | var i = 0; | |
|
138 | var l = myGroups.length; | |
|
139 | var matches = []; | |
|
140 | ||
|
141 | // Match against each name of each contact | |
|
142 | for (; i < l; i++) { | |
|
143 | matched_group = myGroups[i]; | |
|
144 | if (matched_group.grname.toLowerCase().indexOf(query) > -1) { | |
|
145 | matches[matches.length] = matched_group; | |
|
146 | } | |
|
147 | } | |
|
148 | return matches; | |
|
149 | }; | |
|
150 | ||
|
151 | //match all | |
|
152 | var matchAll = function (sQuery) { | |
|
153 | u = matchUsers(sQuery); | |
|
154 | g = matchGroups(sQuery); | |
|
155 | return u.concat(g); | |
|
156 | }; | |
|
157 | ||
|
158 | // DataScheme for members | |
|
159 | var memberDS = new YAHOO.util.FunctionDataSource(matchAll); | |
|
160 | memberDS.responseSchema = { | |
|
161 | fields: ["id", "fname", "lname", "nname", "grname", "grmembers"] | |
|
162 | }; | |
|
163 | ||
|
164 | // DataScheme for owner | |
|
165 | var ownerDS = new YAHOO.util.FunctionDataSource(matchUsers); | |
|
166 | ownerDS.responseSchema = { | |
|
167 | fields: ["id", "fname", "lname", "nname"] | |
|
168 | }; | |
|
169 | ||
|
170 | // Instantiate AutoComplete for perms | |
|
171 | var membersAC = new YAHOO.widget.AutoComplete("perm_new_member_name", "perm_container", memberDS); | |
|
172 | membersAC.useShadow = false; | |
|
173 | membersAC.resultTypeList = false; | |
|
174 | ||
|
175 | // Instantiate AutoComplete for owner | |
|
176 | var ownerAC = new YAHOO.widget.AutoComplete("user", "owner_container", ownerDS); | |
|
177 | ownerAC.useShadow = false; | |
|
178 | ownerAC.resultTypeList = false; | |
|
179 | ||
|
180 | ||
|
181 | // Helper highlight function for the formatter | |
|
182 | var highlightMatch = function (full, snippet, matchindex) { | |
|
183 | return full.substring(0, matchindex) + "<span class='match'>" + full.substr(matchindex, snippet.length) + "</span>" + full.substring(matchindex + snippet.length); | |
|
184 | }; | |
|
185 | ||
|
186 | // Custom formatter to highlight the matching letters | |
|
187 | var custom_formatter = function (oResultData, sQuery, sResultMatch) { | |
|
188 | var query = sQuery.toLowerCase(); | |
|
189 | ||
|
190 | if (oResultData.grname != undefined) { | |
|
191 | var grname = oResultData.grname; | |
|
192 | var grmembers = oResultData.grmembers; | |
|
193 | var grnameMatchIndex = grname.toLowerCase().indexOf(query); | |
|
194 | var grprefix = "${_('Group')}: "; | |
|
195 | var grsuffix = " (" + grmembers + " ${_('members')})"; | |
|
196 | ||
|
197 | if (grnameMatchIndex > -1) { | |
|
198 | return grprefix + highlightMatch(grname, query, grnameMatchIndex) + grsuffix; | |
|
199 | } | |
|
200 | ||
|
201 | return grprefix + oResultData.grname + grsuffix; | |
|
202 | } else if (oResultData.fname != undefined) { | |
|
203 | ||
|
204 | var fname = oResultData.fname, | |
|
205 | lname = oResultData.lname, | |
|
206 | nname = oResultData.nname || "", | |
|
207 | // Guard against null value | |
|
208 | fnameMatchIndex = fname.toLowerCase().indexOf(query), | |
|
209 | lnameMatchIndex = lname.toLowerCase().indexOf(query), | |
|
210 | nnameMatchIndex = nname.toLowerCase().indexOf(query), | |
|
211 | displayfname, displaylname, displaynname; | |
|
212 | ||
|
213 | if (fnameMatchIndex > -1) { | |
|
214 | displayfname = highlightMatch(fname, query, fnameMatchIndex); | |
|
215 | } else { | |
|
216 | displayfname = fname; | |
|
217 | } | |
|
218 | ||
|
219 | if (lnameMatchIndex > -1) { | |
|
220 | displaylname = highlightMatch(lname, query, lnameMatchIndex); | |
|
221 | } else { | |
|
222 | displaylname = lname; | |
|
223 | } | |
|
224 | ||
|
225 | if (nnameMatchIndex > -1) { | |
|
226 | displaynname = "(" + highlightMatch(nname, query, nnameMatchIndex) + ")"; | |
|
227 | } else { | |
|
228 | displaynname = nname ? "(" + nname + ")" : ""; | |
|
229 | } | |
|
230 | ||
|
231 | return displayfname + " " + displaylname + " " + displaynname; | |
|
232 | } else { | |
|
233 | return ''; | |
|
234 | } | |
|
235 | }; | |
|
236 | membersAC.formatResult = custom_formatter; | |
|
237 | ownerAC.formatResult = custom_formatter; | |
|
238 | ||
|
239 | var myHandler = function (sType, aArgs) { | |
|
240 | ||
|
241 | var myAC = aArgs[0]; // reference back to the AC instance | |
|
242 | var elLI = aArgs[1]; // reference to the selected LI element | |
|
243 | var oData = aArgs[2]; // object literal of selected item's result data | |
|
244 | //fill the autocomplete with value | |
|
245 | if (oData.nname != undefined) { | |
|
246 | //users | |
|
247 | myAC.getInputEl().value = oData.nname; | |
|
248 | YUD.get('perm_new_member_type').value = 'user'; | |
|
249 | } else { | |
|
250 | //groups | |
|
251 | myAC.getInputEl().value = oData.grname; | |
|
252 | YUD.get('perm_new_member_type').value = 'users_group'; | |
|
253 | } | |
|
254 | ||
|
255 | }; | |
|
256 | ||
|
257 | membersAC.itemSelectEvent.subscribe(myHandler); | |
|
258 | if(ownerAC.itemSelectEvent){ | |
|
259 | ownerAC.itemSelectEvent.subscribe(myHandler); | |
|
260 | } | |
|
261 | ||
|
262 | return { | |
|
263 | memberDS: memberDS, | |
|
264 | ownerDS: ownerDS, | |
|
265 | membersAC: membersAC, | |
|
266 | ownerAC: ownerAC, | |
|
267 | }; | |
|
268 | }(); | |
|
269 | ||
|
270 | 117 | </script> |
@@ -17,5 +17,5 b'' | |||
|
17 | 17 | ${self.menu('admin')} |
|
18 | 18 | </%def> |
|
19 | 19 | <%def name="main()"> |
|
20 | <%include file="/index_base.html" args="parent=self"/> | |
|
20 | <%include file="/index_base.html" args="parent=self,short_repo_names=True"/> | |
|
21 | 21 | </%def> |
@@ -44,7 +44,7 b'' | |||
|
44 | 44 | <td> |
|
45 | 45 | <div style="white-space: nowrap"> |
|
46 | 46 | <img class="icon" alt="${_('Repositories group')}" src="${h.url('/images/icons/database_link.png')}"/> |
|
47 | ${h.link_to(h.literal(' » '.join([g.name for g in gr.parents+[gr]])),url('edit_repos_group',id=gr.group_id))} | |
|
47 | ${h.link_to(h.literal(' » '.join(map(h.safe_unicode,[g.name for g in gr.parents+[gr]]))),url('edit_repos_group',id=gr.group_id))} | |
|
48 | 48 | </div> |
|
49 | 49 | </td> |
|
50 | 50 | <td>${gr.group_description}</td> |
@@ -113,52 +113,45 b'' | |||
|
113 | 113 | <div class="title"> |
|
114 | 114 | <h5> |
|
115 | 115 | <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" value="${_('quick filter...')}"/> |
|
116 | ${_('My repositories')} | |
|
116 | <a id="show_my" class="link-white" href="#my">${_('My repos')}</a> / <a id="show_perms" class="link-white" href="#perms">${_('My permissions')}</a> | |
|
117 | 117 | </h5> |
|
118 | 118 | %if h.HasPermissionAny('hg.admin','hg.create.repository')(): |
|
119 | 119 | <ul class="links"> |
|
120 | 120 | <li> |
|
121 | <span>${h.link_to(_('ADD | |
|
121 | <span>${h.link_to(_('ADD'),h.url('admin_settings_create_repository'))}</span> | |
|
122 | 122 | </li> |
|
123 | 123 | </ul> |
|
124 | 124 | %endif |
|
125 | 125 | </div> |
|
126 | 126 | <!-- end box / title --> |
|
127 | <div class="table"> | |
|
128 | <table> | |
|
127 | <div id="my" class="table"> | |
|
128 | <div id='repos_list_wrap' class="yui-skin-sam"> | |
|
129 | <table id="repos_list"> | |
|
129 | 130 | <thead> |
|
130 | 131 | <tr> |
|
132 | <th></th> | |
|
131 | 133 | <th class="left">${_('Name')}</th> |
|
132 | <th class="left">${_(' | |
|
133 | <th | |
|
134 | <th class="left">${_('Revision')}</th> | |
|
135 | <th class="left">${_('Action')}</th> | |
|
136 | <th class="left">${_('Action')}</th> | |
|
134 | 137 | </thead> |
|
135 | 138 | <tbody> |
|
139 | <%namespace name="dt" file="/_data_table/_dt_elements.html"/> | |
|
136 | 140 | %if c.user_repos: |
|
137 | 141 | %for repo in c.user_repos: |
|
138 | 142 | <tr> |
|
139 | <td> | |
|
140 | %if h.is_hg(repo['dbrepo']['repo_type']): | |
|
141 | <img class="icon" title="${_('Mercurial repository')}" alt="${_('Mercurial repository')}" src="${h.url('/images/icons/hgicon.png')}"/> | |
|
142 | %elif h.is_git(repo['dbrepo']['repo_type']): | |
|
143 | <img class="icon" title="${_('Git repository')}" alt="${_('Git repository')}" src="${h.url('/images/icons/giticon.png')}"/> | |
|
144 |
|
|
|
145 | ||
|
146 |
|
|
|
147 | %if repo['dbrepo']['private']: | |
|
148 | <img class="icon" alt="${_('private')}" src="${h.url('/images/icons/lock.png')}"/> | |
|
149 | %else: | |
|
150 | <img class="icon" alt="${_('public')}" src="${h.url('/images/icons/lock_open.png')}"/> | |
|
151 | %endif | |
|
152 | ||
|
153 | ${h.link_to(repo['name'], h.url('summary_home',repo_name=repo['name']),class_="repo_name")} | |
|
154 | %if repo['dbrepo_fork']: | |
|
155 | <a href="${h.url('summary_home',repo_name=repo['dbrepo_fork']['repo_name'])}"> | |
|
156 | <img class="icon" alt="${_('public')}" | |
|
157 | title="${_('Fork of')} ${repo['dbrepo_fork']['repo_name']}" | |
|
158 | src="${h.url('/images/icons/arrow_divide.png')}"/></a> | |
|
159 | %endif | |
|
160 | </td> | |
|
161 | <td><span class="tooltip" title="${repo['last_change']}">${("r%s:%s") % (repo['rev'],h.short_id(repo['tip']))}</span></td> | |
|
143 | ##QUICK MENU | |
|
144 | <td class="quick_repo_menu"> | |
|
145 | ${dt.quick_menu(repo['name'])} | |
|
146 | </td> | |
|
147 | ##REPO NAME AND ICONS | |
|
148 | <td class="reponame"> | |
|
149 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'))} | |
|
150 | </td> | |
|
151 | ##LAST REVISION | |
|
152 | <td> | |
|
153 | ${dt.revision(repo['name'],repo['rev'],repo['tip'],repo['author'],repo['last_msg'])} | |
|
154 | </td> | |
|
162 | 155 | <td><a href="${h.url('repo_settings_home',repo_name=repo['name'])}" title="${_('edit')}"><img class="icon" alt="${_('private')}" src="${h.url('/images/icons/application_form_edit.png')}"/></a></td> |
|
163 | 156 | <td> |
|
164 | 157 | ${h.form(url('repo_settings_delete', repo_name=repo['name']),method='delete')} |
@@ -177,14 +170,144 b'' | |||
|
177 | 170 | %endif |
|
178 | 171 | </tbody> |
|
179 | 172 | </table> |
|
173 | </div> | |
|
174 | </div> | |
|
175 | <div id="perms" class="table" style="display:none"> | |
|
176 | %for section in sorted(c.rhodecode_user.permissions.keys()): | |
|
177 | <div class="perms_section_head">${section.replace("_"," ").capitalize()}</div> | |
|
178 | ||
|
179 | <div id='tbl_list_wrap_${section}' class="yui-skin-sam"> | |
|
180 | <table id="tbl_list_${section}"> | |
|
181 | <thead> | |
|
182 | <tr> | |
|
183 | <th class="left">${_('Name')}</th> | |
|
184 | <th class="left">${_('Permission')}</th> | |
|
185 | </thead> | |
|
186 | <tbody> | |
|
187 | %for k in c.rhodecode_user.permissions[section]: | |
|
188 | <% | |
|
189 | if section != 'global': | |
|
190 | section_perm = c.rhodecode_user.permissions[section].get(k) | |
|
191 | _perm = section_perm.split('.')[-1] | |
|
192 | else: | |
|
193 | _perm = section_perm = None | |
|
194 | %> | |
|
195 | %if _perm not in ['none']: | |
|
196 | <tr> | |
|
197 | <td> | |
|
198 | %if section == 'repositories': | |
|
199 | <a href="${h.url('summary_home',repo_name=k)}">${k}</a> | |
|
200 | %elif section == 'repositories_groups': | |
|
201 | <a href="${h.url('repos_group_home',group_name=k)}">${k}</a> | |
|
202 | %else: | |
|
203 | ${k} | |
|
204 | %endif | |
|
205 | </td> | |
|
206 | <td> | |
|
207 | %if section == 'global': | |
|
208 | ${h.bool2icon(True)} | |
|
209 | %else: | |
|
210 | <span class="perm_tag ${_perm}">${section_perm}</span> | |
|
211 | %endif | |
|
212 | </td> | |
|
213 | </tr> | |
|
214 | %endif | |
|
215 | %endfor | |
|
216 | </tbody> | |
|
217 | </table> | |
|
218 | </div> | |
|
219 | %endfor | |
|
180 | 220 | </div> |
|
181 | 221 | </div> |
|
182 | 222 | <script type="text/javascript"> |
|
183 | var nodes = YUQ('div.table tr td a.repo_name'); | |
|
184 | var target = 'q_filter'; | |
|
185 | var func = function(node){ | |
|
186 | return node.parentNode.parentNode; | |
|
223 | var filter_activate = function(){ | |
|
224 | var nodes = YUQ('#my tr td a.repo_name'); | |
|
225 | var func = function(node){ | |
|
226 | return node.parentNode.parentNode.parentNode.parentNode; | |
|
227 | } | |
|
228 | q_filter('q_filter',YUQ('#my tr td a.repo_name'),func); | |
|
187 | 229 | } |
|
188 | q_filter(target,nodes,func); | |
|
230 | ||
|
231 | YUE.on('show_my','click',function(e){ | |
|
232 | YUD.setStyle('perms','display','none'); | |
|
233 | YUD.setStyle('my','display',''); | |
|
234 | YUD.get('q_filter').removeAttribute('disabled'); | |
|
235 | filter_activate(); | |
|
236 | YUE.preventDefault(e); | |
|
237 | }) | |
|
238 | YUE.on('show_perms','click',function(e){ | |
|
239 | YUD.setStyle('my','display','none'); | |
|
240 | YUD.setStyle('perms','display',''); | |
|
241 | YUD.setAttribute('q_filter','disabled','disabled'); | |
|
242 | YUE.preventDefault(e); | |
|
243 | }) | |
|
244 | ||
|
245 | ||
|
246 | // main table sorting | |
|
247 | var myColumnDefs = [ | |
|
248 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, | |
|
249 | {key:"name",label:"${_('Name')}",sortable:true, | |
|
250 | sortOptions: { sortFunction: nameSort }}, | |
|
251 | {key:"tip",label:"${_('Tip')}",sortable:true, | |
|
252 | sortOptions: { sortFunction: revisionSort }}, | |
|
253 | {key:"action1",label:"",sortable:false}, | |
|
254 | {key:"action2",label:"",sortable:false}, | |
|
255 | ]; | |
|
256 | ||
|
257 | var myDataSource = new YAHOO.util.DataSource(YUD.get("repos_list")); | |
|
258 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; | |
|
259 | myDataSource.responseSchema = { | |
|
260 | fields: [ | |
|
261 | {key:"menu"}, | |
|
262 | {key:"name"}, | |
|
263 | {key:"tip"}, | |
|
264 | {key:"action1"}, | |
|
265 | {key:"action2"}, | |
|
266 | ] | |
|
267 | }; | |
|
268 | var trans_defs = { | |
|
269 | sortedBy:{key:"name",dir:"asc"}, | |
|
270 | MSG_SORTASC:"${_('Click to sort ascending')}", | |
|
271 | MSG_SORTDESC:"${_('Click to sort descending')}", | |
|
272 | MSG_EMPTY:"${_('No records found.')}", | |
|
273 | MSG_ERROR:"${_('Data error.')}", | |
|
274 | MSG_LOADING:"${_('Loading...')}", | |
|
275 | } | |
|
276 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource,trans_defs); | |
|
277 | myDataTable.subscribe('postRenderEvent',function(oArgs) { | |
|
278 | tooltip_activate(); | |
|
279 | quick_repo_menu(); | |
|
280 | filter_activate(); | |
|
281 | }); | |
|
282 | ||
|
283 | var permsColumnDefs = [ | |
|
284 | {key:"name",label:"${_('Name')}",sortable:true, sortOptions: { sortFunction: permNameSort }}, | |
|
285 | {key:"perm",label:"${_('Permission')}",sortable:false,}, | |
|
286 | ]; | |
|
287 | ||
|
288 | // perms repos table | |
|
289 | var myDataSource2 = new YAHOO.util.DataSource(YUD.get("tbl_list_repositories")); | |
|
290 | myDataSource2.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; | |
|
291 | myDataSource2.responseSchema = { | |
|
292 | fields: [ | |
|
293 | {key:"name"}, | |
|
294 | {key:"perm"}, | |
|
295 | ] | |
|
296 | }; | |
|
297 | ||
|
298 | new YAHOO.widget.DataTable("tbl_list_wrap_repositories", permsColumnDefs, myDataSource2, trans_defs); | |
|
299 | ||
|
300 | //perms groups table | |
|
301 | var myDataSource3 = new YAHOO.util.DataSource(YUD.get("tbl_list_repositories_groups")); | |
|
302 | myDataSource3.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; | |
|
303 | myDataSource3.responseSchema = { | |
|
304 | fields: [ | |
|
305 | {key:"name"}, | |
|
306 | {key:"perm"}, | |
|
307 | ] | |
|
308 | }; | |
|
309 | ||
|
310 | new YAHOO.widget.DataTable("tbl_list_wrap_repositories_groups", permsColumnDefs, myDataSource3, trans_defs); | |
|
311 | ||
|
189 | 312 | </script> |
|
190 | 313 | </%def> |
@@ -37,7 +37,7 b'' | |||
|
37 | 37 | %for cnt,u_group in enumerate(c.users_groups_list): |
|
38 | 38 | <tr class="parity${cnt%2}"> |
|
39 | 39 | <td>${h.link_to(u_group.users_group_name,h.url('edit_users_group', id=u_group.users_group_id))}</td> |
|
40 | <td><span class="tooltip" title="${', '.join([x.user.username for x in u_group.members[:50]])}">${len(u_group.members)}</span></td> | |
|
40 | <td><span class="tooltip" title="${', '.join(map(h.safe_unicode,[x.user.username for x in u_group.members[:50]]))}">${len(u_group.members)}</span></td> | |
|
41 | 41 | <td>${h.bool2icon(u_group.users_group_active)}</td> |
|
42 | 42 | <td> |
|
43 | 43 | ${h.form(url('users_group', id=u_group.users_group_id),method='delete')} |
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> | |
|
2 | <!DOCTYPE html> | |
|
3 | 3 | <html xmlns="http://www.w3.org/1999/xhtml"> |
|
4 | 4 | <head> |
|
5 | 5 | <title>${self.title()}</title> |
@@ -37,7 +37,7 b'' | |||
|
37 | 37 | ## JAVASCRIPT ## |
|
38 | 38 | <%def name="js()"> |
|
39 | 39 | <script type="text/javascript" src="${h.url('/js/yui.2.9.js')}"></script> |
|
40 | <!--[if IE]> | |
|
40 | <!--[if lt IE 9]> | |
|
41 | 41 | <script language="javascript" type="text/javascript" src="${h.url('/js/excanvas.min.js')}"></script> |
|
42 | 42 | <![endif]--> |
|
43 | 43 | <script type="text/javascript" src="${h.url('/js/yui.flot.js')}"></script> |
@@ -130,6 +130,17 b'' | |||
|
130 | 130 | ${self.js()} |
|
131 | 131 | </head> |
|
132 | 132 | <body id="body"> |
|
133 | ${next.body()} | |
|
133 | ## IE hacks | |
|
134 | <!--[if IE 7]> | |
|
135 | <script>YUD.addClass(document.body,'ie7')</script> | |
|
136 | <![endif]--> | |
|
137 | <!--[if IE 8]> | |
|
138 | <script>YUD.addClass(document.body,'ie8')</script> | |
|
139 | <![endif]--> | |
|
140 | <!--[if IE 9]> | |
|
141 | <script>YUD.addClass(document.body,'ie9')</script> | |
|
142 | <![endif]--> | |
|
143 | ||
|
144 | ${next.body()} | |
|
134 | 145 | </body> |
|
135 | 146 | </html> |
@@ -93,11 +93,11 b'' | |||
|
93 | 93 | %endif |
|
94 | 94 | %if h.is_hg(c.rhodecode_repo) and cs.branch: |
|
95 | 95 | <span class="branchtag" title="${'%s %s' % (_('branch'),cs.branch)}"> |
|
96 | ${h.link_to(cs.branch,h.url('files_home',repo_name=c.repo_name,revision=cs.raw_id))}</span> | |
|
96 | ${h.link_to(h.shorter(cs.branch),h.url('files_home',repo_name=c.repo_name,revision=cs.raw_id))}</span> | |
|
97 | 97 | %endif |
|
98 | 98 | %for tag in cs.tags: |
|
99 | 99 | <span class="tagtag" title="${'%s %s' % (_('tag'),tag)}"> |
|
100 | ${h.link_to(tag,h.url('files_home',repo_name=c.repo_name,revision=cs.raw_id))}</span> | |
|
100 | ${h.link_to(h.shorter(tag),h.url('files_home',repo_name=c.repo_name,revision=cs.raw_id))}</span> | |
|
101 | 101 | %endfor |
|
102 | 102 | </span> |
|
103 | 103 | </div> |
@@ -36,8 +36,8 b'' | |||
|
36 | 36 | <div class="diff-actions"> |
|
37 | 37 | <a href="${h.url('raw_changeset_home',repo_name=c.repo_name,revision=c.changeset.raw_id,diff='show')}" title="${_('raw diff')}" class="tooltip"><img class="icon" src="${h.url('/images/icons/page_white.png')}"/></a> |
|
38 | 38 | <a href="${h.url('raw_changeset_home',repo_name=c.repo_name,revision=c.changeset.raw_id,diff='download')}" title="${_('download diff')}" class="tooltip"><img class="icon" src="${h.url('/images/icons/page_white_get.png')}"/></a> |
|
39 | ${c.ignorews_url()} | |
|
40 | ${c.context_url()} | |
|
39 | ${c.ignorews_url(request.GET)} | |
|
40 | ${c.context_url(request.GET)} | |
|
41 | 41 | </div> |
|
42 | 42 | <div class="comments-number" style="float:right;padding-right:5px">${len(c.comments)} comment(s) (${c.inline_cnt} ${_('inline')})</div> |
|
43 | 43 | </div> |
@@ -91,14 +91,14 b'' | |||
|
91 | 91 | </div> |
|
92 | 92 | </div> |
|
93 | 93 | <span> |
|
94 | ${_('%s files affected with %s | |
|
94 | ${_('%s files affected with %s insertions and %s deletions:') % (len(c.changeset.affected_files),c.lines_added,c.lines_deleted)} | |
|
95 | 95 | </span> |
|
96 | 96 | <div class="cs_files"> |
|
97 | 97 | %for change,filenode,diff,cs1,cs2,stat in c.changes: |
|
98 | 98 | <div class="cs_${change}"> |
|
99 | 99 | <div class="node"> |
|
100 | 100 | %if change != 'removed': |
|
101 | ${h.link_to(h.safe_unicode(filenode.path),c.anchor_url(filenode.changeset.raw_id,filenode.path)+"_target")} | |
|
101 | ${h.link_to(h.safe_unicode(filenode.path),c.anchor_url(filenode.changeset.raw_id,filenode.path,request.GET)+"_target")} | |
|
102 | 102 | %else: |
|
103 | 103 | ${h.link_to(h.safe_unicode(filenode.path),h.url.current(anchor=h.FID('',filenode.path)))} |
|
104 | 104 | %endif |
@@ -7,7 +7,7 b'' | |||
|
7 | 7 | |
|
8 | 8 | %for change,filenode,diff,cs1,cs2,stat in changes: |
|
9 | 9 | %if change !='removed': |
|
10 | <div id="${h.FID(filenode.changeset.raw_id,filenode.path)}_target" style="clear:both; | |
|
10 | <div id="${h.FID(filenode.changeset.raw_id,filenode.path)}_target" style="clear:both;margin-top:25px"></div> | |
|
11 | 11 | <div id="${h.FID(filenode.changeset.raw_id,filenode.path)}" class="diffblock margined comm"> |
|
12 | 12 | <div class="code-header"> |
|
13 | 13 | <div class="changeset_header"> |
@@ -19,8 +19,8 b'' | |||
|
19 | 19 | <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=h.safe_unicode(filenode.path),diff2=cs2,diff1=cs1,diff='diff',fulldiff=1)}" title="${_('diff')}" class="tooltip"><img class="icon" src="${h.url('/images/icons/page_white_go.png')}"/></a> |
|
20 | 20 | <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=h.safe_unicode(filenode.path),diff2=cs2,diff1=cs1,diff='raw')}" title="${_('raw diff')}" class="tooltip"><img class="icon" src="${h.url('/images/icons/page_white.png')}"/></a> |
|
21 | 21 | <a href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=h.safe_unicode(filenode.path),diff2=cs2,diff1=cs1,diff='download')}" title="${_('download diff')}" class="tooltip"><img class="icon" src="${h.url('/images/icons/page_white_get.png')}"/></a> |
|
22 | ${c.ignorews_url(h.FID(filenode.changeset.raw_id,filenode.path))} | |
|
23 | ${c.context_url(h.FID(filenode.changeset.raw_id,filenode.path))} | |
|
22 | ${c.ignorews_url(request.GET, h.FID(filenode.changeset.raw_id,filenode.path))} | |
|
23 | ${c.context_url(request.GET, h.FID(filenode.changeset.raw_id,filenode.path))} | |
|
24 | 24 | </div> |
|
25 | 25 | <span style="float:right;margin-top:-3px"> |
|
26 | 26 | <label> |
@@ -4,5 +4,5 b'' | |||
|
4 | 4 | <%def name="breadcrumbs()"></%def> |
|
5 | 5 | <%def name="page_nav()">${self.menu('home')}</%def> |
|
6 | 6 | <%def name="main()"> |
|
7 |
|
|
|
7 | <%include file="index_base.html" args="parent=self"/> | |
|
8 | 8 | </%def> |
@@ -9,7 +9,11 b'' | |||
|
9 | 9 | %if h.HasPermissionAny('hg.admin','hg.create.repository')(): |
|
10 | 10 | <ul class="links"> |
|
11 | 11 | <li> |
|
12 | %if c.group: | |
|
13 | <span>${h.link_to(_('ADD REPOSITORY'),h.url('admin_settings_create_repository',parent_group=c.group.group_id))}</span> | |
|
14 | %else: | |
|
12 | 15 | <span>${h.link_to(_('ADD REPOSITORY'),h.url('admin_settings_create_repository'))}</span> |
|
16 | %endif | |
|
13 | 17 | </li> |
|
14 | 18 | </ul> |
|
15 | 19 | %endif |
@@ -77,7 +81,7 b'' | |||
|
77 | 81 | </td> |
|
78 | 82 | ##REPO NAME AND ICONS |
|
79 | 83 | <td class="reponame"> |
|
80 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'))} | |
|
84 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'),pageargs.get('short_repo_names'))} | |
|
81 | 85 | </td> |
|
82 | 86 | ##DESCRIPTION |
|
83 | 87 | <td><span class="tooltip" title="${h.tooltip(repo['description'])}"> |
@@ -115,7 +119,7 b'' | |||
|
115 | 119 | </div> |
|
116 | 120 | </div> |
|
117 | 121 | <script> |
|
118 | YUD.get('repo_count').innerHTML = ${cnt | |
|
122 | YUD.get('repo_count').innerHTML = ${cnt}; | |
|
119 | 123 | var func = function(node){ |
|
120 | 124 | return node.parentNode.parentNode.parentNode.parentNode; |
|
121 | 125 | } |
@@ -48,7 +48,7 b'' | |||
|
48 | 48 | <tr> |
|
49 | 49 | <th></th> |
|
50 | 50 | <th class="left">${_('Name')}</th> |
|
51 | <th class="left">${_(' | |
|
51 | <th class="left">${_('Revision')}</th> | |
|
52 | 52 | <th class="left">${_('Action')}</th> |
|
53 | 53 | <th class="left">${_('Action')}</th> |
|
54 | 54 | </thead> |
@@ -34,12 +34,22 b'' | |||
|
34 | 34 | ${h.text('repo_name',class_="small")} |
|
35 | 35 | </div> |
|
36 | 36 | </div> |
|
37 | <div class="field"> | |
|
38 | <div class="label"> | |
|
39 | <label for="clone_uri">${_('Clone uri')}:</label> | |
|
40 | </div> | |
|
41 | <div class="input"> | |
|
42 | ${h.text('clone_uri',class_="medium")} | |
|
43 | <span class="help-block">${_('Optional http[s] url from which repository should be cloned.')}</span> | |
|
44 | </div> | |
|
45 | </div> | |
|
37 | 46 | <div class="field"> |
|
38 | 47 | <div class="label"> |
|
39 | 48 | <label for="repo_group">${_('Repository group')}:</label> |
|
40 | 49 | </div> |
|
41 | 50 | <div class="input"> |
|
42 | 51 | ${h.select('repo_group','',c.repo_groups,class_="medium")} |
|
52 | <span class="help-block">${_('Optional select a group to put this repository into.')}</span> | |
|
43 | 53 | </div> |
|
44 | 54 | </div> |
|
45 | 55 | <div class="field"> |
@@ -47,16 +57,18 b'' | |||
|
47 | 57 | <label for="description">${_('Description')}:</label> |
|
48 | 58 | </div> |
|
49 | 59 | <div class="textarea text-area editor"> |
|
50 | ${h.textarea('description' | |
|
60 | ${h.textarea('description')} | |
|
61 | <span class="help-block">${_('Keep it short and to the point. Use a README file for longer descriptions.')}</span> | |
|
51 | 62 | </div> |
|
52 | 63 | </div> |
|
53 | 64 | |
|
54 | 65 | <div class="field"> |
|
55 | 66 | <div class="label label-checkbox"> |
|
56 | <label for="private">${_('Private')}:</label> | |
|
67 | <label for="private">${_('Private repository')}:</label> | |
|
57 | 68 | </div> |
|
58 | 69 | <div class="checkboxes"> |
|
59 | 70 | ${h.checkbox('private',value="True")} |
|
71 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> | |
|
60 | 72 | </div> |
|
61 | 73 | </div> |
|
62 | 74 |
@@ -2,7 +2,8 b' from rhodecode.tests import *' | |||
|
2 | 2 | from rhodecode.model.db import ChangesetComment, Notification, User, \ |
|
3 | 3 | UserNotification |
|
4 | 4 | |
|
5 | class TestChangeSetCommentrController(TestController): | |
|
5 | ||
|
6 | class TestChangeSetCommentsController(TestController): | |
|
6 | 7 | |
|
7 | 8 | def setUp(self): |
|
8 | 9 | for x in ChangesetComment.query().all(): |
@@ -27,7 +28,7 b' class TestChangeSetCommentrController(Te' | |||
|
27 | 28 | rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc' |
|
28 | 29 | text = u'CommentOnRevision' |
|
29 | 30 | |
|
30 | params = {'text':text} | |
|
31 | params = {'text': text} | |
|
31 | 32 | response = self.app.post(url(controller='changeset', action='comment', |
|
32 | 33 | repo_name=HG_REPO, revision=rev), |
|
33 | 34 | params=params) |
@@ -42,13 +43,18 b' class TestChangeSetCommentrController(Te' | |||
|
42 | 43 | self.assertTrue('''<div class="comments-number">%s ''' |
|
43 | 44 | '''comment(s) (0 inline)</div>''' % 1 in response.body) |
|
44 | 45 | |
|
46 | self.assertEqual(Notification.query().count(), 1) | |
|
47 | self.assertEqual(ChangesetComment.query().count(), 1) | |
|
45 | 48 | |
|
46 | self.assertEqual(Notification.query().count(), 1) | |
|
47 | 49 | notification = Notification.query().all()[0] |
|
48 | 50 | |
|
49 | self.assertEqual(notification.type_, Notification.TYPE_CHANGESET_COMMENT) | |
|
50 | self.assertTrue((u'/vcs_test_hg/changeset/27cd5cce30c96924232df' | |
|
51 | 'fcd24178a07ffeb5dfc#comment-1') in notification.subject) | |
|
51 | ID = ChangesetComment.query().first().comment_id | |
|
52 | self.assertEqual(notification.type_, | |
|
53 | Notification.TYPE_CHANGESET_COMMENT) | |
|
54 | sbj = (u'/vcs_test_hg/changeset/' | |
|
55 | '27cd5cce30c96924232dffcd24178a07ffeb5dfc#comment-%s' % ID) | |
|
56 | print "%s vs %s" % (sbj, notification.subject) | |
|
57 | self.assertTrue(sbj in notification.subject) | |
|
52 | 58 | |
|
53 | 59 | def test_create_inline(self): |
|
54 | 60 | self.log_user() |
@@ -57,7 +63,7 b' class TestChangeSetCommentrController(Te' | |||
|
57 | 63 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
58 | 64 | line = 'n1' |
|
59 | 65 | |
|
60 | params = {'text':text, 'f_path':f_path, 'line':line} | |
|
66 | params = {'text': text, 'f_path': f_path, 'line': line} | |
|
61 | 67 | response = self.app.post(url(controller='changeset', action='comment', |
|
62 | 68 | repo_name=HG_REPO, revision=rev), |
|
63 | 69 | params=params) |
@@ -76,11 +82,16 b' class TestChangeSetCommentrController(Te' | |||
|
76 | 82 | '''repositorypy">''' in response.body) |
|
77 | 83 | |
|
78 | 84 | self.assertEqual(Notification.query().count(), 1) |
|
79 | notification = Notification.query().all()[0] | |
|
85 | self.assertEqual(ChangesetComment.query().count(), 1) | |
|
80 | 86 | |
|
81 | self.assertEqual(notification.type_, Notification.TYPE_CHANGESET_COMMENT) | |
|
82 | self.assertTrue((u'/vcs_test_hg/changeset/27cd5cce30c96924232df' | |
|
83 | 'fcd24178a07ffeb5dfc#comment-1') in notification.subject) | |
|
87 | notification = Notification.query().all()[0] | |
|
88 | ID = ChangesetComment.query().first().comment_id | |
|
89 | self.assertEqual(notification.type_, | |
|
90 | Notification.TYPE_CHANGESET_COMMENT) | |
|
91 | sbj = (u'/vcs_test_hg/changeset/' | |
|
92 | '27cd5cce30c96924232dffcd24178a07ffeb5dfc#comment-%s' % ID) | |
|
93 | print "%s vs %s" % (sbj, notification.subject) | |
|
94 | self.assertTrue(sbj in notification.subject) | |
|
84 | 95 | |
|
85 | 96 | def test_create_with_mention(self): |
|
86 | 97 | self.log_user() |
@@ -103,7 +114,6 b' class TestChangeSetCommentrController(Te' | |||
|
103 | 114 | self.assertTrue('''<div class="comments-number">%s ''' |
|
104 | 115 | '''comment(s) (0 inline)</div>''' % 1 in response.body) |
|
105 | 116 | |
|
106 | ||
|
107 | 117 | self.assertEqual(Notification.query().count(), 2) |
|
108 | 118 | users = [x.user.username for x in UserNotification.query().all()] |
|
109 | 119 | |
@@ -115,7 +125,7 b' class TestChangeSetCommentrController(Te' | |||
|
115 | 125 | rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc' |
|
116 | 126 | text = u'CommentOnRevision' |
|
117 | 127 | |
|
118 | params = {'text':text} | |
|
128 | params = {'text': text} | |
|
119 | 129 | response = self.app.post(url(controller='changeset', action='comment', |
|
120 | 130 | repo_name=HG_REPO, revision=rev), |
|
121 | 131 | params=params) |
@@ -124,7 +134,6 b' class TestChangeSetCommentrController(Te' | |||
|
124 | 134 | self.assertEqual(len(comments), 1) |
|
125 | 135 | comment_id = comments[0].comment_id |
|
126 | 136 | |
|
127 | ||
|
128 | 137 | self.app.delete(url(controller='changeset', |
|
129 | 138 | action='delete_comment', |
|
130 | 139 | repo_name=HG_REPO, |
@@ -1,10 +1,11 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | from rhodecode.tests import * |
|
3 | 3 | from rhodecode.model.db import User, Notification |
|
4 | from rhodecode.lib import generate_api_key | |
|
4 | from rhodecode.lib.utils2 import generate_api_key | |
|
5 | 5 | from rhodecode.lib.auth import check_password |
|
6 | 6 | from rhodecode.model.meta import Session |
|
7 | 7 | |
|
8 | ||
|
8 | 9 | class TestLoginController(TestController): |
|
9 | 10 | |
|
10 | 11 | def tearDown(self): |
@@ -65,22 +65,20 b' TEST_URLS += [' | |||
|
65 | 65 | |
|
66 | 66 | class TestLibs(unittest.TestCase): |
|
67 | 67 | |
|
68 | ||
|
69 | 68 | def test_uri_filter(self): |
|
70 | from rhodecode.lib import uri_filter | |
|
69 | from rhodecode.lib.utils2 import uri_filter | |
|
71 | 70 | |
|
72 | 71 | for url in TEST_URLS: |
|
73 | 72 | self.assertEqual(uri_filter(url[0]), url[1]) |
|
74 | 73 | |
|
75 | 74 | def test_credentials_filter(self): |
|
76 | from rhodecode.lib import credentials_filter | |
|
75 | from rhodecode.lib.utils2 import credentials_filter | |
|
77 | 76 | |
|
78 | 77 | for url in TEST_URLS: |
|
79 | 78 | self.assertEqual(credentials_filter(url[0]), url[2]) |
|
80 | 79 | |
|
81 | ||
|
82 | 80 | def test_str2bool(self): |
|
83 | from rhodecode.lib import str2bool | |
|
81 | from rhodecode.lib.utils2 import str2bool | |
|
84 | 82 | test_cases = [ |
|
85 | 83 | ('t', True), |
|
86 | 84 | ('true', True), |
@@ -103,9 +101,8 b' class TestLibs(unittest.TestCase):' | |||
|
103 | 101 | for case in test_cases: |
|
104 | 102 | self.assertEqual(str2bool(case[0]), case[1]) |
|
105 | 103 | |
|
106 | ||
|
107 | 104 | def test_mention_extractor(self): |
|
108 | from rhodecode.lib import extract_mentioned_users | |
|
105 | from rhodecode.lib.utils2 import extract_mentioned_users | |
|
109 | 106 | sample = ("@first hi there @marcink here's my email marcin@email.com " |
|
110 | 107 | "@lukaszb check it pls @ ttwelve @D[] @one@two@three " |
|
111 | 108 | "@MARCIN @maRCiN @2one_more22") |
@@ -5,7 +5,7 b' from rhodecode.tests import *' | |||
|
5 | 5 | from rhodecode.model.repos_group import ReposGroupModel |
|
6 | 6 | from rhodecode.model.repo import RepoModel |
|
7 | 7 | from rhodecode.model.db import RepoGroup, User, Notification, UserNotification, \ |
|
8 | UsersGroup, UsersGroupMember, Permission | |
|
8 | UsersGroup, UsersGroupMember, Permission, UsersGroupRepoGroupToPerm | |
|
9 | 9 | from sqlalchemy.exc import IntegrityError |
|
10 | 10 | from rhodecode.model.user import UserModel |
|
11 | 11 | |
@@ -430,6 +430,11 b' class TestPermissions(unittest.TestCase)' | |||
|
430 | 430 | username=u'u1', password=u'qweqwe', |
|
431 | 431 | email=u'u1@rhodecode.org', name=u'u1', lastname=u'u1' |
|
432 | 432 | ) |
|
433 | self.u2 = UserModel().create_or_update( | |
|
434 | username=u'u2', password=u'qweqwe', | |
|
435 | email=u'u2@rhodecode.org', name=u'u2', lastname=u'u2' | |
|
436 | ) | |
|
437 | self.anon = User.get_by_username('default') | |
|
433 | 438 | self.a1 = UserModel().create_or_update( |
|
434 | 439 | username=u'a1', password=u'qweqwe', |
|
435 | 440 | email=u'a1@rhodecode.org', name=u'a1', lastname=u'a1', admin=True |
@@ -437,7 +442,10 b' class TestPermissions(unittest.TestCase)' | |||
|
437 | 442 | Session.commit() |
|
438 | 443 | |
|
439 | 444 | def tearDown(self): |
|
445 | if hasattr(self, 'test_repo'): | |
|
446 | RepoModel().delete(repo=self.test_repo) | |
|
440 | 447 | UserModel().delete(self.u1) |
|
448 | UserModel().delete(self.u2) | |
|
441 | 449 | UserModel().delete(self.a1) |
|
442 | 450 | if hasattr(self, 'g1'): |
|
443 | 451 | ReposGroupModel().delete(self.g1.group_id) |
@@ -578,3 +586,130 b' class TestPermissions(unittest.TestCase)' | |||
|
578 | 586 | new_perm_h) |
|
579 | 587 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
580 | 588 | perms['repositories_groups']) |
|
589 | ||
|
590 | def test_repo_in_group_permissions(self): | |
|
591 | self.g1 = _make_group('group1', skip_if_exists=True) | |
|
592 | self.g2 = _make_group('group2', skip_if_exists=True) | |
|
593 | Session.commit() | |
|
594 | # both perms should be read ! | |
|
595 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
|
596 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
|
597 | {u'group1': u'group.read', u'group2': u'group.read'}) | |
|
598 | ||
|
599 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
600 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
601 | {u'group1': u'group.read', u'group2': u'group.read'}) | |
|
602 | ||
|
603 | #Change perms to none for both groups | |
|
604 | ReposGroupModel().grant_user_permission(repos_group=self.g1, | |
|
605 | user=self.anon, | |
|
606 | perm='group.none') | |
|
607 | ReposGroupModel().grant_user_permission(repos_group=self.g2, | |
|
608 | user=self.anon, | |
|
609 | perm='group.none') | |
|
610 | ||
|
611 | ||
|
612 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
|
613 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
|
614 | {u'group1': u'group.none', u'group2': u'group.none'}) | |
|
615 | ||
|
616 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
617 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
618 | {u'group1': u'group.none', u'group2': u'group.none'}) | |
|
619 | ||
|
620 | # add repo to group | |
|
621 | form_data = { | |
|
622 | 'repo_name':HG_REPO, | |
|
623 | 'repo_name_full':os.path.join(self.g1.group_name,HG_REPO), | |
|
624 | 'repo_type':'hg', | |
|
625 | 'clone_uri':'', | |
|
626 | 'repo_group':self.g1.group_id, | |
|
627 | 'description':'desc', | |
|
628 | 'private':False | |
|
629 | } | |
|
630 | self.test_repo = RepoModel().create(form_data, cur_user=self.u1) | |
|
631 | Session.commit() | |
|
632 | ||
|
633 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
|
634 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
|
635 | {u'group1': u'group.none', u'group2': u'group.none'}) | |
|
636 | ||
|
637 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
638 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
639 | {u'group1': u'group.none', u'group2': u'group.none'}) | |
|
640 | ||
|
641 | #grant permission for u2 ! | |
|
642 | ReposGroupModel().grant_user_permission(repos_group=self.g1, | |
|
643 | user=self.u2, | |
|
644 | perm='group.read') | |
|
645 | ReposGroupModel().grant_user_permission(repos_group=self.g2, | |
|
646 | user=self.u2, | |
|
647 | perm='group.read') | |
|
648 | Session.commit() | |
|
649 | self.assertNotEqual(self.u1, self.u2) | |
|
650 | #u1 and anon should have not change perms while u2 should ! | |
|
651 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
|
652 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
|
653 | {u'group1': u'group.none', u'group2': u'group.none'}) | |
|
654 | ||
|
655 | u2_auth = AuthUser(user_id=self.u2.user_id) | |
|
656 | self.assertEqual(u2_auth.permissions['repositories_groups'], | |
|
657 | {u'group1': u'group.read', u'group2': u'group.read'}) | |
|
658 | ||
|
659 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
660 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
661 | {u'group1': u'group.none', u'group2': u'group.none'}) | |
|
662 | ||
|
663 | def test_repo_group_user_as_user_group_member(self): | |
|
664 | # create Group1 | |
|
665 | self.g1 = _make_group('group1', skip_if_exists=True) | |
|
666 | Session.commit() | |
|
667 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
668 | ||
|
669 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
670 | {u'group1': u'group.read'}) | |
|
671 | ||
|
672 | # set default permission to none | |
|
673 | ReposGroupModel().grant_user_permission(repos_group=self.g1, | |
|
674 | user=self.anon, | |
|
675 | perm='group.none') | |
|
676 | # make group | |
|
677 | self.ug1 = UsersGroupModel().create('G1') | |
|
678 | # add user to group | |
|
679 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) | |
|
680 | Session.commit() | |
|
681 | ||
|
682 | # check if user is in the group | |
|
683 | membrs = [x.user_id for x in UsersGroupModel().get(self.ug1.users_group_id).members] | |
|
684 | self.assertEqual(membrs, [self.u1.user_id]) | |
|
685 | # add some user to that group | |
|
686 | ||
|
687 | # check his permissions | |
|
688 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
689 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
690 | {u'group1': u'group.none'}) | |
|
691 | ||
|
692 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
|
693 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
|
694 | {u'group1': u'group.none'}) | |
|
695 | ||
|
696 | # grant ug1 read permissions for | |
|
697 | ReposGroupModel().grant_users_group_permission(repos_group=self.g1, | |
|
698 | group_name=self.ug1, | |
|
699 | perm='group.read') | |
|
700 | Session.commit() | |
|
701 | # check if the | |
|
702 | obj = Session.query(UsersGroupRepoGroupToPerm)\ | |
|
703 | .filter(UsersGroupRepoGroupToPerm.group == self.g1)\ | |
|
704 | .filter(UsersGroupRepoGroupToPerm.users_group == self.ug1)\ | |
|
705 | .scalar() | |
|
706 | self.assertEqual(obj.permission.permission_name, 'group.read') | |
|
707 | ||
|
708 | a1_auth = AuthUser(user_id=self.anon.user_id) | |
|
709 | ||
|
710 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
|
711 | {u'group1': u'group.none'}) | |
|
712 | ||
|
713 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
|
714 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
|
715 | {u'group1': u'group.read'}) |
@@ -94,8 +94,9 b' setup(' | |||
|
94 | 94 | main = pylons.util:PylonsInstaller |
|
95 | 95 | |
|
96 | 96 | [paste.global_paster_command] |
|
97 | make-index | |
|
98 | upgrade-db = rhodecode.lib.dbmigrate:UpgradeDb | |
|
97 | make-index=rhodecode.lib.indexers:MakeIndex | |
|
98 | make-rcext=rhodecode.config.rcextensions.make_rcextensions:MakeRcExt | |
|
99 | upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb | |
|
99 | 100 | celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand |
|
100 | 101 | """, |
|
101 | 102 | ) |
@@ -17,6 +17,7 b' pdebug = false' | |||
|
17 | 17 | #error_email_from = paste_error@localhost |
|
18 | 18 | #app_email_from = rhodecode-noreply@localhost |
|
19 | 19 | #error_message = |
|
20 | #email_prefix = [RhodeCode] | |
|
20 | 21 | |
|
21 | 22 | #smtp_server = mail.server.com |
|
22 | 23 | #smtp_username = |
@@ -24,6 +25,8 b' pdebug = false' | |||
|
24 | 25 | #smtp_port = |
|
25 | 26 | #smtp_use_tls = false |
|
26 | 27 | #smtp_use_ssl = true |
|
28 | # Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) | |
|
29 | #smtp_auth = | |
|
27 | 30 | |
|
28 | 31 | [server:main] |
|
29 | 32 | ##nr of threads to spawn |
@@ -53,6 +56,42 b' commit_parse_limit = 25' | |||
|
53 | 56 | use_gravatar = true |
|
54 | 57 | container_auth_enabled = false |
|
55 | 58 | proxypass_auth_enabled = false |
|
59 | ||
|
60 | ||
|
61 | ## overwrite schema of clone url | |
|
62 | ## available vars: | |
|
63 | ## scheme - http/https | |
|
64 | ## user - current user | |
|
65 | ## pass - password | |
|
66 | ## netloc - network location | |
|
67 | ## path - usually repo_name | |
|
68 | ||
|
69 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} | |
|
70 | ||
|
71 | ## issue tracking mapping for commits messages | |
|
72 | ## comment out issue_pat, issue_server, issue_prefix to enable | |
|
73 | ||
|
74 | ## pattern to get the issues from commit messages | |
|
75 | ## default one used here is #<numbers> with a regex passive group for `#` | |
|
76 | ## {id} will be all groups matched from this pattern | |
|
77 | ||
|
78 | issue_pat = (?:\s*#)(\d+) | |
|
79 | ||
|
80 | ## server url to the issue, each {id} will be replaced with match | |
|
81 | ## fetched from the regex and {repo} is replaced with repository name | |
|
82 | ||
|
83 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} | |
|
84 | ||
|
85 | ## prefix to add to link to indicate it's an url | |
|
86 | ## #314 will be replaced by <issue_prefix><id> | |
|
87 | ||
|
88 | issue_prefix = # | |
|
89 | ||
|
90 | ## instance-id prefix | |
|
91 | ## a prefix key for this instance used for cache invalidation when running | |
|
92 | ## multiple instances of rhodecode, make sure it's globally unique for | |
|
93 | ## all running rhodecode instances. Leave empty if you don't use it | |
|
94 | instance_id = | |
|
56 | 95 | |
|
57 | 96 | #################################### |
|
58 | 97 | ### CELERY CONFIG #### |
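Note on the issue tracker settings added in the hunk above: `issue_pat`, `issue_server_link` and `issue_prefix` describe a regex-and-substitution scheme that turns references such as #314 in commit messages into links. The sketch below only illustrates that scheme under the sample values from the hunk; the helper name `link_issues` is hypothetical and this is not RhodeCode's actual implementation::

    import re

    # Sample values mirroring the config hunk above (illustrative only).
    ISSUE_PAT = r'(?:\s*#)(\d+)'
    ISSUE_SERVER_LINK = 'https://myissueserver.com/{repo}/issue/{id}'
    ISSUE_PREFIX = '#'

    def link_issues(message, repo_name):
        """Hypothetical helper: wrap issue references in HTML links."""
        def _link(match):
            issue_id = match.group(1)
            url = (ISSUE_SERVER_LINK
                   .replace('{repo}', repo_name)
                   .replace('{id}', issue_id))
            link = '<a href="%s">%s%s</a>' % (url, ISSUE_PREFIX, issue_id)
            # group(0) also holds any whitespace the pattern consumed,
            # so keep it and swap only the '#<id>' part for the link
            return match.group(0).replace('#' + issue_id, link)
        return re.sub(ISSUE_PAT, _link, message)

    # e.g. link_issues('fixes #314', 'vcs_test_hg') ->
    #   'fixes <a href="https://myissueserver.com/vcs_test_hg/issue/314">#314</a>'

The commented-out `#clone_uri` template earlier in the same hunk follows a similar idea: `{scheme}`, `{user}`, `{pass}`, `{netloc}` and `{path}` are filled in per request to build the clone URL shown in the UI.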
@@ -86,6 +125,7 b' celery.always.eager = false' | |||
|
86 | 125 | #################################### |
|
87 | 126 | beaker.cache.data_dir=/tmp/data/cache/data |
|
88 | 127 | beaker.cache.lock_dir=/tmp/data/cache/lock |
|
128 | ||
|
89 | 129 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
90 | 130 | |
|
91 | 131 | beaker.cache.super_short_term.type=memory |
@@ -118,12 +158,27 b' beaker.cache.sql_cache_long.key_length =' | |||
|
118 | 158 | ## Type of storage used for the session, current types are |
|
119 | 159 | ## dbm, file, memcached, database, and memory. |
|
120 | 160 | ## The storage uses the Container API |
|
121 | ##that is also used by the cache system. | |
|
122 | beaker.session.type = file | |
|
161 | ## that is also used by the cache system. | |
|
162 | ||
|
163 | ## db session example | |
|
164 | ||
|
165 | #beaker.session.type = ext:database | |
|
166 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode | |
|
167 | #beaker.session.table_name = db_session | |
|
168 | ||
|
169 | ## encrypted cookie session, good for many instances | |
|
170 | #beaker.session.type = cookie | |
|
123 | 171 | |
|
172 | beaker.session.type = file | |
|
124 | 173 | beaker.session.key = rhodecode |
|
125 | beaker.session.secret = g654dcno0-9873jhgfreyu | |
|
174 | # secure cookie requires AES python libraries | |
|
175 | #beaker.session.encrypt_key = g654dcno0-9873jhgfreyu | |
|
176 | #beaker.session.validate_key = 9712sds2212c--zxc123 | |
|
126 | 177 | beaker.session.timeout = 36000 |
|
178 | beaker.session.httponly = true | |
|
179 | ||
|
180 | ## uncomment for https secure cookie | |
|
181 | beaker.session.secure = false | |
|
127 | 182 | |
|
128 | 183 | ##auto save the session to not to use .save() |
|
129 | 184 | beaker.session.auto = False |
@@ -131,7 +186,7 b' beaker.session.auto = False' | |||
|
131 | 186 | ##true exire at browser close |
|
132 | 187 | #beaker.session.cookie_expires = 3600 |
|
133 | 188 | |
|
134 | ||
|
189 | ||
|
135 | 190 | ################################################################################ |
|
136 | 191 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
137 | 192 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
@@ -151,15 +206,17 b' logview.pylons.util = #eee' | |||
|
151 | 206 | ######################################################### |
|
152 | 207 | sqlalchemy.db1.url = sqlite:///%(here)s/test.db |
|
153 | 208 | #sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode_tests |
|
154 | #sqlalchemy.db1.echo = false | |
|
155 | #sqlalchemy.db1.pool_recycle = 3600 | |
|
156 | sqlalchemy.convert_unicode = true | |
|
209 | #sqlalchemy.db1.url = mysql://root:qwe123qwe@localhost/rhodecode_tests | |
|
210 | ||
|
211 | sqlalchemy.db1.echo = false | |
|
212 | sqlalchemy.db1.pool_recycle = 3600 | |
|
213 | sqlalchemy.db1.convert_unicode = true | |
|
157 | 214 | |
|
158 | 215 | ################################ |
|
159 | 216 | ### LOGGING CONFIGURATION #### |
|
160 | 217 | ################################ |
|
161 | 218 | [loggers] |
|
162 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates | |
|
219 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer | |
|
163 | 220 | |
|
164 | 221 | [handlers] |
|
165 | 222 | keys = console |
@@ -205,6 +262,12 b' handlers = console' | |||
|
205 | 262 | qualname = sqlalchemy.engine |
|
206 | 263 | propagate = 0 |
|
207 | 264 | |
|
265 | [logger_whoosh_indexer] | |
|
266 | level = DEBUG | |
|
267 | handlers = | |
|
268 | qualname = whoosh_indexer | |
|
269 | propagate = 1 | |
|
270 | ||
|
208 | 271 | ############## |
|
209 | 272 | ## HANDLERS ## |
|
210 | 273 | ############## |