@@ -41,7 +41,8 @@ from rhodecode.lib.base import BaseRepoC
 from rhodecode.lib.utils import OrderedDict, EmptyChangeset
 
 from rhodecode.lib.celerylib import run_task
-from rhodecode.lib.celerylib.tasks import get_commits_stats
+from rhodecode.lib.celerylib.tasks import get_commits_stats, \
+    LANGUAGES_EXTENSIONS_MAP
 from rhodecode.lib.helpers import RepoPage
 
 try:
@@ -131,8 +132,14 @@ class SummaryController(BaseRepoControll
             lang_stats = json.loads(stats.languages)
             c.commit_data = stats.commit_activity
             c.overview_data = stats.commit_activity_combined
+
+            lang_stats = [(x, {"count":y,
+                               "desc":LANGUAGES_EXTENSIONS_MAP.get(x)})
+                          for x, y in lang_stats.items()]
+            print lang_stats
+
             c.trending_languages = json.dumps(OrderedDict(
-                                       sorted(lang_stats.items(), reverse=True,
+                                       sorted(lang_stats, reverse=True,
                                               key=lambda k: k[1])[:10]
                                         )
                                     )
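For orientation, a minimal sketch (not part of the changeset; the counts and the stand-in map are invented, and the sort key is spelled out on the count for readability) of the structure the controller now serializes for the template: each extension is paired with its count and the description looked up in LANGUAGES_EXTENSIONS_MAP, and only the ten most frequent entries are kept.

    # Illustrative only -- sample data, not RhodeCode code.
    import json
    from collections import OrderedDict

    LANGUAGES_EXTENSIONS_MAP = {'py': ['Python'], 'js': ['Javascript']}   # stand-in map

    raw = {'py': 120, 'js': 30, 'xyz': 5}      # shape of json.loads(stats.languages)
    lang_stats = [(x, {"count": y,
                       "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                  for x, y in raw.items()]
    trending_languages = json.dumps(OrderedDict(
        sorted(lang_stats, reverse=True, key=lambda k: k[1]["count"])[:10]))
    # -> {"py": {"count": 120, "desc": ["Python"]}, "js": {"count": 30, "desc": ["Javascript"]},
    #     "xyz": {"count": 5, "desc": null}}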
@@ -31,6 +31,8 @@ import logging
 
 from time import mktime
 from operator import itemgetter
+from pygments import lexers
+from string import lower
 
 from pylons import config
 from pylons.i18n.translation import _
@@ -60,6 +62,43 @@ except ImportError:
 
 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 
+LANGUAGES_EXTENSIONS_MAP = {}
+
+
+def __clean(s):
+
+    s = s.lstrip('*')
+    s = s.lstrip('.')
+
+    if s.find('[') != -1:
+        exts = []
+        start, stop = s.find('['), s.find(']')
+
+        for suffix in s[start + 1:stop]:
+            exts.append(s[:s.find('[')] + suffix)
+        return map(lower, exts)
+    else:
+        return map(lower, [s])
+
+for lx, t in sorted(lexers.LEXERS.items()):
+    m = map(__clean, t[-2])
+    if m:
+        m = reduce(lambda x, y: x + y, m)
+        for ext in m:
+            desc = lx.replace('Lexer', '')
+            if ext in LANGUAGES_EXTENSIONS_MAP:
+                if desc not in LANGUAGES_EXTENSIONS_MAP[ext]:
+                    LANGUAGES_EXTENSIONS_MAP[ext].append(desc)
+            else:
+                LANGUAGES_EXTENSIONS_MAP[ext] = [desc]
+
+#Additional mappings that are not present in the pygments lexers
+# NOTE: that this will overide any mappings in LANGUAGES_EXTENSIONS_MAP
+ADDITIONAL_MAPPINGS = {'xaml': 'XAML'}
+
+LANGUAGES_EXTENSIONS_MAP.update(ADDITIONAL_MAPPINGS)
+
+
 def get_session():
     if CELERY_ON:
         engine = engine_from_config(config, 'sqlalchemy.db1.')
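As a rough restatement of the glob expansion done by the new __clean helper above (a sketch, not the committed code): the patch reads the second-to-last slot of each lexers.LEXERS entry (t[-2]), which holds filename patterns such as '*.py' or '*.php[345]', and turns them into lower-cased extension keys.

    # Sketch only; mirrors the intent of __clean under the assumptions stated above.
    def expand(pattern):
        """'*.php[345]' -> ['php3', 'php4', 'php5'];  '*.py' -> ['py']"""
        s = pattern.lstrip('*').lstrip('.')
        if '[' in s:
            start, stop = s.find('['), s.find(']')
            return [(s[:start] + suffix).lower() for suffix in s[start + 1:stop]]
        return [s.lower()]

    print(expand('*.php[345]'))   # ['php3', 'php4', 'php5']
    print(expand('Makefile.*'))   # ['makefile.*'] -- embedded wildcards are kept verbatim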
@@ -67,11 +106,13 @@ def get_session():
     sa = meta.Session()
     return sa
 
+
 def get_repos_path():
     sa = get_session()
     q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
     return q.ui_value
 
+
 @task(ignore_result=True)
 @locked_task
 def whoosh_index(repo_location, full_index):
@@ -82,6 +123,7 @@ def whoosh_index(repo_location, full_ind
                          repo_location=repo_location, sa=get_session())\
         .run(full_index=full_index)
 
+
 @task(ignore_result=True)
 @locked_task
 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
@@ -93,9 +135,9 @@ def get_commits_stats(repo_name, ts_min_
     from rhodecode.model.db import Statistics, Repository
 
     #for js data compatibilty
-    author_key_cleaner = lambda k: person(k).replace('"', "")
+    akc = lambda k: person(k).replace('"', "")
 
-    commits_by_day_author_aggregate = {}
+    co_day_auth_aggr = {}
     commits_by_day_aggregate = {}
     repos_path = get_repos_path()
     p = os.path.join(repos_path, repo_name)
@@ -130,7 +172,7 @@ def get_commits_stats(repo_name, ts_min_
         commits_by_day_aggregate = OrderedDict(
                                        json.loads(
                                         cur_stats.commit_activity_combined))
-        commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
+        co_day_auth_aggr = json.loads(cur_stats.commit_activity)
 
     log.debug('starting parsing %s', parse_limit)
     lmktime = mktime
@@ -138,22 +180,21 @@ def get_commits_stats(repo_name, ts_min_
     last_rev = last_rev + 1 if last_rev > 0 else last_rev
 
     for cs in repo[last_rev:last_rev + parse_limit]:
-        last_cs = cs #remember last parsed changeset
+        last_cs = cs # remember last parsed changeset
         k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                     cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 
-        if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
+        if akc(cs.author) in co_day_auth_aggr:
             try:
-                l = [timegetter(x) for x in
-                     commits_by_day_author_aggregate[author_key_cleaner(cs.author)]['data']]
+                l = [timegetter(x) for x in
+                     co_day_auth_aggr[akc(cs.author)]['data']]
                 time_pos = l.index(k)
             except ValueError:
                 time_pos = False
 
             if time_pos >= 0 and time_pos is not False:
 
-                datadict = commits_by_day_author_aggregate\
-                [author_key_cleaner(cs.author)]['data'][time_pos]
+                datadict = co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 
                 datadict["commits"] += 1
                 datadict["added"] += len(cs.added)
@@ -163,44 +204,44 @@ def get_commits_stats(repo_name, ts_min_
             else:
                 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
-                    datadict = {"time":k,
-                                "commits":1,
-                                "added":len(cs.added),
-                                "changed":len(cs.changed),
-                                "removed":len(cs.removed),
+                    datadict = {"time": k,
+                                "commits": 1,
+                                "added": len(cs.added),
+                                "changed": len(cs.changed),
+                                "removed": len(cs.removed),
                                }
-                    commits_by_day_author_aggregate[author_key_cleaner(cs.author)]['data']\
-                    .append(datadict)
+                    co_day_auth_aggr[akc(cs.author)]['data']\
+                        .append(datadict)
 
         else:
             if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
-                commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
-                                    "label":author_key_cleaner(cs.author),
-                                    "data":[{"time":k,
+                co_day_auth_aggr[akc(cs.author)] = {
+                                    "label": akc(cs.author),
+                                    "data": [{"time":k,
                                              "commits":1,
                                              "added":len(cs.added),
                                              "changed":len(cs.changed),
                                              "removed":len(cs.removed),
                                             }],
-                                    "schema":["commits"],
+                                    "schema": ["commits"],
                                     }
 
         #gather all data by day
-        if commits_by_day_aggregate.has_key(k):
+        if k in commits_by_day_aggregate:
             commits_by_day_aggregate[k] += 1
         else:
             commits_by_day_aggregate[k] = 1
 
     overview_data = sorted(commits_by_day_aggregate.items(), key=itemgetter(0))
-    if not commits_by_day_author_aggregate:
-        commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
-            "label":author_key_cleaner(repo.contact),
-            "data":[0, 1],
-            "schema":["commits"],
+    if not co_day_auth_aggr:
+        co_day_auth_aggr[akc(repo.contact)] = {
+            "label": akc(repo.contact),
+            "data": [0, 1],
+            "schema": ["commits"],
         }
 
     stats = cur_stats if cur_stats else Statistics()
-    stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
+    stats.commit_activity = json.dumps(co_day_auth_aggr)
     stats.commit_activity_combined = json.dumps(overview_data)
 
     log.debug('last revison %s', last_rev)
@@ -225,6 +266,7 @@ def get_commits_stats(repo_name, ts_min_
 
     return True
 
+
 @task(ignore_result=True)
 def reset_user_password(user_email):
     try:
@@ -259,13 +301,13 @@ def reset_user_password(user_email):
                     'Your new rhodecode password:%s' % (new_passwd))
         log.info('send new password mail to %s', user_email)
 
-
     except:
         log.error('Failed to update user password')
        log.error(traceback.format_exc())
 
     return True
 
+
 @task(ignore_result=True)
 def send_email(recipients, subject, body):
     """
@@ -306,6 +348,7 @@ def send_email(recipients, subject, body
         return False
     return True
 
+
 @task(ignore_result=True)
 def create_repo_fork(form_data, cur_user):
     try:
@@ -328,60 +371,8 @@ def create_repo_fork(form_data, cur_user
     backend = get_backend(alias)
     backend(str(repo_fork_path), create=True, src_url=str(repo_path))
 
+
 def __get_codes_stats(repo_name):
-    LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
-    'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
-    'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
-    'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
-    'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
-    'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
-    'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
-    'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
-    'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
-    'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
-    'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
-    'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
-    'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
-    'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
-    'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
-    'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
-    'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
-    'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
-    'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
-    'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
-    'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
-    'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
-    'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
-    'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
-    'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
-    'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
-    'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
-    'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
-    'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
-    'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
-    'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
-    'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
-    'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
-    'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
-    '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
-    'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
-    'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
-    'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
-    'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
-    'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
-    'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
-    'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
-    'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
-    'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
-    'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
-    'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
-    'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
-    'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
-    'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
-    'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
-    'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
-    'VbNetAspx', 'sc': 'Python'}
-
     repos_path = get_repos_path()
     p = os.path.join(repos_path, repo_name)
     repo = get_repo(p)
@@ -390,14 +381,12 @@ def __get_codes_stats(repo_name):
 
     def aggregate(cs):
         for f in cs[2]:
-            ext = f.extension
-            key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
-            key = key or ext
+            ext = lower(f.extension)
             if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
-                if code_stats.has_key(key):
-                    code_stats[key] += 1
+                if ext in code_stats:
+                    code_stats[ext] += 1
                 else:
-                    code_stats[key] = 1
+                    code_stats[ext] = 1
 
     map(aggregate, tip.walk('/'))
 
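The practical effect of the __get_codes_stats change above can be sketched with hypothetical file names (not from the changeset): extensions are lower-cased before the lookup, so differently-cased files collapse into one bucket, and anything outside the map is skipped (the real helper additionally skips binary files).

    # Hypothetical input; shows the kind of keys the summary template will receive.
    files = ['setup.py', 'tools/build.PY', 'logo.png', 'notes.txt']
    LANGUAGES_EXTENSIONS_MAP = {'py': ['Python'], 'txt': ['Text']}   # stand-in map

    code_stats = {}
    for name in files:
        ext = name.rsplit('.', 1)[-1].lower()
        if ext in LANGUAGES_EXTENSIONS_MAP:
            code_stats[ext] = code_stats.get(ext, 0) + 1

    print(code_stats)   # {'py': 2, 'txt': 1} -- 'png' is not in the stand-in map, so it is dropped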
@@ -197,26 +197,26 @@
         var total = 0;
         var no_data = true;
         for (k in data){
-            total += data[k];
+            total += data[k].count;
            no_data = false;
         }
         var tbl = document.createElement('table');
         tbl.setAttribute('class','trending_language_tbl');
-        var cnt =0;
+        var cnt = 0;
         for (k in data){
-            cnt+=1;
+            cnt += 1;
             var hide = cnt>2;
             var tr = document.createElement('tr');
             if (hide){
                 tr.setAttribute('style','display:none');
                 tr.setAttribute('class','stats_hidden');
             }
-            var percentage = Math.round((data[k]/total*100),2);
-            var value = data[k];
+            var percentage = Math.round((data[k].count/total*100),2);
+            var value = data[k].count;
             var td1 = document.createElement('td');
-            td1.width=150;
+            td1.width = 150;
             var trending_language_label = document.createElement('div');
-            trending_language_label.innerHTML = k;
+            trending_language_label.innerHTML = data[k].desc+" ("+k+")";
             td1.appendChild(trending_language_label);
 
             var td2 = document.createElement('td');
@@ -226,7 +226,7 @@
 
             trending_language.title = k+" "+nr_files;
 
-            if (percentage>2){
+            if (percentage>22){
                 trending_language.innerHTML = "<b style='font-size:0.8em'>"+percentage+"% "+nr_files+ "</b>";
             }
             else{
@@ -245,7 +245,7 @@
         var td=document.createElement('td');
         lnk = document.createElement('a');
         lnk.href='#';
-        lnk.innerHTML = "${_(
+        lnk.innerHTML = "${_('show more')}";
         lnk.id='code_stats_show_more';
         td.appendChild(lnk);
         show_more.appendChild(td);