|
@@
-1,1114
+1,1114
b''
|
|
1
|
# -*- coding: utf-8 -*-
|
|
1
|
# -*- coding: utf-8 -*-
|
|
2
|
|
|
2
|
|
|
3
|
# Copyright (C) 2010-2016 RhodeCode GmbH
|
|
3
|
# Copyright (C) 2010-2016 RhodeCode GmbH
|
|
4
|
#
|
|
4
|
#
|
|
5
|
# This program is free software: you can redistribute it and/or modify
|
|
5
|
# This program is free software: you can redistribute it and/or modify
|
|
6
|
# it under the terms of the GNU Affero General Public License, version 3
|
|
6
|
# it under the terms of the GNU Affero General Public License, version 3
|
|
7
|
# (only), as published by the Free Software Foundation.
|
|
7
|
# (only), as published by the Free Software Foundation.
|
|
8
|
#
|
|
8
|
#
|
|
9
|
# This program is distributed in the hope that it will be useful,
|
|
9
|
# This program is distributed in the hope that it will be useful,
|
|
10
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
10
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
11
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
12
|
# GNU General Public License for more details.
|
|
12
|
# GNU General Public License for more details.
|
|
13
|
#
|
|
13
|
#
|
|
14
|
# You should have received a copy of the GNU Affero General Public License
|
|
14
|
# You should have received a copy of the GNU Affero General Public License
|
|
15
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
15
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
16
|
#
|
|
16
|
#
|
|
17
|
# This program is dual-licensed. If you wish to learn more about the
|
|
17
|
# This program is dual-licensed. If you wish to learn more about the
|
|
18
|
# RhodeCode Enterprise Edition, including its added features, Support services,
|
|
18
|
# RhodeCode Enterprise Edition, including its added features, Support services,
|
|
19
|
# and proprietary license terms, please see https://rhodecode.com/licenses/
|
|
19
|
# and proprietary license terms, please see https://rhodecode.com/licenses/
|
|
20
|
|
|
20
|
|
|
21
|
"""
|
|
21
|
"""
|
|
22
|
Files controller for RhodeCode Enterprise
|
|
22
|
Files controller for RhodeCode Enterprise
|
|
23
|
"""
|
|
23
|
"""
|
|
24
|
|
|
24
|
|
|
25
|
import itertools
|
|
25
|
import itertools
|
|
26
|
import logging
|
|
26
|
import logging
|
|
27
|
import os
|
|
27
|
import os
|
|
28
|
import shutil
|
|
28
|
import shutil
|
|
29
|
import tempfile
|
|
29
|
import tempfile
|
|
30
|
|
|
30
|
|
|
31
|
from pylons import request, response, tmpl_context as c, url
|
|
31
|
from pylons import request, response, tmpl_context as c, url
|
|
32
|
from pylons.i18n.translation import _
|
|
32
|
from pylons.i18n.translation import _
|
|
33
|
from pylons.controllers.util import redirect
|
|
33
|
from pylons.controllers.util import redirect
|
|
34
|
from webob.exc import HTTPNotFound, HTTPBadRequest
|
|
34
|
from webob.exc import HTTPNotFound, HTTPBadRequest
|
|
35
|
|
|
35
|
|
|
36
|
from rhodecode.controllers.utils import parse_path_ref
|
|
36
|
from rhodecode.controllers.utils import parse_path_ref
|
|
37
|
from rhodecode.lib import diffs, helpers as h, caches
|
|
37
|
from rhodecode.lib import diffs, helpers as h, caches
|
|
38
|
from rhodecode.lib.compat import OrderedDict
|
|
38
|
from rhodecode.lib.compat import OrderedDict
|
|
39
|
from rhodecode.lib.utils import jsonify, action_logger
|
|
39
|
from rhodecode.lib.utils import jsonify, action_logger
|
|
40
|
from rhodecode.lib.utils2 import (
|
|
40
|
from rhodecode.lib.utils2 import (
|
|
41
|
convert_line_endings, detect_mode, safe_str, str2bool)
|
|
41
|
convert_line_endings, detect_mode, safe_str, str2bool)
|
|
42
|
from rhodecode.lib.auth import (
|
|
42
|
from rhodecode.lib.auth import (
|
|
43
|
LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
|
|
43
|
LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
|
|
44
|
from rhodecode.lib.base import BaseRepoController, render
|
|
44
|
from rhodecode.lib.base import BaseRepoController, render
|
|
45
|
from rhodecode.lib.vcs import path as vcspath
|
|
45
|
from rhodecode.lib.vcs import path as vcspath
|
|
46
|
from rhodecode.lib.vcs.backends.base import EmptyCommit
|
|
46
|
from rhodecode.lib.vcs.backends.base import EmptyCommit
|
|
47
|
from rhodecode.lib.vcs.conf import settings
|
|
47
|
from rhodecode.lib.vcs.conf import settings
|
|
48
|
from rhodecode.lib.vcs.exceptions import (
|
|
48
|
from rhodecode.lib.vcs.exceptions import (
|
|
49
|
RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
|
|
49
|
RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
|
|
50
|
ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
|
|
50
|
ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
|
|
51
|
NodeDoesNotExistError, CommitError, NodeError)
|
|
51
|
NodeDoesNotExistError, CommitError, NodeError)
|
|
52
|
from rhodecode.lib.vcs.nodes import FileNode
|
|
52
|
from rhodecode.lib.vcs.nodes import FileNode
|
|
53
|
|
|
53
|
|
|
54
|
from rhodecode.model.repo import RepoModel
|
|
54
|
from rhodecode.model.repo import RepoModel
|
|
55
|
from rhodecode.model.scm import ScmModel
|
|
55
|
from rhodecode.model.scm import ScmModel
|
|
56
|
from rhodecode.model.db import Repository
|
|
56
|
from rhodecode.model.db import Repository
|
|
57
|
|
|
57
|
|
|
58
|
from rhodecode.controllers.changeset import (
|
|
58
|
from rhodecode.controllers.changeset import (
|
|
59
|
_ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
|
|
59
|
_ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
|
|
60
|
from rhodecode.lib.exceptions import NonRelativePathError
|
|
60
|
from rhodecode.lib.exceptions import NonRelativePathError
|
|
61
|
|
|
61
|
|
|
62
|
log = logging.getLogger(__name__)
|
|
62
|
log = logging.getLogger(__name__)
|
|
63
|
|
|
63
|
|
|
64
|
|
|
64
|
|
|
65
|
class FilesController(BaseRepoController):
|
|
65
|
class FilesController(BaseRepoController):
|
|
66
|
|
|
66
|
|
|
67
|
def __before__(self):
|
|
67
|
def __before__(self):
|
|
68
|
super(FilesController, self).__before__()
|
|
68
|
super(FilesController, self).__before__()
|
|
69
|
c.cut_off_limit = self.cut_off_limit_file
|
|
69
|
c.cut_off_limit = self.cut_off_limit_file
|
|
70
|
|
|
70
|
|
|
71
|
def _get_default_encoding(self):
|
|
71
|
def _get_default_encoding(self):
|
|
72
|
enc_list = getattr(c, 'default_encodings', [])
|
|
72
|
enc_list = getattr(c, 'default_encodings', [])
|
|
73
|
return enc_list[0] if enc_list else 'UTF-8'
|
|
73
|
return enc_list[0] if enc_list else 'UTF-8'
|
|
74
|
|
|
74
|
|
|
75
|
def __get_commit_or_redirect(self, commit_id, repo_name,
|
|
75
|
def __get_commit_or_redirect(self, commit_id, repo_name,
|
|
76
|
redirect_after=True):
|
|
76
|
redirect_after=True):
|
|
77
|
"""
|
|
77
|
"""
|
|
78
|
This is a safe way to get commit. If an error occurs it redirects to
|
|
78
|
This is a safe way to get commit. If an error occurs it redirects to
|
|
79
|
tip with proper message
|
|
79
|
tip with proper message
|
|
80
|
|
|
80
|
|
|
81
|
:param commit_id: id of commit to fetch
|
|
81
|
:param commit_id: id of commit to fetch
|
|
82
|
:param repo_name: repo name to redirect after
|
|
82
|
:param repo_name: repo name to redirect after
|
|
83
|
:param redirect_after: toggle redirection
|
|
83
|
:param redirect_after: toggle redirection
|
|
84
|
"""
|
|
84
|
"""
|
|
85
|
try:
|
|
85
|
try:
|
|
86
|
return c.rhodecode_repo.get_commit(commit_id)
|
|
86
|
return c.rhodecode_repo.get_commit(commit_id)
|
|
87
|
except EmptyRepositoryError:
|
|
87
|
except EmptyRepositoryError:
|
|
88
|
if not redirect_after:
|
|
88
|
if not redirect_after:
|
|
89
|
return None
|
|
89
|
return None
|
|
90
|
url_ = url('files_add_home',
|
|
90
|
url_ = url('files_add_home',
|
|
91
|
repo_name=c.repo_name,
|
|
91
|
repo_name=c.repo_name,
|
|
92
|
revision=0, f_path='', anchor='edit')
|
|
92
|
revision=0, f_path='', anchor='edit')
|
|
93
|
if h.HasRepoPermissionAny(
|
|
93
|
if h.HasRepoPermissionAny(
|
|
94
|
'repository.write', 'repository.admin')(c.repo_name):
|
|
94
|
'repository.write', 'repository.admin')(c.repo_name):
|
|
95
|
add_new = h.link_to(
|
|
95
|
add_new = h.link_to(
|
|
96
|
_('Click here to add a new file.'),
|
|
96
|
_('Click here to add a new file.'),
|
|
97
|
url_, class_="alert-link")
|
|
97
|
url_, class_="alert-link")
|
|
98
|
else:
|
|
98
|
else:
|
|
99
|
add_new = ""
|
|
99
|
add_new = ""
|
|
100
|
h.flash(h.literal(
|
|
100
|
h.flash(h.literal(
|
|
101
|
_('There are no files yet. %s') % add_new), category='warning')
|
|
101
|
_('There are no files yet. %s') % add_new), category='warning')
|
|
102
|
redirect(h.url('summary_home', repo_name=repo_name))
|
|
102
|
redirect(h.url('summary_home', repo_name=repo_name))
|
|
103
|
except (CommitDoesNotExistError, LookupError):
|
|
103
|
except (CommitDoesNotExistError, LookupError):
|
|
104
|
msg = _('No such commit exists for this repository')
|
|
104
|
msg = _('No such commit exists for this repository')
|
|
105
|
h.flash(msg, category='error')
|
|
105
|
h.flash(msg, category='error')
|
|
106
|
raise HTTPNotFound()
|
|
106
|
raise HTTPNotFound()
|
|
107
|
except RepositoryError as e:
|
|
107
|
except RepositoryError as e:
|
|
108
|
h.flash(safe_str(e), category='error')
|
|
108
|
h.flash(safe_str(e), category='error')
|
|
109
|
raise HTTPNotFound()
|
|
109
|
raise HTTPNotFound()
|
|
110
|
|
|
110
|
|
|
111
|
def __get_filenode_or_redirect(self, repo_name, commit, path):
|
|
111
|
def __get_filenode_or_redirect(self, repo_name, commit, path):
|
|
112
|
"""
|
|
112
|
"""
|
|
113
|
Returns file_node, if error occurs or given path is directory,
|
|
113
|
Returns file_node, if error occurs or given path is directory,
|
|
114
|
it'll redirect to top level path
|
|
114
|
it'll redirect to top level path
|
|
115
|
|
|
115
|
|
|
116
|
:param repo_name: repo_name
|
|
116
|
:param repo_name: repo_name
|
|
117
|
:param commit: given commit
|
|
117
|
:param commit: given commit
|
|
118
|
:param path: path to lookup
|
|
118
|
:param path: path to lookup
|
|
119
|
"""
|
|
119
|
"""
|
|
120
|
try:
|
|
120
|
try:
|
|
121
|
file_node = commit.get_node(path)
|
|
121
|
file_node = commit.get_node(path)
|
|
122
|
if file_node.is_dir():
|
|
122
|
if file_node.is_dir():
|
|
123
|
raise RepositoryError('The given path is a directory')
|
|
123
|
raise RepositoryError('The given path is a directory')
|
|
124
|
except CommitDoesNotExistError:
|
|
124
|
except CommitDoesNotExistError:
|
|
125
|
msg = _('No such commit exists for this repository')
|
|
125
|
msg = _('No such commit exists for this repository')
|
|
126
|
log.exception(msg)
|
|
126
|
log.exception(msg)
|
|
127
|
h.flash(msg, category='error')
|
|
127
|
h.flash(msg, category='error')
|
|
128
|
raise HTTPNotFound()
|
|
128
|
raise HTTPNotFound()
|
|
129
|
except RepositoryError as e:
|
|
129
|
except RepositoryError as e:
|
|
130
|
h.flash(safe_str(e), category='error')
|
|
130
|
h.flash(safe_str(e), category='error')
|
|
131
|
raise HTTPNotFound()
|
|
131
|
raise HTTPNotFound()
|
|
132
|
|
|
132
|
|
|
133
|
return file_node
|
|
133
|
return file_node
|
|
134
|
|
|
134
|
|
|
135
|
def __get_tree_cache_manager(self, repo_name, namespace_type):
|
|
135
|
def __get_tree_cache_manager(self, repo_name, namespace_type):
|
|
136
|
_namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
|
|
136
|
_namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
|
|
137
|
return caches.get_cache_manager('repo_cache_long', _namespace)
|
|
137
|
return caches.get_cache_manager('repo_cache_long', _namespace)
|
|
138
|
|
|
138
|
|
|
139
|
def _get_tree_at_commit(self, repo_name, commit_id, f_path):
|
|
139
|
def _get_tree_at_commit(self, repo_name, commit_id, f_path):
|
|
140
|
def _cached_tree():
|
|
140
|
def _cached_tree():
|
|
141
|
log.debug('Generating cached file tree for %s, %s, %s',
|
|
141
|
log.debug('Generating cached file tree for %s, %s, %s',
|
|
142
|
repo_name, commit_id, f_path)
|
|
142
|
repo_name, commit_id, f_path)
|
|
143
|
return render('files/files_browser.html')
|
|
143
|
return render('files/files_browser_tree.html')
|
|
144
|
|
|
144
|
|
|
145
|
cache_manager = self.__get_tree_cache_manager(
|
|
145
|
cache_manager = self.__get_tree_cache_manager(
|
|
146
|
repo_name, caches.FILE_TREE)
|
|
146
|
repo_name, caches.FILE_TREE)
|
|
147
|
|
|
147
|
|
|
148
|
cache_key = caches.compute_key_from_params(
|
|
148
|
cache_key = caches.compute_key_from_params(
|
|
149
|
repo_name, commit_id, f_path)
|
|
149
|
repo_name, commit_id, f_path)
|
|
150
|
|
|
150
|
|
|
151
|
return cache_manager.get(cache_key, createfunc=_cached_tree)
|
|
151
|
return cache_manager.get(cache_key, createfunc=_cached_tree)
|
|
152
|
|
|
152
|
|
|
153
|
def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
|
|
153
|
def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
|
|
154
|
def _cached_nodes():
|
|
154
|
def _cached_nodes():
|
|
155
|
log.debug('Generating cached nodelist for %s, %s, %s',
|
|
155
|
log.debug('Generating cached nodelist for %s, %s, %s',
|
|
156
|
repo_name, commit_id, f_path)
|
|
156
|
repo_name, commit_id, f_path)
|
|
157
|
_d, _f = ScmModel().get_nodes(
|
|
157
|
_d, _f = ScmModel().get_nodes(
|
|
158
|
repo_name, commit_id, f_path, flat=False)
|
|
158
|
repo_name, commit_id, f_path, flat=False)
|
|
159
|
return _d + _f
|
|
159
|
return _d + _f
|
|
160
|
|
|
160
|
|
|
161
|
cache_manager = self.__get_tree_cache_manager(
|
|
161
|
cache_manager = self.__get_tree_cache_manager(
|
|
162
|
repo_name, caches.FILE_SEARCH_TREE_META)
|
|
162
|
repo_name, caches.FILE_SEARCH_TREE_META)
|
|
163
|
|
|
163
|
|
|
164
|
cache_key = caches.compute_key_from_params(
|
|
164
|
cache_key = caches.compute_key_from_params(
|
|
165
|
repo_name, commit_id, f_path)
|
|
165
|
repo_name, commit_id, f_path)
|
|
166
|
return cache_manager.get(cache_key, createfunc=_cached_nodes)
|
|
166
|
return cache_manager.get(cache_key, createfunc=_cached_nodes)
|
|
167
|
|
|
167
|
|
|
168
|
def _get_metadata_at_commit(self, repo_name, commit, dir_node):
|
|
168
|
def _get_metadata_at_commit(self, repo_name, commit, dir_node):
|
|
169
|
def _cached_metadata():
|
|
169
|
def _cached_metadata():
|
|
170
|
log.debug('Generating cached metadata for %s, %s, %s',
|
|
170
|
log.debug('Generating cached metadata for %s, %s, %s',
|
|
171
|
repo_name, commit.raw_id, safe_str(dir_node.path))
|
|
171
|
repo_name, commit.raw_id, safe_str(dir_node.path))
|
|
172
|
|
|
172
|
|
|
173
|
data = ScmModel().get_dirnode_metadata(commit, dir_node)
|
|
173
|
data = ScmModel().get_dirnode_metadata(commit, dir_node)
|
|
174
|
return data
|
|
174
|
return data
|
|
175
|
|
|
175
|
|
|
176
|
cache_manager = self.__get_tree_cache_manager(
|
|
176
|
cache_manager = self.__get_tree_cache_manager(
|
|
177
|
repo_name, caches.FILE_TREE_META)
|
|
177
|
repo_name, caches.FILE_TREE_META)
|
|
178
|
|
|
178
|
|
|
179
|
cache_key = caches.compute_key_from_params(
|
|
179
|
cache_key = caches.compute_key_from_params(
|
|
180
|
repo_name, commit.raw_id, safe_str(dir_node.path))
|
|
180
|
repo_name, commit.raw_id, safe_str(dir_node.path))
|
|
181
|
|
|
181
|
|
|
182
|
return cache_manager.get(cache_key, createfunc=_cached_metadata)
|
|
182
|
return cache_manager.get(cache_key, createfunc=_cached_metadata)
|
|
183
|
|
|
183
|
|
|
184
|
@LoginRequired()
|
|
184
|
@LoginRequired()
|
|
185
|
@HasRepoPermissionAnyDecorator(
|
|
185
|
@HasRepoPermissionAnyDecorator(
|
|
186
|
'repository.read', 'repository.write', 'repository.admin')
|
|
186
|
'repository.read', 'repository.write', 'repository.admin')
|
|
187
|
def index(
|
|
187
|
def index(
|
|
188
|
self, repo_name, revision, f_path, annotate=False, rendered=False):
|
|
188
|
self, repo_name, revision, f_path, annotate=False, rendered=False):
|
|
189
|
commit_id = revision
|
|
189
|
commit_id = revision
|
|
190
|
|
|
190
|
|
|
191
|
# redirect to given commit_id from form if given
|
|
191
|
# redirect to given commit_id from form if given
|
|
192
|
get_commit_id = request.GET.get('at_rev', None)
|
|
192
|
get_commit_id = request.GET.get('at_rev', None)
|
|
193
|
if get_commit_id:
|
|
193
|
if get_commit_id:
|
|
194
|
self.__get_commit_or_redirect(get_commit_id, repo_name)
|
|
194
|
self.__get_commit_or_redirect(get_commit_id, repo_name)
|
|
195
|
|
|
195
|
|
|
196
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
196
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
197
|
c.branch = request.GET.get('branch', None)
|
|
197
|
c.branch = request.GET.get('branch', None)
|
|
198
|
c.f_path = f_path
|
|
198
|
c.f_path = f_path
|
|
199
|
c.annotate = annotate
|
|
199
|
c.annotate = annotate
|
|
200
|
# default is false, but .rst/.md files later are autorendered, we can
|
|
200
|
# default is false, but .rst/.md files later are autorendered, we can
|
|
201
|
# overwrite autorendering by setting this GET flag
|
|
201
|
# overwrite autorendering by setting this GET flag
|
|
202
|
c.renderer = rendered or not request.GET.get('no-render', False)
|
|
202
|
c.renderer = rendered or not request.GET.get('no-render', False)
|
|
203
|
|
|
203
|
|
|
204
|
# prev link
|
|
204
|
# prev link
|
|
205
|
try:
|
|
205
|
try:
|
|
206
|
prev_commit = c.commit.prev(c.branch)
|
|
206
|
prev_commit = c.commit.prev(c.branch)
|
|
207
|
c.prev_commit = prev_commit
|
|
207
|
c.prev_commit = prev_commit
|
|
208
|
c.url_prev = url('files_home', repo_name=c.repo_name,
|
|
208
|
c.url_prev = url('files_home', repo_name=c.repo_name,
|
|
209
|
revision=prev_commit.raw_id, f_path=f_path)
|
|
209
|
revision=prev_commit.raw_id, f_path=f_path)
|
|
210
|
if c.branch:
|
|
210
|
if c.branch:
|
|
211
|
c.url_prev += '?branch=%s' % c.branch
|
|
211
|
c.url_prev += '?branch=%s' % c.branch
|
|
212
|
except (CommitDoesNotExistError, VCSError):
|
|
212
|
except (CommitDoesNotExistError, VCSError):
|
|
213
|
c.url_prev = '#'
|
|
213
|
c.url_prev = '#'
|
|
214
|
c.prev_commit = EmptyCommit()
|
|
214
|
c.prev_commit = EmptyCommit()
|
|
215
|
|
|
215
|
|
|
216
|
# next link
|
|
216
|
# next link
|
|
217
|
try:
|
|
217
|
try:
|
|
218
|
next_commit = c.commit.next(c.branch)
|
|
218
|
next_commit = c.commit.next(c.branch)
|
|
219
|
c.next_commit = next_commit
|
|
219
|
c.next_commit = next_commit
|
|
220
|
c.url_next = url('files_home', repo_name=c.repo_name,
|
|
220
|
c.url_next = url('files_home', repo_name=c.repo_name,
|
|
221
|
revision=next_commit.raw_id, f_path=f_path)
|
|
221
|
revision=next_commit.raw_id, f_path=f_path)
|
|
222
|
if c.branch:
|
|
222
|
if c.branch:
|
|
223
|
c.url_next += '?branch=%s' % c.branch
|
|
223
|
c.url_next += '?branch=%s' % c.branch
|
|
224
|
except (CommitDoesNotExistError, VCSError):
|
|
224
|
except (CommitDoesNotExistError, VCSError):
|
|
225
|
c.url_next = '#'
|
|
225
|
c.url_next = '#'
|
|
226
|
c.next_commit = EmptyCommit()
|
|
226
|
c.next_commit = EmptyCommit()
|
|
227
|
|
|
227
|
|
|
228
|
# files or dirs
|
|
228
|
# files or dirs
|
|
229
|
try:
|
|
229
|
try:
|
|
230
|
c.file = c.commit.get_node(f_path)
|
|
230
|
c.file = c.commit.get_node(f_path)
|
|
231
|
c.file_author = True
|
|
231
|
c.file_author = True
|
|
232
|
c.file_tree = ''
|
|
232
|
c.file_tree = ''
|
|
233
|
if c.file.is_file():
|
|
233
|
if c.file.is_file():
|
|
234
|
c.renderer = (
|
|
234
|
c.renderer = (
|
|
235
|
c.renderer and h.renderer_from_filename(c.file.path))
|
|
235
|
c.renderer and h.renderer_from_filename(c.file.path))
|
|
236
|
c.file_last_commit = c.file.last_commit
|
|
236
|
c.file_last_commit = c.file.last_commit
|
|
237
|
|
|
237
|
|
|
238
|
c.on_branch_head = self._is_valid_head(
|
|
238
|
c.on_branch_head = self._is_valid_head(
|
|
239
|
commit_id, c.rhodecode_repo)
|
|
239
|
commit_id, c.rhodecode_repo)
|
|
240
|
c.branch_or_raw_id = c.commit.branch or c.commit.raw_id
|
|
240
|
c.branch_or_raw_id = c.commit.branch or c.commit.raw_id
|
|
241
|
|
|
241
|
|
|
242
|
author = c.file_last_commit.author
|
|
242
|
author = c.file_last_commit.author
|
|
243
|
c.authors = [(h.email(author),
|
|
243
|
c.authors = [(h.email(author),
|
|
244
|
h.person(author, 'username_or_name_or_email'))]
|
|
244
|
h.person(author, 'username_or_name_or_email'))]
|
|
245
|
else:
|
|
245
|
else:
|
|
246
|
c.authors = []
|
|
246
|
c.authors = []
|
|
247
|
c.file_tree = self._get_tree_at_commit(
|
|
247
|
c.file_tree = self._get_tree_at_commit(
|
|
248
|
repo_name, c.commit.raw_id, f_path)
|
|
248
|
repo_name, c.commit.raw_id, f_path)
|
|
249
|
except RepositoryError as e:
|
|
249
|
except RepositoryError as e:
|
|
250
|
h.flash(safe_str(e), category='error')
|
|
250
|
h.flash(safe_str(e), category='error')
|
|
251
|
raise HTTPNotFound()
|
|
251
|
raise HTTPNotFound()
|
|
252
|
|
|
252
|
|
|
253
|
if request.environ.get('HTTP_X_PJAX'):
|
|
253
|
if request.environ.get('HTTP_X_PJAX'):
|
|
254
|
return render('files/files_pjax.html')
|
|
254
|
return render('files/files_pjax.html')
|
|
255
|
|
|
255
|
|
|
256
|
return render('files/files.html')
|
|
256
|
return render('files/files.html')
|
|
257
|
|
|
257
|
|
|
258
|
@LoginRequired()
|
|
258
|
@LoginRequired()
|
|
259
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
259
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
260
|
'repository.admin')
|
|
260
|
'repository.admin')
|
|
261
|
@jsonify
|
|
261
|
@jsonify
|
|
262
|
def history(self, repo_name, revision, f_path):
|
|
262
|
def history(self, repo_name, revision, f_path):
|
|
263
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
263
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
264
|
f_path = f_path
|
|
264
|
f_path = f_path
|
|
265
|
_file = commit.get_node(f_path)
|
|
265
|
_file = commit.get_node(f_path)
|
|
266
|
if _file.is_file():
|
|
266
|
if _file.is_file():
|
|
267
|
file_history, _hist = self._get_node_history(commit, f_path)
|
|
267
|
file_history, _hist = self._get_node_history(commit, f_path)
|
|
268
|
|
|
268
|
|
|
269
|
res = []
|
|
269
|
res = []
|
|
270
|
for obj in file_history:
|
|
270
|
for obj in file_history:
|
|
271
|
res.append({
|
|
271
|
res.append({
|
|
272
|
'text': obj[1],
|
|
272
|
'text': obj[1],
|
|
273
|
'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
|
|
273
|
'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
|
|
274
|
})
|
|
274
|
})
|
|
275
|
|
|
275
|
|
|
276
|
data = {
|
|
276
|
data = {
|
|
277
|
'more': False,
|
|
277
|
'more': False,
|
|
278
|
'results': res
|
|
278
|
'results': res
|
|
279
|
}
|
|
279
|
}
|
|
280
|
return data
|
|
280
|
return data
|
|
281
|
|
|
281
|
|
|
282
|
@LoginRequired()
|
|
282
|
@LoginRequired()
|
|
283
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
283
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
284
|
'repository.admin')
|
|
284
|
'repository.admin')
|
|
285
|
def authors(self, repo_name, revision, f_path):
|
|
285
|
def authors(self, repo_name, revision, f_path):
|
|
286
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
286
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
287
|
file_node = commit.get_node(f_path)
|
|
287
|
file_node = commit.get_node(f_path)
|
|
288
|
if file_node.is_file():
|
|
288
|
if file_node.is_file():
|
|
289
|
c.file_last_commit = file_node.last_commit
|
|
289
|
c.file_last_commit = file_node.last_commit
|
|
290
|
if request.GET.get('annotate') == '1':
|
|
290
|
if request.GET.get('annotate') == '1':
|
|
291
|
# use _hist from annotation if annotation mode is on
|
|
291
|
# use _hist from annotation if annotation mode is on
|
|
292
|
commit_ids = set(x[1] for x in file_node.annotate)
|
|
292
|
commit_ids = set(x[1] for x in file_node.annotate)
|
|
293
|
_hist = (
|
|
293
|
_hist = (
|
|
294
|
c.rhodecode_repo.get_commit(commit_id)
|
|
294
|
c.rhodecode_repo.get_commit(commit_id)
|
|
295
|
for commit_id in commit_ids)
|
|
295
|
for commit_id in commit_ids)
|
|
296
|
else:
|
|
296
|
else:
|
|
297
|
_f_history, _hist = self._get_node_history(commit, f_path)
|
|
297
|
_f_history, _hist = self._get_node_history(commit, f_path)
|
|
298
|
c.file_author = False
|
|
298
|
c.file_author = False
|
|
299
|
c.authors = []
|
|
299
|
c.authors = []
|
|
300
|
for author in set(commit.author for commit in _hist):
|
|
300
|
for author in set(commit.author for commit in _hist):
|
|
301
|
c.authors.append((
|
|
301
|
c.authors.append((
|
|
302
|
h.email(author),
|
|
302
|
h.email(author),
|
|
303
|
h.person(author, 'username_or_name_or_email')))
|
|
303
|
h.person(author, 'username_or_name_or_email')))
|
|
304
|
return render('files/file_authors_box.html')
|
|
304
|
return render('files/file_authors_box.html')
|
|
305
|
|
|
305
|
|
|
306
|
@LoginRequired()
|
|
306
|
@LoginRequired()
|
|
307
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
307
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
308
|
'repository.admin')
|
|
308
|
'repository.admin')
|
|
309
|
def rawfile(self, repo_name, revision, f_path):
|
|
309
|
def rawfile(self, repo_name, revision, f_path):
|
|
310
|
"""
|
|
310
|
"""
|
|
311
|
Action for download as raw
|
|
311
|
Action for download as raw
|
|
312
|
"""
|
|
312
|
"""
|
|
313
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
313
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
314
|
file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
|
|
314
|
file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
|
|
315
|
|
|
315
|
|
|
316
|
response.content_disposition = 'attachment; filename=%s' % \
|
|
316
|
response.content_disposition = 'attachment; filename=%s' % \
|
|
317
|
safe_str(f_path.split(Repository.NAME_SEP)[-1])
|
|
317
|
safe_str(f_path.split(Repository.NAME_SEP)[-1])
|
|
318
|
|
|
318
|
|
|
319
|
response.content_type = file_node.mimetype
|
|
319
|
response.content_type = file_node.mimetype
|
|
320
|
charset = self._get_default_encoding()
|
|
320
|
charset = self._get_default_encoding()
|
|
321
|
if charset:
|
|
321
|
if charset:
|
|
322
|
response.charset = charset
|
|
322
|
response.charset = charset
|
|
323
|
|
|
323
|
|
|
324
|
return file_node.content
|
|
324
|
return file_node.content
|
|
325
|
|
|
325
|
|
|
326
|
@LoginRequired()
|
|
326
|
@LoginRequired()
|
|
327
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
327
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
328
|
'repository.admin')
|
|
328
|
'repository.admin')
|
|
329
|
def raw(self, repo_name, revision, f_path):
|
|
329
|
def raw(self, repo_name, revision, f_path):
|
|
330
|
"""
|
|
330
|
"""
|
|
331
|
Action for show as raw, some mimetypes are "rendered",
|
|
331
|
Action for show as raw, some mimetypes are "rendered",
|
|
332
|
those include images, icons.
|
|
332
|
those include images, icons.
|
|
333
|
"""
|
|
333
|
"""
|
|
334
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
334
|
commit = self.__get_commit_or_redirect(revision, repo_name)
|
|
335
|
file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
|
|
335
|
file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
|
|
336
|
|
|
336
|
|
|
337
|
raw_mimetype_mapping = {
|
|
337
|
raw_mimetype_mapping = {
|
|
338
|
# map original mimetype to a mimetype used for "show as raw"
|
|
338
|
# map original mimetype to a mimetype used for "show as raw"
|
|
339
|
# you can also provide a content-disposition to override the
|
|
339
|
# you can also provide a content-disposition to override the
|
|
340
|
# default "attachment" disposition.
|
|
340
|
# default "attachment" disposition.
|
|
341
|
# orig_type: (new_type, new_dispo)
|
|
341
|
# orig_type: (new_type, new_dispo)
|
|
342
|
|
|
342
|
|
|
343
|
# show images inline:
|
|
343
|
# show images inline:
|
|
344
|
# Do not re-add SVG: it is unsafe and permits XSS attacks. One can
|
|
344
|
# Do not re-add SVG: it is unsafe and permits XSS attacks. One can
|
|
345
|
# for example render an SVG with javascript inside or even render
|
|
345
|
# for example render an SVG with javascript inside or even render
|
|
346
|
# HTML.
|
|
346
|
# HTML.
|
|
347
|
'image/x-icon': ('image/x-icon', 'inline'),
|
|
347
|
'image/x-icon': ('image/x-icon', 'inline'),
|
|
348
|
'image/png': ('image/png', 'inline'),
|
|
348
|
'image/png': ('image/png', 'inline'),
|
|
349
|
'image/gif': ('image/gif', 'inline'),
|
|
349
|
'image/gif': ('image/gif', 'inline'),
|
|
350
|
'image/jpeg': ('image/jpeg', 'inline'),
|
|
350
|
'image/jpeg': ('image/jpeg', 'inline'),
|
|
351
|
}
|
|
351
|
}
|
|
352
|
|
|
352
|
|
|
353
|
mimetype = file_node.mimetype
|
|
353
|
mimetype = file_node.mimetype
|
|
354
|
try:
|
|
354
|
try:
|
|
355
|
mimetype, dispo = raw_mimetype_mapping[mimetype]
|
|
355
|
mimetype, dispo = raw_mimetype_mapping[mimetype]
|
|
356
|
except KeyError:
|
|
356
|
except KeyError:
|
|
357
|
# we don't know anything special about this, handle it safely
|
|
357
|
# we don't know anything special about this, handle it safely
|
|
358
|
if file_node.is_binary:
|
|
358
|
if file_node.is_binary:
|
|
359
|
# do same as download raw for binary files
|
|
359
|
# do same as download raw for binary files
|
|
360
|
mimetype, dispo = 'application/octet-stream', 'attachment'
|
|
360
|
mimetype, dispo = 'application/octet-stream', 'attachment'
|
|
361
|
else:
|
|
361
|
else:
|
|
362
|
# do not just use the original mimetype, but force text/plain,
|
|
362
|
# do not just use the original mimetype, but force text/plain,
|
|
363
|
# otherwise it would serve text/html and that might be unsafe.
|
|
363
|
# otherwise it would serve text/html and that might be unsafe.
|
|
364
|
# Note: underlying vcs library fakes text/plain mimetype if the
|
|
364
|
# Note: underlying vcs library fakes text/plain mimetype if the
|
|
365
|
# mimetype can not be determined and it thinks it is not
|
|
365
|
# mimetype can not be determined and it thinks it is not
|
|
366
|
# binary.This might lead to erroneous text display in some
|
|
366
|
# binary.This might lead to erroneous text display in some
|
|
367
|
# cases, but helps in other cases, like with text files
|
|
367
|
# cases, but helps in other cases, like with text files
|
|
368
|
# without extension.
|
|
368
|
# without extension.
|
|
369
|
mimetype, dispo = 'text/plain', 'inline'
|
|
369
|
mimetype, dispo = 'text/plain', 'inline'
|
|
370
|
|
|
370
|
|
|
371
|
if dispo == 'attachment':
|
|
371
|
if dispo == 'attachment':
|
|
372
|
dispo = 'attachment; filename=%s' % safe_str(
|
|
372
|
dispo = 'attachment; filename=%s' % safe_str(
|
|
373
|
f_path.split(os.sep)[-1])
|
|
373
|
f_path.split(os.sep)[-1])
|
|
374
|
|
|
374
|
|
|
375
|
response.content_disposition = dispo
|
|
375
|
response.content_disposition = dispo
|
|
376
|
response.content_type = mimetype
|
|
376
|
response.content_type = mimetype
|
|
377
|
charset = self._get_default_encoding()
|
|
377
|
charset = self._get_default_encoding()
|
|
378
|
if charset:
|
|
378
|
if charset:
|
|
379
|
response.charset = charset
|
|
379
|
response.charset = charset
|
|
380
|
return file_node.content
|
|
380
|
return file_node.content
|
|
381
|
|
|
381
|
|
|
382
|
@CSRFRequired()
|
|
382
|
@CSRFRequired()
|
|
383
|
@LoginRequired()
|
|
383
|
@LoginRequired()
|
|
384
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
384
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
385
|
def delete(self, repo_name, revision, f_path):
|
|
385
|
def delete(self, repo_name, revision, f_path):
|
|
386
|
commit_id = revision
|
|
386
|
commit_id = revision
|
|
387
|
|
|
387
|
|
|
388
|
repo = c.rhodecode_db_repo
|
|
388
|
repo = c.rhodecode_db_repo
|
|
389
|
if repo.enable_locking and repo.locked[0]:
|
|
389
|
if repo.enable_locking and repo.locked[0]:
|
|
390
|
h.flash(_('This repository has been locked by %s on %s')
|
|
390
|
h.flash(_('This repository has been locked by %s on %s')
|
|
391
|
% (h.person_by_id(repo.locked[0]),
|
|
391
|
% (h.person_by_id(repo.locked[0]),
|
|
392
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
392
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
393
|
'warning')
|
|
393
|
'warning')
|
|
394
|
return redirect(h.url('files_home',
|
|
394
|
return redirect(h.url('files_home',
|
|
395
|
repo_name=repo_name, revision='tip'))
|
|
395
|
repo_name=repo_name, revision='tip'))
|
|
396
|
|
|
396
|
|
|
397
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
397
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
398
|
h.flash(_('You can only delete files with revision '
|
|
398
|
h.flash(_('You can only delete files with revision '
|
|
399
|
'being a valid branch '), category='warning')
|
|
399
|
'being a valid branch '), category='warning')
|
|
400
|
return redirect(h.url('files_home',
|
|
400
|
return redirect(h.url('files_home',
|
|
401
|
repo_name=repo_name, revision='tip',
|
|
401
|
repo_name=repo_name, revision='tip',
|
|
402
|
f_path=f_path))
|
|
402
|
f_path=f_path))
|
|
403
|
|
|
403
|
|
|
404
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
404
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
405
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
405
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
406
|
|
|
406
|
|
|
407
|
c.default_message = _(
|
|
407
|
c.default_message = _(
|
|
408
|
'Deleted file %s via RhodeCode Enterprise') % (f_path)
|
|
408
|
'Deleted file %s via RhodeCode Enterprise') % (f_path)
|
|
409
|
c.f_path = f_path
|
|
409
|
c.f_path = f_path
|
|
410
|
node_path = f_path
|
|
410
|
node_path = f_path
|
|
411
|
author = c.rhodecode_user.full_contact
|
|
411
|
author = c.rhodecode_user.full_contact
|
|
412
|
message = request.POST.get('message') or c.default_message
|
|
412
|
message = request.POST.get('message') or c.default_message
|
|
413
|
try:
|
|
413
|
try:
|
|
414
|
nodes = {
|
|
414
|
nodes = {
|
|
415
|
node_path: {
|
|
415
|
node_path: {
|
|
416
|
'content': ''
|
|
416
|
'content': ''
|
|
417
|
}
|
|
417
|
}
|
|
418
|
}
|
|
418
|
}
|
|
419
|
self.scm_model.delete_nodes(
|
|
419
|
self.scm_model.delete_nodes(
|
|
420
|
user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
|
|
420
|
user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
|
|
421
|
message=message,
|
|
421
|
message=message,
|
|
422
|
nodes=nodes,
|
|
422
|
nodes=nodes,
|
|
423
|
parent_commit=c.commit,
|
|
423
|
parent_commit=c.commit,
|
|
424
|
author=author,
|
|
424
|
author=author,
|
|
425
|
)
|
|
425
|
)
|
|
426
|
|
|
426
|
|
|
427
|
h.flash(_('Successfully deleted file %s') % f_path,
|
|
427
|
h.flash(_('Successfully deleted file %s') % f_path,
|
|
428
|
category='success')
|
|
428
|
category='success')
|
|
429
|
except Exception:
|
|
429
|
except Exception:
|
|
430
|
msg = _('Error occurred during commit')
|
|
430
|
msg = _('Error occurred during commit')
|
|
431
|
log.exception(msg)
|
|
431
|
log.exception(msg)
|
|
432
|
h.flash(msg, category='error')
|
|
432
|
h.flash(msg, category='error')
|
|
433
|
return redirect(url('changeset_home',
|
|
433
|
return redirect(url('changeset_home',
|
|
434
|
repo_name=c.repo_name, revision='tip'))
|
|
434
|
repo_name=c.repo_name, revision='tip'))
|
|
435
|
|
|
435
|
|
|
436
|
@LoginRequired()
|
|
436
|
@LoginRequired()
|
|
437
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
437
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
438
|
def delete_home(self, repo_name, revision, f_path):
|
|
438
|
def delete_home(self, repo_name, revision, f_path):
|
|
439
|
commit_id = revision
|
|
439
|
commit_id = revision
|
|
440
|
|
|
440
|
|
|
441
|
repo = c.rhodecode_db_repo
|
|
441
|
repo = c.rhodecode_db_repo
|
|
442
|
if repo.enable_locking and repo.locked[0]:
|
|
442
|
if repo.enable_locking and repo.locked[0]:
|
|
443
|
h.flash(_('This repository has been locked by %s on %s')
|
|
443
|
h.flash(_('This repository has been locked by %s on %s')
|
|
444
|
% (h.person_by_id(repo.locked[0]),
|
|
444
|
% (h.person_by_id(repo.locked[0]),
|
|
445
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
445
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
446
|
'warning')
|
|
446
|
'warning')
|
|
447
|
return redirect(h.url('files_home',
|
|
447
|
return redirect(h.url('files_home',
|
|
448
|
repo_name=repo_name, revision='tip'))
|
|
448
|
repo_name=repo_name, revision='tip'))
|
|
449
|
|
|
449
|
|
|
450
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
450
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
451
|
h.flash(_('You can only delete files with revision '
|
|
451
|
h.flash(_('You can only delete files with revision '
|
|
452
|
'being a valid branch '), category='warning')
|
|
452
|
'being a valid branch '), category='warning')
|
|
453
|
return redirect(h.url('files_home',
|
|
453
|
return redirect(h.url('files_home',
|
|
454
|
repo_name=repo_name, revision='tip',
|
|
454
|
repo_name=repo_name, revision='tip',
|
|
455
|
f_path=f_path))
|
|
455
|
f_path=f_path))
|
|
456
|
|
|
456
|
|
|
457
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
457
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
458
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
458
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
459
|
|
|
459
|
|
|
460
|
c.default_message = _(
|
|
460
|
c.default_message = _(
|
|
461
|
'Deleted file %s via RhodeCode Enterprise') % (f_path)
|
|
461
|
'Deleted file %s via RhodeCode Enterprise') % (f_path)
|
|
462
|
c.f_path = f_path
|
|
462
|
c.f_path = f_path
|
|
463
|
|
|
463
|
|
|
464
|
return render('files/files_delete.html')
|
|
464
|
return render('files/files_delete.html')
|
|
465
|
|
|
465
|
|
|
466
|
@CSRFRequired()
|
|
466
|
@CSRFRequired()
|
|
467
|
@LoginRequired()
|
|
467
|
@LoginRequired()
|
|
468
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
468
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
469
|
def edit(self, repo_name, revision, f_path):
|
|
469
|
def edit(self, repo_name, revision, f_path):
|
|
470
|
commit_id = revision
|
|
470
|
commit_id = revision
|
|
471
|
|
|
471
|
|
|
472
|
repo = c.rhodecode_db_repo
|
|
472
|
repo = c.rhodecode_db_repo
|
|
473
|
if repo.enable_locking and repo.locked[0]:
|
|
473
|
if repo.enable_locking and repo.locked[0]:
|
|
474
|
h.flash(_('This repository has been locked by %s on %s')
|
|
474
|
h.flash(_('This repository has been locked by %s on %s')
|
|
475
|
% (h.person_by_id(repo.locked[0]),
|
|
475
|
% (h.person_by_id(repo.locked[0]),
|
|
476
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
476
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
477
|
'warning')
|
|
477
|
'warning')
|
|
478
|
return redirect(h.url('files_home',
|
|
478
|
return redirect(h.url('files_home',
|
|
479
|
repo_name=repo_name, revision='tip'))
|
|
479
|
repo_name=repo_name, revision='tip'))
|
|
480
|
|
|
480
|
|
|
481
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
481
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
482
|
h.flash(_('You can only edit files with revision '
|
|
482
|
h.flash(_('You can only edit files with revision '
|
|
483
|
'being a valid branch '), category='warning')
|
|
483
|
'being a valid branch '), category='warning')
|
|
484
|
return redirect(h.url('files_home',
|
|
484
|
return redirect(h.url('files_home',
|
|
485
|
repo_name=repo_name, revision='tip',
|
|
485
|
repo_name=repo_name, revision='tip',
|
|
486
|
f_path=f_path))
|
|
486
|
f_path=f_path))
|
|
487
|
|
|
487
|
|
|
488
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
488
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
489
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
489
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
490
|
|
|
490
|
|
|
491
|
if c.file.is_binary:
|
|
491
|
if c.file.is_binary:
|
|
492
|
return redirect(url('files_home', repo_name=c.repo_name,
|
|
492
|
return redirect(url('files_home', repo_name=c.repo_name,
|
|
493
|
revision=c.commit.raw_id, f_path=f_path))
|
|
493
|
revision=c.commit.raw_id, f_path=f_path))
|
|
494
|
c.default_message = _(
|
|
494
|
c.default_message = _(
|
|
495
|
'Edited file %s via RhodeCode Enterprise') % (f_path)
|
|
495
|
'Edited file %s via RhodeCode Enterprise') % (f_path)
|
|
496
|
c.f_path = f_path
|
|
496
|
c.f_path = f_path
|
|
497
|
old_content = c.file.content
|
|
497
|
old_content = c.file.content
|
|
498
|
sl = old_content.splitlines(1)
|
|
498
|
sl = old_content.splitlines(1)
|
|
499
|
first_line = sl[0] if sl else ''
|
|
499
|
first_line = sl[0] if sl else ''
|
|
500
|
|
|
500
|
|
|
501
|
# modes: 0 - Unix, 1 - Mac, 2 - DOS
|
|
501
|
# modes: 0 - Unix, 1 - Mac, 2 - DOS
|
|
502
|
mode = detect_mode(first_line, 0)
|
|
502
|
mode = detect_mode(first_line, 0)
|
|
503
|
content = convert_line_endings(request.POST.get('content', ''), mode)
|
|
503
|
content = convert_line_endings(request.POST.get('content', ''), mode)
|
|
504
|
|
|
504
|
|
|
505
|
message = request.POST.get('message') or c.default_message
|
|
505
|
message = request.POST.get('message') or c.default_message
|
|
506
|
org_f_path = c.file.unicode_path
|
|
506
|
org_f_path = c.file.unicode_path
|
|
507
|
filename = request.POST['filename']
|
|
507
|
filename = request.POST['filename']
|
|
508
|
org_filename = c.file.name
|
|
508
|
org_filename = c.file.name
|
|
509
|
|
|
509
|
|
|
510
|
if content == old_content and filename == org_filename:
|
|
510
|
if content == old_content and filename == org_filename:
|
|
511
|
h.flash(_('No changes'), category='warning')
|
|
511
|
h.flash(_('No changes'), category='warning')
|
|
512
|
return redirect(url('changeset_home', repo_name=c.repo_name,
|
|
512
|
return redirect(url('changeset_home', repo_name=c.repo_name,
|
|
513
|
revision='tip'))
|
|
513
|
revision='tip'))
|
|
514
|
try:
|
|
514
|
try:
|
|
515
|
mapping = {
|
|
515
|
mapping = {
|
|
516
|
org_f_path: {
|
|
516
|
org_f_path: {
|
|
517
|
'org_filename': org_f_path,
|
|
517
|
'org_filename': org_f_path,
|
|
518
|
'filename': os.path.join(c.file.dir_path, filename),
|
|
518
|
'filename': os.path.join(c.file.dir_path, filename),
|
|
519
|
'content': content,
|
|
519
|
'content': content,
|
|
520
|
'lexer': '',
|
|
520
|
'lexer': '',
|
|
521
|
'op': 'mod',
|
|
521
|
'op': 'mod',
|
|
522
|
}
|
|
522
|
}
|
|
523
|
}
|
|
523
|
}
|
|
524
|
|
|
524
|
|
|
525
|
ScmModel().update_nodes(
|
|
525
|
ScmModel().update_nodes(
|
|
526
|
user=c.rhodecode_user.user_id,
|
|
526
|
user=c.rhodecode_user.user_id,
|
|
527
|
repo=c.rhodecode_db_repo,
|
|
527
|
repo=c.rhodecode_db_repo,
|
|
528
|
message=message,
|
|
528
|
message=message,
|
|
529
|
nodes=mapping,
|
|
529
|
nodes=mapping,
|
|
530
|
parent_commit=c.commit,
|
|
530
|
parent_commit=c.commit,
|
|
531
|
)
|
|
531
|
)
|
|
532
|
|
|
532
|
|
|
533
|
h.flash(_('Successfully committed to %s') % f_path,
|
|
533
|
h.flash(_('Successfully committed to %s') % f_path,
|
|
534
|
category='success')
|
|
534
|
category='success')
|
|
535
|
except Exception:
|
|
535
|
except Exception:
|
|
536
|
msg = _('Error occurred during commit')
|
|
536
|
msg = _('Error occurred during commit')
|
|
537
|
log.exception(msg)
|
|
537
|
log.exception(msg)
|
|
538
|
h.flash(msg, category='error')
|
|
538
|
h.flash(msg, category='error')
|
|
539
|
return redirect(url('changeset_home',
|
|
539
|
return redirect(url('changeset_home',
|
|
540
|
repo_name=c.repo_name, revision='tip'))
|
|
540
|
repo_name=c.repo_name, revision='tip'))
|
|
541
|
|
|
541
|
|
|
542
|
@LoginRequired()
|
|
542
|
@LoginRequired()
|
|
543
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
543
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
544
|
def edit_home(self, repo_name, revision, f_path):
|
|
544
|
def edit_home(self, repo_name, revision, f_path):
|
|
545
|
commit_id = revision
|
|
545
|
commit_id = revision
|
|
546
|
|
|
546
|
|
|
547
|
repo = c.rhodecode_db_repo
|
|
547
|
repo = c.rhodecode_db_repo
|
|
548
|
if repo.enable_locking and repo.locked[0]:
|
|
548
|
if repo.enable_locking and repo.locked[0]:
|
|
549
|
h.flash(_('This repository has been locked by %s on %s')
|
|
549
|
h.flash(_('This repository has been locked by %s on %s')
|
|
550
|
% (h.person_by_id(repo.locked[0]),
|
|
550
|
% (h.person_by_id(repo.locked[0]),
|
|
551
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
551
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
552
|
'warning')
|
|
552
|
'warning')
|
|
553
|
return redirect(h.url('files_home',
|
|
553
|
return redirect(h.url('files_home',
|
|
554
|
repo_name=repo_name, revision='tip'))
|
|
554
|
repo_name=repo_name, revision='tip'))
|
|
555
|
|
|
555
|
|
|
556
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
556
|
if not self._is_valid_head(commit_id, repo.scm_instance()):
|
|
557
|
h.flash(_('You can only edit files with revision '
|
|
557
|
h.flash(_('You can only edit files with revision '
|
|
558
|
'being a valid branch '), category='warning')
|
|
558
|
'being a valid branch '), category='warning')
|
|
559
|
return redirect(h.url('files_home',
|
|
559
|
return redirect(h.url('files_home',
|
|
560
|
repo_name=repo_name, revision='tip',
|
|
560
|
repo_name=repo_name, revision='tip',
|
|
561
|
f_path=f_path))
|
|
561
|
f_path=f_path))
|
|
562
|
|
|
562
|
|
|
563
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
563
|
c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
|
|
564
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
564
|
c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
|
|
565
|
|
|
565
|
|
|
566
|
if c.file.is_binary:
|
|
566
|
if c.file.is_binary:
|
|
567
|
return redirect(url('files_home', repo_name=c.repo_name,
|
|
567
|
return redirect(url('files_home', repo_name=c.repo_name,
|
|
568
|
revision=c.commit.raw_id, f_path=f_path))
|
|
568
|
revision=c.commit.raw_id, f_path=f_path))
|
|
569
|
c.default_message = _(
|
|
569
|
c.default_message = _(
|
|
570
|
'Edited file %s via RhodeCode Enterprise') % (f_path)
|
|
570
|
'Edited file %s via RhodeCode Enterprise') % (f_path)
|
|
571
|
c.f_path = f_path
|
|
571
|
c.f_path = f_path
|
|
572
|
|
|
572
|
|
|
573
|
return render('files/files_edit.html')
|
|
573
|
return render('files/files_edit.html')
|
|
574
|
|
|
574
|
|
|
575
|
def _is_valid_head(self, commit_id, repo):
|
|
575
|
def _is_valid_head(self, commit_id, repo):
|
|
576
|
# check if commit is a branch identifier- basically we cannot
|
|
576
|
# check if commit is a branch identifier- basically we cannot
|
|
577
|
# create multiple heads via file editing
|
|
577
|
# create multiple heads via file editing
|
|
578
|
valid_heads = repo.branches.keys() + repo.branches.values()
|
|
578
|
valid_heads = repo.branches.keys() + repo.branches.values()
|
|
579
|
|
|
579
|
|
|
580
|
if h.is_svn(repo) and not repo.is_empty():
|
|
580
|
if h.is_svn(repo) and not repo.is_empty():
|
|
581
|
# Note: Subversion only has one head, we add it here in case there
|
|
581
|
# Note: Subversion only has one head, we add it here in case there
|
|
582
|
# is no branch matched.
|
|
582
|
# is no branch matched.
|
|
583
|
valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
|
|
583
|
valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
|
|
584
|
|
|
584
|
|
|
585
|
# check if commit is a branch name or branch hash
|
|
585
|
# check if commit is a branch name or branch hash
|
|
586
|
return commit_id in valid_heads
|
|
586
|
return commit_id in valid_heads
|
|
587
|
|
|
587
|
|
|
588
|
@CSRFRequired()
|
|
588
|
@CSRFRequired()
|
|
589
|
@LoginRequired()
|
|
589
|
@LoginRequired()
|
|
590
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
590
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
591
|
def add(self, repo_name, revision, f_path):
|
|
591
|
def add(self, repo_name, revision, f_path):
|
|
592
|
repo = Repository.get_by_repo_name(repo_name)
|
|
592
|
repo = Repository.get_by_repo_name(repo_name)
|
|
593
|
if repo.enable_locking and repo.locked[0]:
|
|
593
|
if repo.enable_locking and repo.locked[0]:
|
|
594
|
h.flash(_('This repository has been locked by %s on %s')
|
|
594
|
h.flash(_('This repository has been locked by %s on %s')
|
|
595
|
% (h.person_by_id(repo.locked[0]),
|
|
595
|
% (h.person_by_id(repo.locked[0]),
|
|
596
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
596
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
597
|
'warning')
|
|
597
|
'warning')
|
|
598
|
return redirect(h.url('files_home',
|
|
598
|
return redirect(h.url('files_home',
|
|
599
|
repo_name=repo_name, revision='tip'))
|
|
599
|
repo_name=repo_name, revision='tip'))
|
|
600
|
|
|
600
|
|
|
601
|
r_post = request.POST
|
|
601
|
r_post = request.POST
|
|
602
|
|
|
602
|
|
|
603
|
c.commit = self.__get_commit_or_redirect(
|
|
603
|
c.commit = self.__get_commit_or_redirect(
|
|
604
|
revision, repo_name, redirect_after=False)
|
|
604
|
revision, repo_name, redirect_after=False)
|
|
605
|
if c.commit is None:
|
|
605
|
if c.commit is None:
|
|
606
|
c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
|
|
606
|
c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
|
|
607
|
c.default_message = (_('Added file via RhodeCode Enterprise'))
|
|
607
|
c.default_message = (_('Added file via RhodeCode Enterprise'))
|
|
608
|
c.f_path = f_path
|
|
608
|
c.f_path = f_path
|
|
609
|
unix_mode = 0
|
|
609
|
unix_mode = 0
|
|
610
|
content = convert_line_endings(r_post.get('content', ''), unix_mode)
|
|
610
|
content = convert_line_endings(r_post.get('content', ''), unix_mode)
|
|
611
|
|
|
611
|
|
|
612
|
message = r_post.get('message') or c.default_message
|
|
612
|
message = r_post.get('message') or c.default_message
|
|
613
|
filename = r_post.get('filename')
|
|
613
|
filename = r_post.get('filename')
|
|
614
|
location = r_post.get('location', '') # dir location
|
|
614
|
location = r_post.get('location', '') # dir location
|
|
615
|
file_obj = r_post.get('upload_file', None)
|
|
615
|
file_obj = r_post.get('upload_file', None)
|
|
616
|
|
|
616
|
|
|
617
|
if file_obj is not None and hasattr(file_obj, 'filename'):
|
|
617
|
if file_obj is not None and hasattr(file_obj, 'filename'):
|
|
618
|
filename = file_obj.filename
|
|
618
|
filename = file_obj.filename
|
|
619
|
content = file_obj.file
|
|
619
|
content = file_obj.file
|
|
620
|
|
|
620
|
|
|
621
|
if hasattr(content, 'file'):
|
|
621
|
if hasattr(content, 'file'):
|
|
622
|
# non posix systems store real file under file attr
|
|
622
|
# non posix systems store real file under file attr
|
|
623
|
content = content.file
|
|
623
|
content = content.file
|
|
624
|
|
|
624
|
|
|
625
|
# If there's no commit, redirect to repo summary
|
|
625
|
# If there's no commit, redirect to repo summary
|
|
626
|
if type(c.commit) is EmptyCommit:
|
|
626
|
if type(c.commit) is EmptyCommit:
|
|
627
|
redirect_url = "summary_home"
|
|
627
|
redirect_url = "summary_home"
|
|
628
|
else:
|
|
628
|
else:
|
|
629
|
redirect_url = "changeset_home"
|
|
629
|
redirect_url = "changeset_home"
|
|
630
|
|
|
630
|
|
|
631
|
if not filename:
|
|
631
|
if not filename:
|
|
632
|
h.flash(_('No filename'), category='warning')
|
|
632
|
h.flash(_('No filename'), category='warning')
|
|
633
|
return redirect(url(redirect_url, repo_name=c.repo_name,
|
|
633
|
return redirect(url(redirect_url, repo_name=c.repo_name,
|
|
634
|
revision='tip'))
|
|
634
|
revision='tip'))
|
|
635
|
|
|
635
|
|
|
636
|
# extract the location from filename,
|
|
636
|
# extract the location from filename,
|
|
637
|
# allows using foo/bar.txt syntax to create subdirectories
|
|
637
|
# allows using foo/bar.txt syntax to create subdirectories
|
|
638
|
subdir_loc = filename.rsplit('/', 1)
|
|
638
|
subdir_loc = filename.rsplit('/', 1)
|
|
639
|
if len(subdir_loc) == 2:
|
|
639
|
if len(subdir_loc) == 2:
|
|
640
|
location = os.path.join(location, subdir_loc[0])
|
|
640
|
location = os.path.join(location, subdir_loc[0])
|
|
641
|
|
|
641
|
|
|
642
|
# strip all crap out of file, just leave the basename
|
|
642
|
# strip all crap out of file, just leave the basename
|
|
643
|
filename = os.path.basename(filename)
|
|
643
|
filename = os.path.basename(filename)
|
|
644
|
node_path = os.path.join(location, filename)
|
|
644
|
node_path = os.path.join(location, filename)
|
|
645
|
author = c.rhodecode_user.full_contact
|
|
645
|
author = c.rhodecode_user.full_contact
|
|
646
|
|
|
646
|
|
|
647
|
try:
|
|
647
|
try:
|
|
648
|
nodes = {
|
|
648
|
nodes = {
|
|
649
|
node_path: {
|
|
649
|
node_path: {
|
|
650
|
'content': content
|
|
650
|
'content': content
|
|
651
|
}
|
|
651
|
}
|
|
652
|
}
|
|
652
|
}
|
|
653
|
self.scm_model.create_nodes(
|
|
653
|
self.scm_model.create_nodes(
|
|
654
|
user=c.rhodecode_user.user_id,
|
|
654
|
user=c.rhodecode_user.user_id,
|
|
655
|
repo=c.rhodecode_db_repo,
|
|
655
|
repo=c.rhodecode_db_repo,
|
|
656
|
message=message,
|
|
656
|
message=message,
|
|
657
|
nodes=nodes,
|
|
657
|
nodes=nodes,
|
|
658
|
parent_commit=c.commit,
|
|
658
|
parent_commit=c.commit,
|
|
659
|
author=author,
|
|
659
|
author=author,
|
|
660
|
)
|
|
660
|
)
|
|
661
|
|
|
661
|
|
|
662
|
h.flash(_('Successfully committed to %s') % node_path,
|
|
662
|
h.flash(_('Successfully committed to %s') % node_path,
|
|
663
|
category='success')
|
|
663
|
category='success')
|
|
664
|
except NonRelativePathError as e:
|
|
664
|
except NonRelativePathError as e:
|
|
665
|
h.flash(_(
|
|
665
|
h.flash(_(
|
|
666
|
'The location specified must be a relative path and must not '
|
|
666
|
'The location specified must be a relative path and must not '
|
|
667
|
'contain .. in the path'), category='warning')
|
|
667
|
'contain .. in the path'), category='warning')
|
|
668
|
return redirect(url('changeset_home', repo_name=c.repo_name,
|
|
668
|
return redirect(url('changeset_home', repo_name=c.repo_name,
|
|
669
|
revision='tip'))
|
|
669
|
revision='tip'))
|
|
670
|
except (NodeError, NodeAlreadyExistsError) as e:
|
|
670
|
except (NodeError, NodeAlreadyExistsError) as e:
|
|
671
|
h.flash(_(e), category='error')
|
|
671
|
h.flash(_(e), category='error')
|
|
672
|
except Exception:
|
|
672
|
except Exception:
|
|
673
|
msg = _('Error occurred during commit')
|
|
673
|
msg = _('Error occurred during commit')
|
|
674
|
log.exception(msg)
|
|
674
|
log.exception(msg)
|
|
675
|
h.flash(msg, category='error')
|
|
675
|
h.flash(msg, category='error')
|
|
676
|
return redirect(url('changeset_home',
|
|
676
|
return redirect(url('changeset_home',
|
|
677
|
repo_name=c.repo_name, revision='tip'))
|
|
677
|
repo_name=c.repo_name, revision='tip'))
|
|
678
|
|
|
678
|
|
|
679
|
@LoginRequired()
|
|
679
|
@LoginRequired()
|
|
680
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
680
|
@HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
|
|
681
|
def add_home(self, repo_name, revision, f_path):
|
|
681
|
def add_home(self, repo_name, revision, f_path):
|
|
682
|
|
|
682
|
|
|
683
|
repo = Repository.get_by_repo_name(repo_name)
|
|
683
|
repo = Repository.get_by_repo_name(repo_name)
|
|
684
|
if repo.enable_locking and repo.locked[0]:
|
|
684
|
if repo.enable_locking and repo.locked[0]:
|
|
685
|
h.flash(_('This repository has been locked by %s on %s')
|
|
685
|
h.flash(_('This repository has been locked by %s on %s')
|
|
686
|
% (h.person_by_id(repo.locked[0]),
|
|
686
|
% (h.person_by_id(repo.locked[0]),
|
|
687
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
687
|
h.format_date(h.time_to_datetime(repo.locked[1]))),
|
|
688
|
'warning')
|
|
688
|
'warning')
|
|
689
|
return redirect(h.url('files_home',
|
|
689
|
return redirect(h.url('files_home',
|
|
690
|
repo_name=repo_name, revision='tip'))
|
|
690
|
repo_name=repo_name, revision='tip'))
|
|
691
|
|
|
691
|
|
|
692
|
c.commit = self.__get_commit_or_redirect(
|
|
692
|
c.commit = self.__get_commit_or_redirect(
|
|
693
|
revision, repo_name, redirect_after=False)
|
|
693
|
revision, repo_name, redirect_after=False)
|
|
694
|
if c.commit is None:
|
|
694
|
if c.commit is None:
|
|
695
|
c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
|
|
695
|
c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
|
|
696
|
c.default_message = (_('Added file via RhodeCode Enterprise'))
|
|
696
|
c.default_message = (_('Added file via RhodeCode Enterprise'))
|
|
697
|
c.f_path = f_path
|
|
697
|
c.f_path = f_path
|
|
698
|
|
|
698
|
|
|
699
|
return render('files/files_add.html')
|
|
699
|
return render('files/files_add.html')
|
|
700
|
|
|
700
|
|
|
701
|
@LoginRequired()
|
|
701
|
@LoginRequired()
|
|
702
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
702
|
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
|
|
703
|
'repository.admin')
|
|
703
|
'repository.admin')
|
|
704
|
def archivefile(self, repo_name, fname):
|
|
704
|
def archivefile(self, repo_name, fname):
|
|
705
|
fileformat = None
|
|
705
|
fileformat = None
|
|
706
|
commit_id = None
|
|
706
|
commit_id = None
|
|
707
|
ext = None
|
|
707
|
ext = None
|
|
708
|
subrepos = request.GET.get('subrepos') == 'true'
|
|
708
|
subrepos = request.GET.get('subrepos') == 'true'
|
|
709
|
|
|
709
|
|
|
710
|
for a_type, ext_data in settings.ARCHIVE_SPECS.items():
|
|
710
|
for a_type, ext_data in settings.ARCHIVE_SPECS.items():
|
|
711
|
archive_spec = fname.split(ext_data[1])
|
|
711
|
archive_spec = fname.split(ext_data[1])
|
|
712
|
if len(archive_spec) == 2 and archive_spec[1] == '':
|
|
712
|
if len(archive_spec) == 2 and archive_spec[1] == '':
|
|
713
|
fileformat = a_type or ext_data[1]
|
|
713
|
fileformat = a_type or ext_data[1]
|
|
714
|
commit_id = archive_spec[0]
|
|
714
|
commit_id = archive_spec[0]
|
|
715
|
ext = ext_data[1]
|
|
715
|
ext = ext_data[1]
|
|
716
|
|
|
716
|
|
|
717
|
dbrepo = RepoModel().get_by_repo_name(repo_name)
|
|
717
|
dbrepo = RepoModel().get_by_repo_name(repo_name)
|
|
718
|
if not dbrepo.enable_downloads:
|
|
718
|
if not dbrepo.enable_downloads:
|
|
719
|
return _('Downloads disabled')
|
|
719
|
return _('Downloads disabled')
|
|
720
|
|
|
720
|
|
|
721
|
try:
|
|
721
|
try:
|
|
722
|
commit = c.rhodecode_repo.get_commit(commit_id)
|
|
722
|
commit = c.rhodecode_repo.get_commit(commit_id)
|
|
723
|
content_type = settings.ARCHIVE_SPECS[fileformat][0]
|
|
723
|
content_type = settings.ARCHIVE_SPECS[fileformat][0]
|
|
724
|
except CommitDoesNotExistError:
|
|
724
|
except CommitDoesNotExistError:
|
|
725
|
return _('Unknown revision %s') % commit_id
|
|
725
|
return _('Unknown revision %s') % commit_id
|
|
726
|
except EmptyRepositoryError:
|
|
726
|
except EmptyRepositoryError:
|
|
727
|
return _('Empty repository')
|
|
727
|
return _('Empty repository')
|
|
728
|
except KeyError:
|
|
728
|
except KeyError:
|
|
729
|
return _('Unknown archive type')
|
|
729
|
return _('Unknown archive type')
|
|
730
|
|
|
730
|
|
|
731
|
# archive cache
|
|
731
|
# archive cache
|
|
732
|
from rhodecode import CONFIG
|
|
732
|
from rhodecode import CONFIG
|
|
733
|
|
|
733
|
|
|
734
|
archive_name = '%s-%s%s%s' % (
|
|
734
|
archive_name = '%s-%s%s%s' % (
|
|
735
|
safe_str(repo_name.replace('/', '_')),
|
|
735
|
safe_str(repo_name.replace('/', '_')),
|
|
736
|
'-sub' if subrepos else '',
|
|
736
|
'-sub' if subrepos else '',
|
|
737
|
safe_str(commit.short_id), ext)
|
|
737
|
safe_str(commit.short_id), ext)
|
|
738
|
|
|
738
|
|
|
739
|
use_cached_archive = False
|
|
739
|
use_cached_archive = False
|
|
740
|
archive_cache_enabled = CONFIG.get(
|
|
740
|
archive_cache_enabled = CONFIG.get(
|
|
741
|
'archive_cache_dir') and not request.GET.get('no_cache')
|
|
741
|
'archive_cache_dir') and not request.GET.get('no_cache')
|
|
742
|
|
|
742
|
|
|
743
|
if archive_cache_enabled:
|
|
743
|
if archive_cache_enabled:
|
|
744
|
# check if we it's ok to write
|
|
744
|
# check if we it's ok to write
|
|
745
|
if not os.path.isdir(CONFIG['archive_cache_dir']):
|
|
745
|
if not os.path.isdir(CONFIG['archive_cache_dir']):
|
|
746
|
os.makedirs(CONFIG['archive_cache_dir'])
|
|
746
|
os.makedirs(CONFIG['archive_cache_dir'])
|
|
747
|
cached_archive_path = os.path.join(
|
|
747
|
cached_archive_path = os.path.join(
|
|
748
|
CONFIG['archive_cache_dir'], archive_name)
|
|
748
|
CONFIG['archive_cache_dir'], archive_name)
|
|
749
|
if os.path.isfile(cached_archive_path):
|
|
749
|
if os.path.isfile(cached_archive_path):
|
|
750
|
log.debug('Found cached archive in %s', cached_archive_path)
|
|
750
|
log.debug('Found cached archive in %s', cached_archive_path)
|
|
751
|
fd, archive = None, cached_archive_path
|
|
751
|
fd, archive = None, cached_archive_path
|
|
752
|
use_cached_archive = True
|
|
752
|
use_cached_archive = True
|
|
753
|
else:
|
|
753
|
else:
|
|
754
|
log.debug('Archive %s is not yet cached', archive_name)
|
|
754
|
log.debug('Archive %s is not yet cached', archive_name)
|
|
755
|
|
|
755
|
|
|
756
|
if not use_cached_archive:
|
|
756
|
if not use_cached_archive:
|
|
757
|
# generate new archive
|
|
757
|
# generate new archive
|
|
758
|
fd, archive = tempfile.mkstemp()
|
|
758
|
fd, archive = tempfile.mkstemp()
|
|
759
|
log.debug('Creating new temp archive in %s' % (archive,))
|
|
759
|
log.debug('Creating new temp archive in %s' % (archive,))
|
|
760
|
try:
|
|
760
|
try:
|
|
761
|
commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
|
|
761
|
commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
|
|
762
|
except ImproperArchiveTypeError:
|
|
762
|
except ImproperArchiveTypeError:
|
|
763
|
return _('Unknown archive type')
|
|
763
|
return _('Unknown archive type')
|
|
764
|
if archive_cache_enabled:
|
|
764
|
if archive_cache_enabled:
|
|
765
|
# if we generated the archive and we have cache enabled
|
|
765
|
# if we generated the archive and we have cache enabled
|
|
766
|
# let's use this for future
|
|
766
|
# let's use this for future
|
|
767
|
log.debug('Storing new archive in %s' % (cached_archive_path,))
|
|
767
|
log.debug('Storing new archive in %s' % (cached_archive_path,))
|
|
768
|
shutil.move(archive, cached_archive_path)
|
|
768
|
shutil.move(archive, cached_archive_path)
|
|
769
|
archive = cached_archive_path
|
|
769
|
archive = cached_archive_path
|
|
770
|
|
|
770
|
|
|
771
|
def get_chunked_archive(archive):
|
|
771
|
def get_chunked_archive(archive):
|
|
772
|
with open(archive, 'rb') as stream:
|
|
772
|
with open(archive, 'rb') as stream:
|
|
773
|
while True:
|
|
773
|
while True:
|
|
774
|
data = stream.read(16 * 1024)
|
|
774
|
data = stream.read(16 * 1024)
|
|
775
|
if not data:
|
|
775
|
if not data:
|
|
776
|
if fd: # fd means we used temporary file
|
|
776
|
if fd: # fd means we used temporary file
|
|
777
|
os.close(fd)
|
|
777
|
os.close(fd)
|
|
778
|
if not archive_cache_enabled:
|
|
778
|
if not archive_cache_enabled:
|
|
779
|
log.debug('Destroying temp archive %s', archive)
|
|
779
|
log.debug('Destroying temp archive %s', archive)
|
|
780
|
os.remove(archive)
|
|
780
|
os.remove(archive)
|
|
781
|
break
|
|
781
|
break
|
|
782
|
yield data
|
|
782
|
yield data
|
|
783
|
|
|
783
|
|
|
784
|
# store download action
|
|
784
|
# store download action
|
|
785
|
action_logger(user=c.rhodecode_user,
|
|
785
|
action_logger(user=c.rhodecode_user,
|
|
786
|
action='user_downloaded_archive:%s' % archive_name,
|
|
786
|
action='user_downloaded_archive:%s' % archive_name,
|
|
787
|
repo=repo_name, ipaddr=self.ip_addr, commit=True)
|
|
787
|
repo=repo_name, ipaddr=self.ip_addr, commit=True)
|
|
788
|
response.content_disposition = str(
|
|
788
|
response.content_disposition = str(
|
|
789
|
'attachment; filename=%s' % archive_name)
|
|
789
|
'attachment; filename=%s' % archive_name)
|
|
790
|
response.content_type = str(content_type)
|
|
790
|
response.content_type = str(content_type)
|
|
791
|
|
|
791
|
|
|
792
|
return get_chunked_archive(archive)
|
|
792
|
return get_chunked_archive(archive)
|
|
793
|
|
|
793
|
|
|
794
|
@LoginRequired()
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                               'repository.admin')
def diff(self, repo_name, f_path):
    """
    Show (or download) the diff of a single file between two commits.

    Query parameters drive the behaviour: ``diff1``/``diff2`` select the
    two sides, ``diff`` selects the action ('download', 'raw' or the
    default HTML rendering), ``ignorews``/``context`` tune the diff, and
    ``show_rev`` short-circuits to a redirect to the file view.

    :param repo_name: name of the repository (used by the permission
        decorator; the handler itself reads ``c.repo_name``)
    :param f_path: path of the file to diff
    """
    ignore_whitespace = request.GET.get('ignorews') == '1'
    # NOTE(review): taken straight from GET, so this may be a string
    # rather than an int when the parameter is present — presumably the
    # diff backend tolerates that; confirm before changing.
    line_context = request.GET.get('context', 3)
    diff1 = request.GET.get('diff1', '')

    # diff1 may carry an embedded path reference; split it apart
    path1, diff1 = parse_path_ref(diff1, default_path=f_path)

    diff2 = request.GET.get('diff2', '')
    c.action = request.GET.get('diff')
    c.no_changes = diff1 == diff2
    c.f_path = f_path
    c.big_diff = False
    c.ignorews_url = _ignorews_url
    c.context_url = _context_url
    c.changes = OrderedDict()
    c.changes[diff2] = []

    # without at least one side there is nothing to diff
    if not any((diff1, diff2)):
        h.flash(
            'Need query parameter "diff1" or "diff2" to generate a diff.',
            category='error')
        raise HTTPBadRequest()

    # special case if we want a show commit_id only, it's impl here
    # to reduce JS and callbacks
    if request.GET.get('show_rev') and diff1:
        if str2bool(request.GET.get('annotate', 'False')):
            _url = url('files_annotate_home', repo_name=c.repo_name,
                       revision=diff1, f_path=path1)
        else:
            _url = url('files_home', repo_name=c.repo_name,
                       revision=diff1, f_path=path1)

        return redirect(_url)

    try:
        node1 = self._get_file_node(diff1, path1)
        node2 = self._get_file_node(diff2, f_path)
    except (RepositoryError, NodeError):
        log.exception("Exception while trying to get node from repository")
        return redirect(url(
            'files_home', repo_name=c.repo_name, f_path=f_path))

    # both sides resolving to an empty commit means the file exists in
    # neither revision
    if all(isinstance(node.commit, EmptyCommit)
           for node in (node1, node2)):
        raise HTTPNotFound

    c.commit_1 = node1.commit
    c.commit_2 = node2.commit

    if c.action == 'download':
        # raw git-style diff served as an attachment
        _diff = diffs.get_gitdiff(node1, node2,
                                  ignore_whitespace=ignore_whitespace,
                                  context=line_context)
        diff = diffs.DiffProcessor(_diff, format='gitdiff')

        diff_name = '%s_vs_%s.diff' % (diff1, diff2)
        response.content_type = 'text/plain'
        response.content_disposition = (
            'attachment; filename=%s' % (diff_name,)
        )
        charset = self._get_default_encoding()
        if charset:
            response.charset = charset
        return diff.as_raw()

    elif c.action == 'raw':
        # same raw diff, but rendered inline instead of downloaded
        _diff = diffs.get_gitdiff(node1, node2,
                                  ignore_whitespace=ignore_whitespace,
                                  context=line_context)
        diff = diffs.DiffProcessor(_diff, format='gitdiff')
        response.content_type = 'text/plain'
        charset = self._get_default_encoding()
        if charset:
            response.charset = charset
        return diff.as_raw()

    else:
        # default: full HTML diff page
        fid = h.FID(diff2, node2.path)
        line_context_lcl = get_line_ctx(fid, request.GET)
        ign_whitespace_lcl = get_ignore_ws(fid, request.GET)

        __, commit1, commit2, diff, st, data = diffs.wrapped_diff(
            filenode_old=node1,
            filenode_new=node2,
            diff_limit=self.cut_off_limit_diff,
            file_limit=self.cut_off_limit_file,
            show_full_diff=request.GET.get('fulldiff'),
            ignore_whitespace=ign_whitespace_lcl,
            line_context=line_context_lcl,)

        c.lines_added = data['stats']['added'] if data else 0
        c.lines_deleted = data['stats']['deleted'] if data else 0
        c.files = [data]
        c.commit_ranges = [c.commit_1, c.commit_2]
        c.ancestor = None
        c.statuses = []
        c.target_repo = c.rhodecode_db_repo
        c.filename1 = node1.path
        c.filename = node2.path
        c.binary_file = node1.is_binary or node2.is_binary
        operation = data['operation'] if data else ''

        commit_changes = {
            # TODO: it's passing the old file to the diff to keep the
            # standard but this is not being used for this template,
            # but might need both files in the future or a more standard
            # way to work with that
            'fid': [commit1, commit2, operation,
                    c.filename, diff, st, data]
        }

        c.changes = commit_changes

    return render('files/file_diff.html')
@LoginRequired()
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                               'repository.admin')
def diff_2way(self, repo_name, f_path):
    """
    Render the side-by-side (2-way) diff view of a single file between
    the two commits given by the ``diff1``/``diff2`` query parameters.

    Redirects back to the file view when either commit is unknown or
    when the file did not change between the two commits; raises 404
    when the file exists in neither commit.

    :param repo_name: name of the repository (consumed by the
        permission decorator; the handler reads ``c.repo_name``)
    :param f_path: path of the file to compare
    """
    diff1 = request.GET.get('diff1', '')
    diff2 = request.GET.get('diff2', '')

    nodes = []
    unknown_commits = []
    for commit in [diff1, diff2]:
        try:
            nodes.append(self._get_file_node(commit, f_path))
        except (RepositoryError, NodeError):
            # FIX: pass the argument lazily to the logger instead of
            # eagerly %-formatting the message string; the rendered
            # message is unchanged but formatting is deferred until the
            # record is actually emitted.
            log.exception('%s does not exist', commit)
            unknown_commits.append(commit)
            h.flash(h.literal(
                _('Commit %(commit)s does not exist.') % {'commit': commit}
                ), category='error')

    if unknown_commits:
        return redirect(url('files_home', repo_name=c.repo_name,
                            f_path=f_path))

    # file present in neither commit -> nothing to show
    if all(isinstance(node.commit, EmptyCommit) for node in nodes):
        raise HTTPNotFound

    node1, node2 = nodes

    f_gitdiff = diffs.get_gitdiff(node1, node2, ignore_whitespace=False)
    diff_processor = diffs.DiffProcessor(f_gitdiff, format='gitdiff')
    diff_data = diff_processor.prepare()

    if not diff_data or diff_data[0]['raw_diff'] == '':
        h.flash(h.literal(_('%(file_path)s has not changed '
                            'between %(commit_1)s and %(commit_2)s.') % {
                                'file_path': f_path,
                                'commit_1': node1.commit.id,
                                'commit_2': node2.commit.id
                            }), category='error')
        return redirect(url('files_home', repo_name=c.repo_name,
                            f_path=f_path))

    c.diff_data = diff_data[0]
    c.FID = h.FID(diff2, node2.path)
    # cleanup some unneeded data
    del c.diff_data['raw_diff']
    del c.diff_data['chunks']

    c.node1 = node1
    c.commit_1 = node1.commit
    c.node2 = node2
    c.commit_2 = node2.commit

    return render('files/diff_2way.html')
def _get_file_node(self, commit_id, f_path):
    """
    Resolve ``f_path`` at ``commit_id`` to a file node.

    Falls back to an empty ``FileNode`` bound to an ``EmptyCommit`` when
    the commit reference is empty/null-ish or the path does not exist at
    that commit; raises ``NodeError`` when the path is a directory.

    :param commit_id: commit reference, or one of the "empty" markers
        ('', None, 'None', 12 or 40 zeros)
    :param f_path: path of the file inside the repository
    :return: a ``FileNode`` (possibly empty) with ``.commit`` set
    """
    empty_markers = ['', None, 'None', '0' * 12, '0' * 40]
    if commit_id in empty_markers:
        # no usable commit reference at all -> synthesize an empty node
        empty = EmptyCommit(
            repo=c.rhodecode_repo,
            alias=c.rhodecode_repo.alias)
        return FileNode(f_path, '', commit=empty)

    commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
    try:
        node = commit.get_node(f_path)
        if node.is_dir():
            raise NodeError('%s path is a %s not a file'
                            % (node, type(node)))
    except NodeDoesNotExistError:
        # path missing at this commit: keep the commit's metadata but
        # hand back an empty file node
        empty = EmptyCommit(
            commit_id=commit_id,
            idx=commit.idx,
            repo=commit.repository,
            alias=commit.repository.alias,
            message=commit.message,
            author=commit.author,
            date=commit.date)
        node = FileNode(f_path, '', commit=empty)
    return node
def _get_node_history(self, commit, f_path, commits=None):
    """
    get commit history for given node

    :param commit: commit to calculate history
    :param f_path: path for node to calculate history for
    :param commits: if passed don't calculate history and take
        commits defined in this list
    """
    # calculate history based on tip
    tip = c.rhodecode_repo.get_commit()
    if commits is None:
        pre_load = ["author", "branch"]
        try:
            commits = tip.get_file_history(f_path, pre_load=pre_load)
        except (NodeDoesNotExistError, CommitError):
            # this node is not present at tip!
            commits = commit.get_file_history(f_path, pre_load=pre_load)

    # first group: one entry per commit touching the file
    # NOTE: the loop variable deliberately reuses the name `commit`;
    # the parameter is no longer needed past this point.
    commits_group = ([], _("Changesets"))
    for commit in commits:
        branch = ' (%s)' % commit.branch if commit.branch else ''
        n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
        commits_group[0].append((commit.raw_id, n_desc,))
    history = [commits_group]

    symbolic_reference = self._symbolic_reference

    if c.rhodecode_repo.alias == 'svn':
        # svn encodes branches/tags in the path; strip the prefix and
        # switch to svn-style symbolic references
        adjusted_f_path = self._adjust_file_path_for_svn(
            f_path, c.rhodecode_repo)
        if adjusted_f_path != f_path:
            log.debug(
                'Recognized svn tag or branch in file "%s", using svn '
                'specific symbolic references', f_path)
            f_path = adjusted_f_path
            symbolic_reference = self._symbolic_reference_svn

    branches = self._create_references(
        c.rhodecode_repo.branches, symbolic_reference, f_path)
    tags = self._create_references(
        c.rhodecode_repo.tags, symbolic_reference, f_path)

    history.append((branches, _("Branches")))
    history.append((tags, _("Tags")))

    return history, commits
def _adjust_file_path_for_svn(self, f_path, repo):
    """
    Computes the relative path of `f_path`.

    This is mainly based on prefix matching of the recognized tags and
    branches in the underlying repository.
    """
    # longest names first so the most specific prefix wins
    candidates = sorted(
        itertools.chain(repo.branches.iterkeys(), repo.tags.iterkeys()),
        key=len, reverse=True)

    for name in candidates:
        prefix = name + '/'
        if f_path.startswith(prefix):
            return vcspath.relpath(f_path, name)
    return f_path
def _create_references(
|
|
1064
|
def _create_references(
|
|
1065
|
self, branches_or_tags, symbolic_reference, f_path):
|
|
1065
|
self, branches_or_tags, symbolic_reference, f_path):
|
|
1066
|
items = []
|
|
1066
|
items = []
|
|
1067
|
for name, commit_id in branches_or_tags.items():
|
|
1067
|
for name, commit_id in branches_or_tags.items():
|
|
1068
|
sym_ref = symbolic_reference(commit_id, name, f_path)
|
|
1068
|
sym_ref = symbolic_reference(commit_id, name, f_path)
|
|
1069
|
items.append((sym_ref, name))
|
|
1069
|
items.append((sym_ref, name))
|
|
1070
|
return items
|
|
1070
|
return items
|
|
1071
|
|
|
1071
|
|
|
1072
|
def _symbolic_reference(self, commit_id, name, f_path):
|
|
1072
|
def _symbolic_reference(self, commit_id, name, f_path):
|
|
1073
|
return commit_id
|
|
1073
|
return commit_id
|
|
1074
|
|
|
1074
|
|
|
1075
|
def _symbolic_reference_svn(self, commit_id, name, f_path):
    """SVN-style symbolic reference: ``<name>/<f_path>@<commit_id>``."""
    new_f_path = vcspath.join(name, f_path)
    return u'%s@%s' % (new_f_path, commit_id)
@LoginRequired()
@XHRRequired()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@jsonify
def nodelist(self, repo_name, revision, f_path):
    """
    Return a JSON dict with the node list at the given revision/path.

    :param repo_name: name of the repository
    :param revision: revision to list nodes at
    :param f_path: directory path whose nodes are listed
    :return: ``{'nodes': <metadata list>}``
    """
    commit = self.__get_commit_or_redirect(revision, repo_name)
    metadata = self._get_nodelist_at_commit(
        repo_name, commit.raw_id, f_path)
    return {'nodes': metadata}
@LoginRequired()
@XHRRequired()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@jsonify
def metadata_list(self, repo_name, revision, f_path):
    """
    Return a JSON dict with commit metadata (date, author, revision, id)
    for the specified repo, revision and file path.

    :param repo_name: name of the repository
    :param revision: revision of files
    :param f_path: file path of the requested directory
    :return: ``{'metadata': ...}`` on success, ``{'error': ...}`` when
        the node cannot be resolved
    """
    commit = self.__get_commit_or_redirect(revision, repo_name)
    try:
        file_node = commit.get_node(f_path)
    except RepositoryError as e:
        # surface the lookup failure to the XHR caller instead of 500ing
        return {'error': safe_str(e)}

    metadata = self._get_metadata_at_commit(
        repo_name, commit, file_node)
    return {'metadata': metadata}