##// END OF EJS Templates
largefiles: enabled download of largefiles for git and mercurial from web interface....
marcink -
r1577:3fd4ff52 default
parent child Browse files
Show More
@@ -1,241 +1,241 b''
1 # Nix environment for the community edition
1 # Nix environment for the community edition
2 #
2 #
3 # This shall be as lean as possible, just producing the Enterprise
3 # This shall be as lean as possible, just producing the Enterprise
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6
6
7 args@
7 args@
8 { pythonPackages ? "python27Packages"
8 { pythonPackages ? "python27Packages"
9 , pythonExternalOverrides ? self: super: {}
9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? true
10 , doCheck ? true
11 , ...
11 , ...
12 }:
12 }:
13
13
14 let
14 let
15
15
16 # Use nixpkgs from args or import them. We use this indirect approach
16 # Use nixpkgs from args or import them. We use this indirect approach
17 # through args to be able to use the name `pkgs` for our customized packages.
17 # through args to be able to use the name `pkgs` for our customized packages.
18 # Otherwise we will end up with an infinite recursion.
18 # Otherwise we will end up with an infinite recursion.
19 nixpkgs = args.pkgs or (import <nixpkgs> { });
19 nixpkgs = args.pkgs or (import <nixpkgs> { });
20
20
21 # johbo: Interim bridge which allows us to build with the upcoming
21 # johbo: Interim bridge which allows us to build with the upcoming
22 # nixos.16.09 branch (unstable at the moment of writing this note) and the
22 # nixos.16.09 branch (unstable at the moment of writing this note) and the
23 # current stable nixos-16.03.
23 # current stable nixos-16.03.
24 backwardsCompatibleFetchgit = { ... }@args:
24 backwardsCompatibleFetchgit = { ... }@args:
25 let
25 let
26 origSources = nixpkgs.fetchgit args;
26 origSources = nixpkgs.fetchgit args;
27 in
27 in
28 nixpkgs.lib.overrideDerivation origSources (oldAttrs: {
28 nixpkgs.lib.overrideDerivation origSources (oldAttrs: {
29 NIX_PREFETCH_GIT_CHECKOUT_HOOK = ''
29 NIX_PREFETCH_GIT_CHECKOUT_HOOK = ''
30 find $out -name '.git*' -print0 | xargs -0 rm -rf
30 find $out -name '.git*' -print0 | xargs -0 rm -rf
31 '';
31 '';
32 });
32 });
33
33
34 # Create a customized version of nixpkgs which should be used throughout the
34 # Create a customized version of nixpkgs which should be used throughout the
35 # rest of this file.
35 # rest of this file.
36 pkgs = nixpkgs.overridePackages (self: super: {
36 pkgs = nixpkgs.overridePackages (self: super: {
37 fetchgit = backwardsCompatibleFetchgit;
37 fetchgit = backwardsCompatibleFetchgit;
38 });
38 });
39
39
40 # Evaluates to the last segment of a file system path.
40 # Evaluates to the last segment of a file system path.
41 basename = path: with pkgs.lib; last (splitString "/" path);
41 basename = path: with pkgs.lib; last (splitString "/" path);
42
42
43 # source code filter used as arugment to builtins.filterSource.
43 # source code filter used as arugment to builtins.filterSource.
44 src-filter = path: type: with pkgs.lib;
44 src-filter = path: type: with pkgs.lib;
45 let
45 let
46 ext = last (splitString "." path);
46 ext = last (splitString "." path);
47 in
47 in
48 !builtins.elem (basename path) [
48 !builtins.elem (basename path) [
49 ".git" ".hg" "__pycache__" ".eggs"
49 ".git" ".hg" "__pycache__" ".eggs"
50 "bower_components" "node_modules"
50 "bower_components" "node_modules"
51 "build" "data" "result" "tmp"] &&
51 "build" "data" "result" "tmp"] &&
52 !builtins.elem ext ["egg-info" "pyc"] &&
52 !builtins.elem ext ["egg-info" "pyc"] &&
53 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
53 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
54 # it would still be good to restore it since we want to ignore "result-*".
54 # it would still be good to restore it since we want to ignore "result-*".
55 !hasPrefix "result" path;
55 !hasPrefix "result" path;
56
56
57 basePythonPackages = with builtins; if isAttrs pythonPackages
57 basePythonPackages = with builtins; if isAttrs pythonPackages
58 then pythonPackages
58 then pythonPackages
59 else getAttr pythonPackages pkgs;
59 else getAttr pythonPackages pkgs;
60
60
61 buildBowerComponents =
61 buildBowerComponents =
62 pkgs.buildBowerComponents or
62 pkgs.buildBowerComponents or
63 (import ./pkgs/backport-16.03-build-bower-components.nix { inherit pkgs; });
63 (import ./pkgs/backport-16.03-build-bower-components.nix { inherit pkgs; });
64
64
65 sources = pkgs.config.rc.sources or {};
65 sources = pkgs.config.rc.sources or {};
66 version = builtins.readFile ./rhodecode/VERSION;
66 version = builtins.readFile ./rhodecode/VERSION;
67 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
67 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
68
68
69 nodeEnv = import ./pkgs/node-default.nix {
69 nodeEnv = import ./pkgs/node-default.nix {
70 inherit pkgs;
70 inherit pkgs;
71 };
71 };
72 nodeDependencies = nodeEnv.shell.nodeDependencies;
72 nodeDependencies = nodeEnv.shell.nodeDependencies;
73
73
74 bowerComponents = buildBowerComponents {
74 bowerComponents = buildBowerComponents {
75 name = "enterprise-ce-${version}";
75 name = "enterprise-ce-${version}";
76 generated = ./pkgs/bower-packages.nix;
76 generated = ./pkgs/bower-packages.nix;
77 src = rhodecode-enterprise-ce-src;
77 src = rhodecode-enterprise-ce-src;
78 };
78 };
79
79
80 pythonGeneratedPackages = self: basePythonPackages.override (a: {
80 pythonGeneratedPackages = self: basePythonPackages.override (a: {
81 inherit self;
81 inherit self;
82 })
82 })
83 // (scopedImport {
83 // (scopedImport {
84 self = self;
84 self = self;
85 super = basePythonPackages;
85 super = basePythonPackages;
86 inherit pkgs;
86 inherit pkgs;
87 inherit (pkgs) fetchurl fetchgit;
87 inherit (pkgs) fetchurl fetchgit;
88 } ./pkgs/python-packages.nix);
88 } ./pkgs/python-packages.nix);
89
89
90 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
90 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
91 inherit
91 inherit
92 basePythonPackages
92 basePythonPackages
93 pkgs;
93 pkgs;
94 };
94 };
95
95
96 pythonLocalOverrides = self: super: {
96 pythonLocalOverrides = self: super: {
97 rhodecode-enterprise-ce =
97 rhodecode-enterprise-ce =
98 let
98 let
99 linkNodeAndBowerPackages = ''
99 linkNodeAndBowerPackages = ''
100 echo "Export RhodeCode CE path"
100 echo "Export RhodeCode CE path"
101 export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
101 export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
102 echo "Link node packages"
102 echo "Link node packages"
103 rm -fr node_modules
103 rm -fr node_modules
104 mkdir node_modules
104 mkdir node_modules
105 # johbo: Linking individual packages allows us to run "npm install"
105 # johbo: Linking individual packages allows us to run "npm install"
106 # inside of a shell to try things out. Re-entering the shell will
106 # inside of a shell to try things out. Re-entering the shell will
107 # restore a clean environment.
107 # restore a clean environment.
108 ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
108 ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
109
109
110 echo "DONE: Link node packages"
110 echo "DONE: Link node packages"
111
111
112 echo "Link bower packages"
112 echo "Link bower packages"
113 rm -fr bower_components
113 rm -fr bower_components
114 mkdir bower_components
114 mkdir bower_components
115
115
116 ln -s ${bowerComponents}/bower_components/* bower_components/
116 ln -s ${bowerComponents}/bower_components/* bower_components/
117 echo "DONE: Link bower packages"
117 echo "DONE: Link bower packages"
118 '';
118 '';
119 in super.rhodecode-enterprise-ce.override (attrs: {
119 in super.rhodecode-enterprise-ce.override (attrs: {
120
120
121 inherit
121 inherit
122 doCheck
122 doCheck
123 version;
123 version;
124 name = "rhodecode-enterprise-ce-${version}";
124 name = "rhodecode-enterprise-ce-${version}";
125 releaseName = "RhodeCodeEnterpriseCE-${version}";
125 releaseName = "RhodeCodeEnterpriseCE-${version}";
126 src = rhodecode-enterprise-ce-src;
126 src = rhodecode-enterprise-ce-src;
127 dontStrip = true; # prevent strip, we don't need it.
127 dontStrip = true; # prevent strip, we don't need it.
128
128
129 buildInputs =
129 buildInputs =
130 attrs.buildInputs ++
130 attrs.buildInputs ++
131 (with self; [
131 (with self; [
132 pkgs.nodePackages.bower
132 pkgs.nodePackages.bower
133 pkgs.nodePackages.grunt-cli
133 pkgs.nodePackages.grunt-cli
134 pkgs.subversion
134 pkgs.subversion
135 pytest-catchlog
135 pytest-catchlog
136 rhodecode-testdata
136 rhodecode-testdata
137 ]);
137 ]);
138
138
139 #TODO: either move this into overrides, OR use the new machanics from
139 #TODO: either move this into overrides, OR use the new machanics from
140 # pip2nix and requiremtn.txt file
140 # pip2nix and requiremtn.txt file
141 propagatedBuildInputs = attrs.propagatedBuildInputs ++ (with self; [
141 propagatedBuildInputs = attrs.propagatedBuildInputs ++ (with self; [
142 rhodecode-tools
142 rhodecode-tools
143 ]);
143 ]);
144
144
145 # TODO: johbo: Make a nicer way to expose the parts. Maybe
145 # TODO: johbo: Make a nicer way to expose the parts. Maybe
146 # pkgs/default.nix?
146 # pkgs/default.nix?
147 passthru = {
147 passthru = {
148 inherit
148 inherit
149 bowerComponents
149 bowerComponents
150 linkNodeAndBowerPackages
150 linkNodeAndBowerPackages
151 myPythonPackagesUnfix
151 myPythonPackagesUnfix
152 pythonLocalOverrides;
152 pythonLocalOverrides;
153 pythonPackages = self;
153 pythonPackages = self;
154 };
154 };
155
155
156 LC_ALL = "en_US.UTF-8";
156 LC_ALL = "en_US.UTF-8";
157 LOCALE_ARCHIVE =
157 LOCALE_ARCHIVE =
158 if pkgs.stdenv ? glibc
158 if pkgs.stdenv ? glibc
159 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
159 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
160 else "";
160 else "";
161
161
162 preCheck = ''
162 preCheck = ''
163 export PATH="$out/bin:$PATH"
163 export PATH="$out/bin:$PATH"
164 '';
164 '';
165
165
166 postCheck = ''
166 postCheck = ''
167 rm -rf $out/lib/${self.python.libPrefix}/site-packages/pytest_pylons
167 rm -rf $out/lib/${self.python.libPrefix}/site-packages/pytest_pylons
168 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
168 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
169 '';
169 '';
170
170
171 preBuild = linkNodeAndBowerPackages + ''
171 preBuild = linkNodeAndBowerPackages + ''
172 grunt
172 grunt
173 rm -fr node_modules
173 rm -fr node_modules
174 '';
174 '';
175
175
176 postInstall = ''
176 postInstall = ''
177 # python based programs need to be wrapped
177 # python based programs need to be wrapped
178 ln -s ${self.supervisor}/bin/supervisor* $out/bin/
178 ln -s ${self.supervisor}/bin/supervisor* $out/bin/
179 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
179 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
180 ln -s ${self.PasteScript}/bin/paster $out/bin/
180 ln -s ${self.PasteScript}/bin/paster $out/bin/
181 ln -s ${self.channelstream}/bin/channelstream $out/bin/
181 ln -s ${self.channelstream}/bin/channelstream $out/bin/
182 ln -s ${self.pyramid}/bin/* $out/bin/ #*/
182 ln -s ${self.pyramid}/bin/* $out/bin/ #*/
183
183
184 # rhodecode-tools
184 # rhodecode-tools
185 # TODO: johbo: re-think this. Do the tools import anything from enterprise?
185 # TODO: johbo: re-think this. Do the tools import anything from enterprise?
186 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
186 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
187
187
188 # note that condition should be restricted when adding further tools
188 # note that condition should be restricted when adding further tools
189 for file in $out/bin/*; do #*/
189 for file in $out/bin/*; do #*/
190 wrapProgram $file \
190 wrapProgram $file \
191 --prefix PYTHONPATH : $PYTHONPATH \
191 --prefix PYTHONPATH : $PYTHONPATH \
192 --prefix PATH : $PATH \
192 --prefix PATH : $PATH \
193 --set PYTHONHASHSEED random
193 --set PYTHONHASHSEED random
194 done
194 done
195
195
196 mkdir $out/etc
196 mkdir $out/etc
197 cp configs/production.ini $out/etc
197 cp configs/production.ini $out/etc
198
198
199 echo "Writing meta information for rccontrol to nix-support/rccontrol"
199 echo "Writing meta information for rccontrol to nix-support/rccontrol"
200 mkdir -p $out/nix-support/rccontrol
200 mkdir -p $out/nix-support/rccontrol
201 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
201 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
202 echo "DONE: Meta information for rccontrol written"
202 echo "DONE: Meta information for rccontrol written"
203
203
204 # TODO: johbo: Make part of ac-tests
204 # TODO: johbo: Make part of ac-tests
205 if [ ! -f rhodecode/public/js/scripts.js ]; then
205 if [ ! -f rhodecode/public/js/scripts.js ]; then
206 echo "Missing scripts.js"
206 echo "Missing scripts.js"
207 exit 1
207 exit 1
208 fi
208 fi
209 if [ ! -f rhodecode/public/css/style.css ]; then
209 if [ ! -f rhodecode/public/css/style.css ]; then
210 echo "Missing style.css"
210 echo "Missing style.css"
211 exit 1
211 exit 1
212 fi
212 fi
213 '';
213 '';
214
214
215 });
215 });
216
216
217 rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
217 rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
218 inherit
218 inherit
219 doCheck
219 doCheck
220 pkgs
220 pkgs
221 pythonPackages;
221 pythonPackages;
222 };
222 };
223
223
224 };
224 };
225
225
226 rhodecode-testdata-src = sources.rhodecode-testdata or (
226 rhodecode-testdata-src = sources.rhodecode-testdata or (
227 pkgs.fetchhg {
227 pkgs.fetchhg {
228 url = "https://code.rhodecode.com/upstream/rc_testdata";
228 url = "https://code.rhodecode.com/upstream/rc_testdata";
229 rev = "v0.9.0";
229 rev = "v0.10.0";
230 sha256 = "0k0ccb7cncd6mmzwckfbr6l7fsymcympwcm948qc3i0f0m6bbg1y";
230 sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
231 });
231 });
232
232
233 # Apply all overrides and fix the final package set
233 # Apply all overrides and fix the final package set
234 myPythonPackagesUnfix = with pkgs.lib;
234 myPythonPackagesUnfix = with pkgs.lib;
235 (extends pythonExternalOverrides
235 (extends pythonExternalOverrides
236 (extends pythonLocalOverrides
236 (extends pythonLocalOverrides
237 (extends pythonOverrides
237 (extends pythonOverrides
238 pythonGeneratedPackages)));
238 pythonGeneratedPackages)));
239 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
239 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
240
240
241 in myPythonPackages.rhodecode-enterprise-ce
241 in myPythonPackages.rhodecode-enterprise-ce
@@ -1,1087 +1,1097 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Files controller for RhodeCode Enterprise
22 Files controller for RhodeCode Enterprise
23 """
23 """
24
24
25 import itertools
25 import itertools
26 import logging
26 import logging
27 import os
27 import os
28 import shutil
28 import shutil
29 import tempfile
29 import tempfile
30
30
31 from pylons import request, response, tmpl_context as c, url
31 from pylons import request, response, tmpl_context as c, url
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33 from pylons.controllers.util import redirect
33 from pylons.controllers.util import redirect
34 from webob.exc import HTTPNotFound, HTTPBadRequest
34 from webob.exc import HTTPNotFound, HTTPBadRequest
35
35
36 from rhodecode.controllers.utils import parse_path_ref
36 from rhodecode.controllers.utils import parse_path_ref
37 from rhodecode.lib import diffs, helpers as h, caches
37 from rhodecode.lib import diffs, helpers as h, caches
38 from rhodecode.lib.compat import OrderedDict
38 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.codeblocks import (
39 from rhodecode.lib.codeblocks import (
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 from rhodecode.lib.utils import jsonify, action_logger
41 from rhodecode.lib.utils import jsonify, action_logger
42 from rhodecode.lib.utils2 import (
42 from rhodecode.lib.utils2 import (
43 convert_line_endings, detect_mode, safe_str, str2bool)
43 convert_line_endings, detect_mode, safe_str, str2bool)
44 from rhodecode.lib.auth import (
44 from rhodecode.lib.auth import (
45 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
45 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
46 from rhodecode.lib.base import BaseRepoController, render
46 from rhodecode.lib.base import BaseRepoController, render
47 from rhodecode.lib.vcs import path as vcspath
47 from rhodecode.lib.vcs import path as vcspath
48 from rhodecode.lib.vcs.backends.base import EmptyCommit
48 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 from rhodecode.lib.vcs.conf import settings
49 from rhodecode.lib.vcs.conf import settings
50 from rhodecode.lib.vcs.exceptions import (
50 from rhodecode.lib.vcs.exceptions import (
51 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
51 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
52 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
52 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
53 NodeDoesNotExistError, CommitError, NodeError)
53 NodeDoesNotExistError, CommitError, NodeError)
54 from rhodecode.lib.vcs.nodes import FileNode
54 from rhodecode.lib.vcs.nodes import FileNode
55
55
56 from rhodecode.model.repo import RepoModel
56 from rhodecode.model.repo import RepoModel
57 from rhodecode.model.scm import ScmModel
57 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.db import Repository
58 from rhodecode.model.db import Repository
59
59
60 from rhodecode.controllers.changeset import (
60 from rhodecode.controllers.changeset import (
61 _ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
61 _ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
62 from rhodecode.lib.exceptions import NonRelativePathError
62 from rhodecode.lib.exceptions import NonRelativePathError
63
63
64 log = logging.getLogger(__name__)
64 log = logging.getLogger(__name__)
65
65
66
66
67 class FilesController(BaseRepoController):
67 class FilesController(BaseRepoController):
68
68
69 def __before__(self):
69 def __before__(self):
70 super(FilesController, self).__before__()
70 super(FilesController, self).__before__()
71 c.cut_off_limit = self.cut_off_limit_file
71 c.cut_off_limit = self.cut_off_limit_file
72
72
73 def _get_default_encoding(self):
73 def _get_default_encoding(self):
74 enc_list = getattr(c, 'default_encodings', [])
74 enc_list = getattr(c, 'default_encodings', [])
75 return enc_list[0] if enc_list else 'UTF-8'
75 return enc_list[0] if enc_list else 'UTF-8'
76
76
77 def __get_commit_or_redirect(self, commit_id, repo_name,
77 def __get_commit_or_redirect(self, commit_id, repo_name,
78 redirect_after=True):
78 redirect_after=True):
79 """
79 """
80 This is a safe way to get commit. If an error occurs it redirects to
80 This is a safe way to get commit. If an error occurs it redirects to
81 tip with proper message
81 tip with proper message
82
82
83 :param commit_id: id of commit to fetch
83 :param commit_id: id of commit to fetch
84 :param repo_name: repo name to redirect after
84 :param repo_name: repo name to redirect after
85 :param redirect_after: toggle redirection
85 :param redirect_after: toggle redirection
86 """
86 """
87 try:
87 try:
88 return c.rhodecode_repo.get_commit(commit_id)
88 return c.rhodecode_repo.get_commit(commit_id)
89 except EmptyRepositoryError:
89 except EmptyRepositoryError:
90 if not redirect_after:
90 if not redirect_after:
91 return None
91 return None
92 url_ = url('files_add_home',
92 url_ = url('files_add_home',
93 repo_name=c.repo_name,
93 repo_name=c.repo_name,
94 revision=0, f_path='', anchor='edit')
94 revision=0, f_path='', anchor='edit')
95 if h.HasRepoPermissionAny(
95 if h.HasRepoPermissionAny(
96 'repository.write', 'repository.admin')(c.repo_name):
96 'repository.write', 'repository.admin')(c.repo_name):
97 add_new = h.link_to(
97 add_new = h.link_to(
98 _('Click here to add a new file.'),
98 _('Click here to add a new file.'),
99 url_, class_="alert-link")
99 url_, class_="alert-link")
100 else:
100 else:
101 add_new = ""
101 add_new = ""
102 h.flash(h.literal(
102 h.flash(h.literal(
103 _('There are no files yet. %s') % add_new), category='warning')
103 _('There are no files yet. %s') % add_new), category='warning')
104 redirect(h.url('summary_home', repo_name=repo_name))
104 redirect(h.url('summary_home', repo_name=repo_name))
105 except (CommitDoesNotExistError, LookupError):
105 except (CommitDoesNotExistError, LookupError):
106 msg = _('No such commit exists for this repository')
106 msg = _('No such commit exists for this repository')
107 h.flash(msg, category='error')
107 h.flash(msg, category='error')
108 raise HTTPNotFound()
108 raise HTTPNotFound()
109 except RepositoryError as e:
109 except RepositoryError as e:
110 h.flash(safe_str(e), category='error')
110 h.flash(safe_str(e), category='error')
111 raise HTTPNotFound()
111 raise HTTPNotFound()
112
112
113 def __get_filenode_or_redirect(self, repo_name, commit, path):
113 def __get_filenode_or_redirect(self, repo_name, commit, path):
114 """
114 """
115 Returns file_node, if error occurs or given path is directory,
115 Returns file_node, if error occurs or given path is directory,
116 it'll redirect to top level path
116 it'll redirect to top level path
117
117
118 :param repo_name: repo_name
118 :param repo_name: repo_name
119 :param commit: given commit
119 :param commit: given commit
120 :param path: path to lookup
120 :param path: path to lookup
121 """
121 """
122 try:
122 try:
123 file_node = commit.get_node(path)
123 file_node = commit.get_node(path)
124 if file_node.is_dir():
124 if file_node.is_dir():
125 raise RepositoryError('The given path is a directory')
125 raise RepositoryError('The given path is a directory')
126 except CommitDoesNotExistError:
126 except CommitDoesNotExistError:
127 msg = _('No such commit exists for this repository')
127 msg = _('No such commit exists for this repository')
128 log.exception(msg)
128 log.exception(msg)
129 h.flash(msg, category='error')
129 h.flash(msg, category='error')
130 raise HTTPNotFound()
130 raise HTTPNotFound()
131 except RepositoryError as e:
131 except RepositoryError as e:
132 h.flash(safe_str(e), category='error')
132 h.flash(safe_str(e), category='error')
133 raise HTTPNotFound()
133 raise HTTPNotFound()
134
134
135 return file_node
135 return file_node
136
136
137 def __get_tree_cache_manager(self, repo_name, namespace_type):
137 def __get_tree_cache_manager(self, repo_name, namespace_type):
138 _namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
138 _namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
139 return caches.get_cache_manager('repo_cache_long', _namespace)
139 return caches.get_cache_manager('repo_cache_long', _namespace)
140
140
141 def _get_tree_at_commit(self, repo_name, commit_id, f_path,
141 def _get_tree_at_commit(self, repo_name, commit_id, f_path,
142 full_load=False, force=False):
142 full_load=False, force=False):
143 def _cached_tree():
143 def _cached_tree():
144 log.debug('Generating cached file tree for %s, %s, %s',
144 log.debug('Generating cached file tree for %s, %s, %s',
145 repo_name, commit_id, f_path)
145 repo_name, commit_id, f_path)
146 c.full_load = full_load
146 c.full_load = full_load
147 return render('files/files_browser_tree.mako')
147 return render('files/files_browser_tree.mako')
148
148
149 cache_manager = self.__get_tree_cache_manager(
149 cache_manager = self.__get_tree_cache_manager(
150 repo_name, caches.FILE_TREE)
150 repo_name, caches.FILE_TREE)
151
151
152 cache_key = caches.compute_key_from_params(
152 cache_key = caches.compute_key_from_params(
153 repo_name, commit_id, f_path)
153 repo_name, commit_id, f_path)
154
154
155 if force:
155 if force:
156 # we want to force recompute of caches
156 # we want to force recompute of caches
157 cache_manager.remove_value(cache_key)
157 cache_manager.remove_value(cache_key)
158
158
159 return cache_manager.get(cache_key, createfunc=_cached_tree)
159 return cache_manager.get(cache_key, createfunc=_cached_tree)
160
160
161 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
161 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
162 def _cached_nodes():
162 def _cached_nodes():
163 log.debug('Generating cached nodelist for %s, %s, %s',
163 log.debug('Generating cached nodelist for %s, %s, %s',
164 repo_name, commit_id, f_path)
164 repo_name, commit_id, f_path)
165 _d, _f = ScmModel().get_nodes(
165 _d, _f = ScmModel().get_nodes(
166 repo_name, commit_id, f_path, flat=False)
166 repo_name, commit_id, f_path, flat=False)
167 return _d + _f
167 return _d + _f
168
168
169 cache_manager = self.__get_tree_cache_manager(
169 cache_manager = self.__get_tree_cache_manager(
170 repo_name, caches.FILE_SEARCH_TREE_META)
170 repo_name, caches.FILE_SEARCH_TREE_META)
171
171
172 cache_key = caches.compute_key_from_params(
172 cache_key = caches.compute_key_from_params(
173 repo_name, commit_id, f_path)
173 repo_name, commit_id, f_path)
174 return cache_manager.get(cache_key, createfunc=_cached_nodes)
174 return cache_manager.get(cache_key, createfunc=_cached_nodes)
175
175
176 @LoginRequired()
176 @LoginRequired()
177 @HasRepoPermissionAnyDecorator(
177 @HasRepoPermissionAnyDecorator(
178 'repository.read', 'repository.write', 'repository.admin')
178 'repository.read', 'repository.write', 'repository.admin')
179 def index(
179 def index(
180 self, repo_name, revision, f_path, annotate=False, rendered=False):
180 self, repo_name, revision, f_path, annotate=False, rendered=False):
181 commit_id = revision
181 commit_id = revision
182
182
183 # redirect to given commit_id from form if given
183 # redirect to given commit_id from form if given
184 get_commit_id = request.GET.get('at_rev', None)
184 get_commit_id = request.GET.get('at_rev', None)
185 if get_commit_id:
185 if get_commit_id:
186 self.__get_commit_or_redirect(get_commit_id, repo_name)
186 self.__get_commit_or_redirect(get_commit_id, repo_name)
187
187
188 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
188 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
189 c.branch = request.GET.get('branch', None)
189 c.branch = request.GET.get('branch', None)
190 c.f_path = f_path
190 c.f_path = f_path
191 c.annotate = annotate
191 c.annotate = annotate
192 # default is false, but .rst/.md files later are autorendered, we can
192 # default is false, but .rst/.md files later are autorendered, we can
193 # overwrite autorendering by setting this GET flag
193 # overwrite autorendering by setting this GET flag
194 c.renderer = rendered or not request.GET.get('no-render', False)
194 c.renderer = rendered or not request.GET.get('no-render', False)
195
195
196 # prev link
196 # prev link
197 try:
197 try:
198 prev_commit = c.commit.prev(c.branch)
198 prev_commit = c.commit.prev(c.branch)
199 c.prev_commit = prev_commit
199 c.prev_commit = prev_commit
200 c.url_prev = url('files_home', repo_name=c.repo_name,
200 c.url_prev = url('files_home', repo_name=c.repo_name,
201 revision=prev_commit.raw_id, f_path=f_path)
201 revision=prev_commit.raw_id, f_path=f_path)
202 if c.branch:
202 if c.branch:
203 c.url_prev += '?branch=%s' % c.branch
203 c.url_prev += '?branch=%s' % c.branch
204 except (CommitDoesNotExistError, VCSError):
204 except (CommitDoesNotExistError, VCSError):
205 c.url_prev = '#'
205 c.url_prev = '#'
206 c.prev_commit = EmptyCommit()
206 c.prev_commit = EmptyCommit()
207
207
208 # next link
208 # next link
209 try:
209 try:
210 next_commit = c.commit.next(c.branch)
210 next_commit = c.commit.next(c.branch)
211 c.next_commit = next_commit
211 c.next_commit = next_commit
212 c.url_next = url('files_home', repo_name=c.repo_name,
212 c.url_next = url('files_home', repo_name=c.repo_name,
213 revision=next_commit.raw_id, f_path=f_path)
213 revision=next_commit.raw_id, f_path=f_path)
214 if c.branch:
214 if c.branch:
215 c.url_next += '?branch=%s' % c.branch
215 c.url_next += '?branch=%s' % c.branch
216 except (CommitDoesNotExistError, VCSError):
216 except (CommitDoesNotExistError, VCSError):
217 c.url_next = '#'
217 c.url_next = '#'
218 c.next_commit = EmptyCommit()
218 c.next_commit = EmptyCommit()
219
219
220 # files or dirs
220 # files or dirs
221 try:
221 try:
222 c.file = c.commit.get_node(f_path)
222 c.file = c.commit.get_node(f_path)
223 c.file_author = True
223 c.file_author = True
224 c.file_tree = ''
224 c.file_tree = ''
225 if c.file.is_file():
225 if c.file.is_file():
226 c.lf_node = c.file.get_largefile_node()
227
226 c.file_source_page = 'true'
228 c.file_source_page = 'true'
227 c.file_last_commit = c.file.last_commit
229 c.file_last_commit = c.file.last_commit
228 if c.file.size < self.cut_off_limit_file:
230 if c.file.size < self.cut_off_limit_file:
229 if c.annotate: # annotation has precedence over renderer
231 if c.annotate: # annotation has precedence over renderer
230 c.annotated_lines = filenode_as_annotated_lines_tokens(
232 c.annotated_lines = filenode_as_annotated_lines_tokens(
231 c.file
233 c.file
232 )
234 )
233 else:
235 else:
234 c.renderer = (
236 c.renderer = (
235 c.renderer and h.renderer_from_filename(c.file.path)
237 c.renderer and h.renderer_from_filename(c.file.path)
236 )
238 )
237 if not c.renderer:
239 if not c.renderer:
238 c.lines = filenode_as_lines_tokens(c.file)
240 c.lines = filenode_as_lines_tokens(c.file)
239
241
240 c.on_branch_head = self._is_valid_head(
242 c.on_branch_head = self._is_valid_head(
241 commit_id, c.rhodecode_repo)
243 commit_id, c.rhodecode_repo)
242 c.branch_or_raw_id = c.commit.branch or c.commit.raw_id
244 c.branch_or_raw_id = c.commit.branch or c.commit.raw_id
243
245
244 author = c.file_last_commit.author
246 author = c.file_last_commit.author
245 c.authors = [(h.email(author),
247 c.authors = [(h.email(author),
246 h.person(author, 'username_or_name_or_email'))]
248 h.person(author, 'username_or_name_or_email'))]
247 else:
249 else:
248 c.file_source_page = 'false'
250 c.file_source_page = 'false'
249 c.authors = []
251 c.authors = []
250 c.file_tree = self._get_tree_at_commit(
252 c.file_tree = self._get_tree_at_commit(
251 repo_name, c.commit.raw_id, f_path)
253 repo_name, c.commit.raw_id, f_path)
252
254
253 except RepositoryError as e:
255 except RepositoryError as e:
254 h.flash(safe_str(e), category='error')
256 h.flash(safe_str(e), category='error')
255 raise HTTPNotFound()
257 raise HTTPNotFound()
256
258
257 if request.environ.get('HTTP_X_PJAX'):
259 if request.environ.get('HTTP_X_PJAX'):
258 return render('files/files_pjax.mako')
260 return render('files/files_pjax.mako')
259
261
260 return render('files/files.mako')
262 return render('files/files.mako')
261
263
262 @LoginRequired()
264 @LoginRequired()
263 @HasRepoPermissionAnyDecorator(
265 @HasRepoPermissionAnyDecorator(
264 'repository.read', 'repository.write', 'repository.admin')
266 'repository.read', 'repository.write', 'repository.admin')
265 def annotate_previous(self, repo_name, revision, f_path):
267 def annotate_previous(self, repo_name, revision, f_path):
266
268
267 commit_id = revision
269 commit_id = revision
268 commit = self.__get_commit_or_redirect(commit_id, repo_name)
270 commit = self.__get_commit_or_redirect(commit_id, repo_name)
269 prev_commit_id = commit.raw_id
271 prev_commit_id = commit.raw_id
270
272
271 f_path = f_path
273 f_path = f_path
272 is_file = False
274 is_file = False
273 try:
275 try:
274 _file = commit.get_node(f_path)
276 _file = commit.get_node(f_path)
275 is_file = _file.is_file()
277 is_file = _file.is_file()
276 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
278 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
277 pass
279 pass
278
280
279 if is_file:
281 if is_file:
280 history = commit.get_file_history(f_path)
282 history = commit.get_file_history(f_path)
281 prev_commit_id = history[1].raw_id \
283 prev_commit_id = history[1].raw_id \
282 if len(history) > 1 else prev_commit_id
284 if len(history) > 1 else prev_commit_id
283
285
284 return redirect(h.url(
286 return redirect(h.url(
285 'files_annotate_home', repo_name=repo_name,
287 'files_annotate_home', repo_name=repo_name,
286 revision=prev_commit_id, f_path=f_path))
288 revision=prev_commit_id, f_path=f_path))
287
289
288 @LoginRequired()
290 @LoginRequired()
289 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
291 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
290 'repository.admin')
292 'repository.admin')
291 @jsonify
293 @jsonify
292 def history(self, repo_name, revision, f_path):
294 def history(self, repo_name, revision, f_path):
293 commit = self.__get_commit_or_redirect(revision, repo_name)
295 commit = self.__get_commit_or_redirect(revision, repo_name)
294 f_path = f_path
296 f_path = f_path
295 _file = commit.get_node(f_path)
297 _file = commit.get_node(f_path)
296 if _file.is_file():
298 if _file.is_file():
297 file_history, _hist = self._get_node_history(commit, f_path)
299 file_history, _hist = self._get_node_history(commit, f_path)
298
300
299 res = []
301 res = []
300 for obj in file_history:
302 for obj in file_history:
301 res.append({
303 res.append({
302 'text': obj[1],
304 'text': obj[1],
303 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
305 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
304 })
306 })
305
307
306 data = {
308 data = {
307 'more': False,
309 'more': False,
308 'results': res
310 'results': res
309 }
311 }
310 return data
312 return data
311
313
312 @LoginRequired()
314 @LoginRequired()
313 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
315 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
314 'repository.admin')
316 'repository.admin')
315 def authors(self, repo_name, revision, f_path):
317 def authors(self, repo_name, revision, f_path):
316 commit = self.__get_commit_or_redirect(revision, repo_name)
318 commit = self.__get_commit_or_redirect(revision, repo_name)
317 file_node = commit.get_node(f_path)
319 file_node = commit.get_node(f_path)
318 if file_node.is_file():
320 if file_node.is_file():
319 c.file_last_commit = file_node.last_commit
321 c.file_last_commit = file_node.last_commit
320 if request.GET.get('annotate') == '1':
322 if request.GET.get('annotate') == '1':
321 # use _hist from annotation if annotation mode is on
323 # use _hist from annotation if annotation mode is on
322 commit_ids = set(x[1] for x in file_node.annotate)
324 commit_ids = set(x[1] for x in file_node.annotate)
323 _hist = (
325 _hist = (
324 c.rhodecode_repo.get_commit(commit_id)
326 c.rhodecode_repo.get_commit(commit_id)
325 for commit_id in commit_ids)
327 for commit_id in commit_ids)
326 else:
328 else:
327 _f_history, _hist = self._get_node_history(commit, f_path)
329 _f_history, _hist = self._get_node_history(commit, f_path)
328 c.file_author = False
330 c.file_author = False
329 c.authors = []
331 c.authors = []
330 for author in set(commit.author for commit in _hist):
332 for author in set(commit.author for commit in _hist):
331 c.authors.append((
333 c.authors.append((
332 h.email(author),
334 h.email(author),
333 h.person(author, 'username_or_name_or_email')))
335 h.person(author, 'username_or_name_or_email')))
334 return render('files/file_authors_box.mako')
336 return render('files/file_authors_box.mako')
335
337
336 @LoginRequired()
338 @LoginRequired()
337 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
338 'repository.admin')
340 'repository.admin')
339 def rawfile(self, repo_name, revision, f_path):
341 def rawfile(self, repo_name, revision, f_path):
340 """
342 """
341 Action for download as raw
343 Action for download as raw
342 """
344 """
343 commit = self.__get_commit_or_redirect(revision, repo_name)
345 commit = self.__get_commit_or_redirect(revision, repo_name)
344 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
346 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
345
347
348 if request.GET.get('lf'):
349 # only if lf get flag is passed, we download this file
350 # as LFS/Largefile
351 lf_node = file_node.get_largefile_node()
352 if lf_node:
353 # overwrite our pointer with the REAL large-file
354 file_node = lf_node
355
346 response.content_disposition = 'attachment; filename=%s' % \
356 response.content_disposition = 'attachment; filename=%s' % \
347 safe_str(f_path.split(Repository.NAME_SEP)[-1])
357 safe_str(f_path.split(Repository.NAME_SEP)[-1])
348
358
349 response.content_type = file_node.mimetype
359 response.content_type = file_node.mimetype
350 charset = self._get_default_encoding()
360 charset = self._get_default_encoding()
351 if charset:
361 if charset:
352 response.charset = charset
362 response.charset = charset
353
363
354 return file_node.content
364 return file_node.content
355
365
356 @LoginRequired()
366 @LoginRequired()
357 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
367 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
358 'repository.admin')
368 'repository.admin')
359 def raw(self, repo_name, revision, f_path):
369 def raw(self, repo_name, revision, f_path):
360 """
370 """
361 Action for show as raw, some mimetypes are "rendered",
371 Action for show as raw, some mimetypes are "rendered",
362 those include images, icons.
372 those include images, icons.
363 """
373 """
364 commit = self.__get_commit_or_redirect(revision, repo_name)
374 commit = self.__get_commit_or_redirect(revision, repo_name)
365 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
375 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
366
376
367 raw_mimetype_mapping = {
377 raw_mimetype_mapping = {
368 # map original mimetype to a mimetype used for "show as raw"
378 # map original mimetype to a mimetype used for "show as raw"
369 # you can also provide a content-disposition to override the
379 # you can also provide a content-disposition to override the
370 # default "attachment" disposition.
380 # default "attachment" disposition.
371 # orig_type: (new_type, new_dispo)
381 # orig_type: (new_type, new_dispo)
372
382
373 # show images inline:
383 # show images inline:
374 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
384 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
375 # for example render an SVG with javascript inside or even render
385 # for example render an SVG with javascript inside or even render
376 # HTML.
386 # HTML.
377 'image/x-icon': ('image/x-icon', 'inline'),
387 'image/x-icon': ('image/x-icon', 'inline'),
378 'image/png': ('image/png', 'inline'),
388 'image/png': ('image/png', 'inline'),
379 'image/gif': ('image/gif', 'inline'),
389 'image/gif': ('image/gif', 'inline'),
380 'image/jpeg': ('image/jpeg', 'inline'),
390 'image/jpeg': ('image/jpeg', 'inline'),
381 }
391 }
382
392
383 mimetype = file_node.mimetype
393 mimetype = file_node.mimetype
384 try:
394 try:
385 mimetype, dispo = raw_mimetype_mapping[mimetype]
395 mimetype, dispo = raw_mimetype_mapping[mimetype]
386 except KeyError:
396 except KeyError:
387 # we don't know anything special about this, handle it safely
397 # we don't know anything special about this, handle it safely
388 if file_node.is_binary:
398 if file_node.is_binary:
389 # do same as download raw for binary files
399 # do same as download raw for binary files
390 mimetype, dispo = 'application/octet-stream', 'attachment'
400 mimetype, dispo = 'application/octet-stream', 'attachment'
391 else:
401 else:
392 # do not just use the original mimetype, but force text/plain,
402 # do not just use the original mimetype, but force text/plain,
393 # otherwise it would serve text/html and that might be unsafe.
403 # otherwise it would serve text/html and that might be unsafe.
394 # Note: underlying vcs library fakes text/plain mimetype if the
404 # Note: underlying vcs library fakes text/plain mimetype if the
395 # mimetype can not be determined and it thinks it is not
405 # mimetype can not be determined and it thinks it is not
396 # binary.This might lead to erroneous text display in some
406 # binary.This might lead to erroneous text display in some
397 # cases, but helps in other cases, like with text files
407 # cases, but helps in other cases, like with text files
398 # without extension.
408 # without extension.
399 mimetype, dispo = 'text/plain', 'inline'
409 mimetype, dispo = 'text/plain', 'inline'
400
410
401 if dispo == 'attachment':
411 if dispo == 'attachment':
402 dispo = 'attachment; filename=%s' % safe_str(
412 dispo = 'attachment; filename=%s' % safe_str(
403 f_path.split(os.sep)[-1])
413 f_path.split(os.sep)[-1])
404
414
405 response.content_disposition = dispo
415 response.content_disposition = dispo
406 response.content_type = mimetype
416 response.content_type = mimetype
407 charset = self._get_default_encoding()
417 charset = self._get_default_encoding()
408 if charset:
418 if charset:
409 response.charset = charset
419 response.charset = charset
410 return file_node.content
420 return file_node.content
411
421
412 @CSRFRequired()
422 @CSRFRequired()
413 @LoginRequired()
423 @LoginRequired()
414 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
424 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
415 def delete(self, repo_name, revision, f_path):
425 def delete(self, repo_name, revision, f_path):
416 commit_id = revision
426 commit_id = revision
417
427
418 repo = c.rhodecode_db_repo
428 repo = c.rhodecode_db_repo
419 if repo.enable_locking and repo.locked[0]:
429 if repo.enable_locking and repo.locked[0]:
420 h.flash(_('This repository has been locked by %s on %s')
430 h.flash(_('This repository has been locked by %s on %s')
421 % (h.person_by_id(repo.locked[0]),
431 % (h.person_by_id(repo.locked[0]),
422 h.format_date(h.time_to_datetime(repo.locked[1]))),
432 h.format_date(h.time_to_datetime(repo.locked[1]))),
423 'warning')
433 'warning')
424 return redirect(h.url('files_home',
434 return redirect(h.url('files_home',
425 repo_name=repo_name, revision='tip'))
435 repo_name=repo_name, revision='tip'))
426
436
427 if not self._is_valid_head(commit_id, repo.scm_instance()):
437 if not self._is_valid_head(commit_id, repo.scm_instance()):
428 h.flash(_('You can only delete files with revision '
438 h.flash(_('You can only delete files with revision '
429 'being a valid branch '), category='warning')
439 'being a valid branch '), category='warning')
430 return redirect(h.url('files_home',
440 return redirect(h.url('files_home',
431 repo_name=repo_name, revision='tip',
441 repo_name=repo_name, revision='tip',
432 f_path=f_path))
442 f_path=f_path))
433
443
434 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
444 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
435 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
445 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
436
446
437 c.default_message = _(
447 c.default_message = _(
438 'Deleted file %s via RhodeCode Enterprise') % (f_path)
448 'Deleted file %s via RhodeCode Enterprise') % (f_path)
439 c.f_path = f_path
449 c.f_path = f_path
440 node_path = f_path
450 node_path = f_path
441 author = c.rhodecode_user.full_contact
451 author = c.rhodecode_user.full_contact
442 message = request.POST.get('message') or c.default_message
452 message = request.POST.get('message') or c.default_message
443 try:
453 try:
444 nodes = {
454 nodes = {
445 node_path: {
455 node_path: {
446 'content': ''
456 'content': ''
447 }
457 }
448 }
458 }
449 self.scm_model.delete_nodes(
459 self.scm_model.delete_nodes(
450 user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
460 user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
451 message=message,
461 message=message,
452 nodes=nodes,
462 nodes=nodes,
453 parent_commit=c.commit,
463 parent_commit=c.commit,
454 author=author,
464 author=author,
455 )
465 )
456
466
457 h.flash(_('Successfully deleted file %s') % f_path,
467 h.flash(_('Successfully deleted file %s') % f_path,
458 category='success')
468 category='success')
459 except Exception:
469 except Exception:
460 msg = _('Error occurred during commit')
470 msg = _('Error occurred during commit')
461 log.exception(msg)
471 log.exception(msg)
462 h.flash(msg, category='error')
472 h.flash(msg, category='error')
463 return redirect(url('changeset_home',
473 return redirect(url('changeset_home',
464 repo_name=c.repo_name, revision='tip'))
474 repo_name=c.repo_name, revision='tip'))
465
475
466 @LoginRequired()
476 @LoginRequired()
467 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
477 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
468 def delete_home(self, repo_name, revision, f_path):
478 def delete_home(self, repo_name, revision, f_path):
469 commit_id = revision
479 commit_id = revision
470
480
471 repo = c.rhodecode_db_repo
481 repo = c.rhodecode_db_repo
472 if repo.enable_locking and repo.locked[0]:
482 if repo.enable_locking and repo.locked[0]:
473 h.flash(_('This repository has been locked by %s on %s')
483 h.flash(_('This repository has been locked by %s on %s')
474 % (h.person_by_id(repo.locked[0]),
484 % (h.person_by_id(repo.locked[0]),
475 h.format_date(h.time_to_datetime(repo.locked[1]))),
485 h.format_date(h.time_to_datetime(repo.locked[1]))),
476 'warning')
486 'warning')
477 return redirect(h.url('files_home',
487 return redirect(h.url('files_home',
478 repo_name=repo_name, revision='tip'))
488 repo_name=repo_name, revision='tip'))
479
489
480 if not self._is_valid_head(commit_id, repo.scm_instance()):
490 if not self._is_valid_head(commit_id, repo.scm_instance()):
481 h.flash(_('You can only delete files with revision '
491 h.flash(_('You can only delete files with revision '
482 'being a valid branch '), category='warning')
492 'being a valid branch '), category='warning')
483 return redirect(h.url('files_home',
493 return redirect(h.url('files_home',
484 repo_name=repo_name, revision='tip',
494 repo_name=repo_name, revision='tip',
485 f_path=f_path))
495 f_path=f_path))
486
496
487 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
497 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
488 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
498 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
489
499
490 c.default_message = _(
500 c.default_message = _(
491 'Deleted file %s via RhodeCode Enterprise') % (f_path)
501 'Deleted file %s via RhodeCode Enterprise') % (f_path)
492 c.f_path = f_path
502 c.f_path = f_path
493
503
494 return render('files/files_delete.mako')
504 return render('files/files_delete.mako')
495
505
496 @CSRFRequired()
506 @CSRFRequired()
497 @LoginRequired()
507 @LoginRequired()
498 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
508 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
499 def edit(self, repo_name, revision, f_path):
509 def edit(self, repo_name, revision, f_path):
500 commit_id = revision
510 commit_id = revision
501
511
502 repo = c.rhodecode_db_repo
512 repo = c.rhodecode_db_repo
503 if repo.enable_locking and repo.locked[0]:
513 if repo.enable_locking and repo.locked[0]:
504 h.flash(_('This repository has been locked by %s on %s')
514 h.flash(_('This repository has been locked by %s on %s')
505 % (h.person_by_id(repo.locked[0]),
515 % (h.person_by_id(repo.locked[0]),
506 h.format_date(h.time_to_datetime(repo.locked[1]))),
516 h.format_date(h.time_to_datetime(repo.locked[1]))),
507 'warning')
517 'warning')
508 return redirect(h.url('files_home',
518 return redirect(h.url('files_home',
509 repo_name=repo_name, revision='tip'))
519 repo_name=repo_name, revision='tip'))
510
520
511 if not self._is_valid_head(commit_id, repo.scm_instance()):
521 if not self._is_valid_head(commit_id, repo.scm_instance()):
512 h.flash(_('You can only edit files with revision '
522 h.flash(_('You can only edit files with revision '
513 'being a valid branch '), category='warning')
523 'being a valid branch '), category='warning')
514 return redirect(h.url('files_home',
524 return redirect(h.url('files_home',
515 repo_name=repo_name, revision='tip',
525 repo_name=repo_name, revision='tip',
516 f_path=f_path))
526 f_path=f_path))
517
527
518 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
528 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
519 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
529 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
520
530
521 if c.file.is_binary:
531 if c.file.is_binary:
522 return redirect(url('files_home', repo_name=c.repo_name,
532 return redirect(url('files_home', repo_name=c.repo_name,
523 revision=c.commit.raw_id, f_path=f_path))
533 revision=c.commit.raw_id, f_path=f_path))
524 c.default_message = _(
534 c.default_message = _(
525 'Edited file %s via RhodeCode Enterprise') % (f_path)
535 'Edited file %s via RhodeCode Enterprise') % (f_path)
526 c.f_path = f_path
536 c.f_path = f_path
527 old_content = c.file.content
537 old_content = c.file.content
528 sl = old_content.splitlines(1)
538 sl = old_content.splitlines(1)
529 first_line = sl[0] if sl else ''
539 first_line = sl[0] if sl else ''
530
540
531 # modes: 0 - Unix, 1 - Mac, 2 - DOS
541 # modes: 0 - Unix, 1 - Mac, 2 - DOS
532 mode = detect_mode(first_line, 0)
542 mode = detect_mode(first_line, 0)
533 content = convert_line_endings(request.POST.get('content', ''), mode)
543 content = convert_line_endings(request.POST.get('content', ''), mode)
534
544
535 message = request.POST.get('message') or c.default_message
545 message = request.POST.get('message') or c.default_message
536 org_f_path = c.file.unicode_path
546 org_f_path = c.file.unicode_path
537 filename = request.POST['filename']
547 filename = request.POST['filename']
538 org_filename = c.file.name
548 org_filename = c.file.name
539
549
540 if content == old_content and filename == org_filename:
550 if content == old_content and filename == org_filename:
541 h.flash(_('No changes'), category='warning')
551 h.flash(_('No changes'), category='warning')
542 return redirect(url('changeset_home', repo_name=c.repo_name,
552 return redirect(url('changeset_home', repo_name=c.repo_name,
543 revision='tip'))
553 revision='tip'))
544 try:
554 try:
545 mapping = {
555 mapping = {
546 org_f_path: {
556 org_f_path: {
547 'org_filename': org_f_path,
557 'org_filename': org_f_path,
548 'filename': os.path.join(c.file.dir_path, filename),
558 'filename': os.path.join(c.file.dir_path, filename),
549 'content': content,
559 'content': content,
550 'lexer': '',
560 'lexer': '',
551 'op': 'mod',
561 'op': 'mod',
552 }
562 }
553 }
563 }
554
564
555 ScmModel().update_nodes(
565 ScmModel().update_nodes(
556 user=c.rhodecode_user.user_id,
566 user=c.rhodecode_user.user_id,
557 repo=c.rhodecode_db_repo,
567 repo=c.rhodecode_db_repo,
558 message=message,
568 message=message,
559 nodes=mapping,
569 nodes=mapping,
560 parent_commit=c.commit,
570 parent_commit=c.commit,
561 )
571 )
562
572
563 h.flash(_('Successfully committed to %s') % f_path,
573 h.flash(_('Successfully committed to %s') % f_path,
564 category='success')
574 category='success')
565 except Exception:
575 except Exception:
566 msg = _('Error occurred during commit')
576 msg = _('Error occurred during commit')
567 log.exception(msg)
577 log.exception(msg)
568 h.flash(msg, category='error')
578 h.flash(msg, category='error')
569 return redirect(url('changeset_home',
579 return redirect(url('changeset_home',
570 repo_name=c.repo_name, revision='tip'))
580 repo_name=c.repo_name, revision='tip'))
571
581
572 @LoginRequired()
582 @LoginRequired()
573 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
583 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
574 def edit_home(self, repo_name, revision, f_path):
584 def edit_home(self, repo_name, revision, f_path):
575 commit_id = revision
585 commit_id = revision
576
586
577 repo = c.rhodecode_db_repo
587 repo = c.rhodecode_db_repo
578 if repo.enable_locking and repo.locked[0]:
588 if repo.enable_locking and repo.locked[0]:
579 h.flash(_('This repository has been locked by %s on %s')
589 h.flash(_('This repository has been locked by %s on %s')
580 % (h.person_by_id(repo.locked[0]),
590 % (h.person_by_id(repo.locked[0]),
581 h.format_date(h.time_to_datetime(repo.locked[1]))),
591 h.format_date(h.time_to_datetime(repo.locked[1]))),
582 'warning')
592 'warning')
583 return redirect(h.url('files_home',
593 return redirect(h.url('files_home',
584 repo_name=repo_name, revision='tip'))
594 repo_name=repo_name, revision='tip'))
585
595
586 if not self._is_valid_head(commit_id, repo.scm_instance()):
596 if not self._is_valid_head(commit_id, repo.scm_instance()):
587 h.flash(_('You can only edit files with revision '
597 h.flash(_('You can only edit files with revision '
588 'being a valid branch '), category='warning')
598 'being a valid branch '), category='warning')
589 return redirect(h.url('files_home',
599 return redirect(h.url('files_home',
590 repo_name=repo_name, revision='tip',
600 repo_name=repo_name, revision='tip',
591 f_path=f_path))
601 f_path=f_path))
592
602
593 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
603 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
594 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
604 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
595
605
596 if c.file.is_binary:
606 if c.file.is_binary:
597 return redirect(url('files_home', repo_name=c.repo_name,
607 return redirect(url('files_home', repo_name=c.repo_name,
598 revision=c.commit.raw_id, f_path=f_path))
608 revision=c.commit.raw_id, f_path=f_path))
599 c.default_message = _(
609 c.default_message = _(
600 'Edited file %s via RhodeCode Enterprise') % (f_path)
610 'Edited file %s via RhodeCode Enterprise') % (f_path)
601 c.f_path = f_path
611 c.f_path = f_path
602
612
603 return render('files/files_edit.mako')
613 return render('files/files_edit.mako')
604
614
605 def _is_valid_head(self, commit_id, repo):
615 def _is_valid_head(self, commit_id, repo):
606 # check if commit is a branch identifier- basically we cannot
616 # check if commit is a branch identifier- basically we cannot
607 # create multiple heads via file editing
617 # create multiple heads via file editing
608 valid_heads = repo.branches.keys() + repo.branches.values()
618 valid_heads = repo.branches.keys() + repo.branches.values()
609
619
610 if h.is_svn(repo) and not repo.is_empty():
620 if h.is_svn(repo) and not repo.is_empty():
611 # Note: Subversion only has one head, we add it here in case there
621 # Note: Subversion only has one head, we add it here in case there
612 # is no branch matched.
622 # is no branch matched.
613 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
623 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
614
624
615 # check if commit is a branch name or branch hash
625 # check if commit is a branch name or branch hash
616 return commit_id in valid_heads
626 return commit_id in valid_heads
617
627
618 @CSRFRequired()
628 @CSRFRequired()
619 @LoginRequired()
629 @LoginRequired()
620 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
630 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
621 def add(self, repo_name, revision, f_path):
631 def add(self, repo_name, revision, f_path):
622 repo = Repository.get_by_repo_name(repo_name)
632 repo = Repository.get_by_repo_name(repo_name)
623 if repo.enable_locking and repo.locked[0]:
633 if repo.enable_locking and repo.locked[0]:
624 h.flash(_('This repository has been locked by %s on %s')
634 h.flash(_('This repository has been locked by %s on %s')
625 % (h.person_by_id(repo.locked[0]),
635 % (h.person_by_id(repo.locked[0]),
626 h.format_date(h.time_to_datetime(repo.locked[1]))),
636 h.format_date(h.time_to_datetime(repo.locked[1]))),
627 'warning')
637 'warning')
628 return redirect(h.url('files_home',
638 return redirect(h.url('files_home',
629 repo_name=repo_name, revision='tip'))
639 repo_name=repo_name, revision='tip'))
630
640
631 r_post = request.POST
641 r_post = request.POST
632
642
633 c.commit = self.__get_commit_or_redirect(
643 c.commit = self.__get_commit_or_redirect(
634 revision, repo_name, redirect_after=False)
644 revision, repo_name, redirect_after=False)
635 if c.commit is None:
645 if c.commit is None:
636 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
646 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
637 c.default_message = (_('Added file via RhodeCode Enterprise'))
647 c.default_message = (_('Added file via RhodeCode Enterprise'))
638 c.f_path = f_path
648 c.f_path = f_path
639 unix_mode = 0
649 unix_mode = 0
640 content = convert_line_endings(r_post.get('content', ''), unix_mode)
650 content = convert_line_endings(r_post.get('content', ''), unix_mode)
641
651
642 message = r_post.get('message') or c.default_message
652 message = r_post.get('message') or c.default_message
643 filename = r_post.get('filename')
653 filename = r_post.get('filename')
644 location = r_post.get('location', '') # dir location
654 location = r_post.get('location', '') # dir location
645 file_obj = r_post.get('upload_file', None)
655 file_obj = r_post.get('upload_file', None)
646
656
647 if file_obj is not None and hasattr(file_obj, 'filename'):
657 if file_obj is not None and hasattr(file_obj, 'filename'):
648 filename = file_obj.filename
658 filename = file_obj.filename
649 content = file_obj.file
659 content = file_obj.file
650
660
651 if hasattr(content, 'file'):
661 if hasattr(content, 'file'):
652 # non posix systems store real file under file attr
662 # non posix systems store real file under file attr
653 content = content.file
663 content = content.file
654
664
655 # If there's no commit, redirect to repo summary
665 # If there's no commit, redirect to repo summary
656 if type(c.commit) is EmptyCommit:
666 if type(c.commit) is EmptyCommit:
657 redirect_url = "summary_home"
667 redirect_url = "summary_home"
658 else:
668 else:
659 redirect_url = "changeset_home"
669 redirect_url = "changeset_home"
660
670
661 if not filename:
671 if not filename:
662 h.flash(_('No filename'), category='warning')
672 h.flash(_('No filename'), category='warning')
663 return redirect(url(redirect_url, repo_name=c.repo_name,
673 return redirect(url(redirect_url, repo_name=c.repo_name,
664 revision='tip'))
674 revision='tip'))
665
675
666 # extract the location from filename,
676 # extract the location from filename,
667 # allows using foo/bar.txt syntax to create subdirectories
677 # allows using foo/bar.txt syntax to create subdirectories
668 subdir_loc = filename.rsplit('/', 1)
678 subdir_loc = filename.rsplit('/', 1)
669 if len(subdir_loc) == 2:
679 if len(subdir_loc) == 2:
670 location = os.path.join(location, subdir_loc[0])
680 location = os.path.join(location, subdir_loc[0])
671
681
672 # strip all crap out of file, just leave the basename
682 # strip all crap out of file, just leave the basename
673 filename = os.path.basename(filename)
683 filename = os.path.basename(filename)
674 node_path = os.path.join(location, filename)
684 node_path = os.path.join(location, filename)
675 author = c.rhodecode_user.full_contact
685 author = c.rhodecode_user.full_contact
676
686
677 try:
687 try:
678 nodes = {
688 nodes = {
679 node_path: {
689 node_path: {
680 'content': content
690 'content': content
681 }
691 }
682 }
692 }
683 self.scm_model.create_nodes(
693 self.scm_model.create_nodes(
684 user=c.rhodecode_user.user_id,
694 user=c.rhodecode_user.user_id,
685 repo=c.rhodecode_db_repo,
695 repo=c.rhodecode_db_repo,
686 message=message,
696 message=message,
687 nodes=nodes,
697 nodes=nodes,
688 parent_commit=c.commit,
698 parent_commit=c.commit,
689 author=author,
699 author=author,
690 )
700 )
691
701
692 h.flash(_('Successfully committed to %s') % node_path,
702 h.flash(_('Successfully committed to %s') % node_path,
693 category='success')
703 category='success')
694 except NonRelativePathError as e:
704 except NonRelativePathError as e:
695 h.flash(_(
705 h.flash(_(
696 'The location specified must be a relative path and must not '
706 'The location specified must be a relative path and must not '
697 'contain .. in the path'), category='warning')
707 'contain .. in the path'), category='warning')
698 return redirect(url('changeset_home', repo_name=c.repo_name,
708 return redirect(url('changeset_home', repo_name=c.repo_name,
699 revision='tip'))
709 revision='tip'))
700 except (NodeError, NodeAlreadyExistsError) as e:
710 except (NodeError, NodeAlreadyExistsError) as e:
701 h.flash(_(e), category='error')
711 h.flash(_(e), category='error')
702 except Exception:
712 except Exception:
703 msg = _('Error occurred during commit')
713 msg = _('Error occurred during commit')
704 log.exception(msg)
714 log.exception(msg)
705 h.flash(msg, category='error')
715 h.flash(msg, category='error')
706 return redirect(url('changeset_home',
716 return redirect(url('changeset_home',
707 repo_name=c.repo_name, revision='tip'))
717 repo_name=c.repo_name, revision='tip'))
708
718
709 @LoginRequired()
719 @LoginRequired()
710 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
720 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
711 def add_home(self, repo_name, revision, f_path):
721 def add_home(self, repo_name, revision, f_path):
712
722
713 repo = Repository.get_by_repo_name(repo_name)
723 repo = Repository.get_by_repo_name(repo_name)
714 if repo.enable_locking and repo.locked[0]:
724 if repo.enable_locking and repo.locked[0]:
715 h.flash(_('This repository has been locked by %s on %s')
725 h.flash(_('This repository has been locked by %s on %s')
716 % (h.person_by_id(repo.locked[0]),
726 % (h.person_by_id(repo.locked[0]),
717 h.format_date(h.time_to_datetime(repo.locked[1]))),
727 h.format_date(h.time_to_datetime(repo.locked[1]))),
718 'warning')
728 'warning')
719 return redirect(h.url('files_home',
729 return redirect(h.url('files_home',
720 repo_name=repo_name, revision='tip'))
730 repo_name=repo_name, revision='tip'))
721
731
722 c.commit = self.__get_commit_or_redirect(
732 c.commit = self.__get_commit_or_redirect(
723 revision, repo_name, redirect_after=False)
733 revision, repo_name, redirect_after=False)
724 if c.commit is None:
734 if c.commit is None:
725 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
735 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
726 c.default_message = (_('Added file via RhodeCode Enterprise'))
736 c.default_message = (_('Added file via RhodeCode Enterprise'))
727 c.f_path = f_path
737 c.f_path = f_path
728
738
729 return render('files/files_add.mako')
739 return render('files/files_add.mako')
730
740
731 @LoginRequired()
741 @LoginRequired()
732 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
742 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
733 'repository.admin')
743 'repository.admin')
734 def archivefile(self, repo_name, fname):
744 def archivefile(self, repo_name, fname):
735 fileformat = None
745 fileformat = None
736 commit_id = None
746 commit_id = None
737 ext = None
747 ext = None
738 subrepos = request.GET.get('subrepos') == 'true'
748 subrepos = request.GET.get('subrepos') == 'true'
739
749
740 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
750 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
741 archive_spec = fname.split(ext_data[1])
751 archive_spec = fname.split(ext_data[1])
742 if len(archive_spec) == 2 and archive_spec[1] == '':
752 if len(archive_spec) == 2 and archive_spec[1] == '':
743 fileformat = a_type or ext_data[1]
753 fileformat = a_type or ext_data[1]
744 commit_id = archive_spec[0]
754 commit_id = archive_spec[0]
745 ext = ext_data[1]
755 ext = ext_data[1]
746
756
747 dbrepo = RepoModel().get_by_repo_name(repo_name)
757 dbrepo = RepoModel().get_by_repo_name(repo_name)
748 if not dbrepo.enable_downloads:
758 if not dbrepo.enable_downloads:
749 return _('Downloads disabled')
759 return _('Downloads disabled')
750
760
751 try:
761 try:
752 commit = c.rhodecode_repo.get_commit(commit_id)
762 commit = c.rhodecode_repo.get_commit(commit_id)
753 content_type = settings.ARCHIVE_SPECS[fileformat][0]
763 content_type = settings.ARCHIVE_SPECS[fileformat][0]
754 except CommitDoesNotExistError:
764 except CommitDoesNotExistError:
755 return _('Unknown revision %s') % commit_id
765 return _('Unknown revision %s') % commit_id
756 except EmptyRepositoryError:
766 except EmptyRepositoryError:
757 return _('Empty repository')
767 return _('Empty repository')
758 except KeyError:
768 except KeyError:
759 return _('Unknown archive type')
769 return _('Unknown archive type')
760
770
761 # archive cache
771 # archive cache
762 from rhodecode import CONFIG
772 from rhodecode import CONFIG
763
773
764 archive_name = '%s-%s%s%s' % (
774 archive_name = '%s-%s%s%s' % (
765 safe_str(repo_name.replace('/', '_')),
775 safe_str(repo_name.replace('/', '_')),
766 '-sub' if subrepos else '',
776 '-sub' if subrepos else '',
767 safe_str(commit.short_id), ext)
777 safe_str(commit.short_id), ext)
768
778
769 use_cached_archive = False
779 use_cached_archive = False
770 archive_cache_enabled = CONFIG.get(
780 archive_cache_enabled = CONFIG.get(
771 'archive_cache_dir') and not request.GET.get('no_cache')
781 'archive_cache_dir') and not request.GET.get('no_cache')
772
782
773 if archive_cache_enabled:
783 if archive_cache_enabled:
774 # check if we it's ok to write
784 # check if we it's ok to write
775 if not os.path.isdir(CONFIG['archive_cache_dir']):
785 if not os.path.isdir(CONFIG['archive_cache_dir']):
776 os.makedirs(CONFIG['archive_cache_dir'])
786 os.makedirs(CONFIG['archive_cache_dir'])
777 cached_archive_path = os.path.join(
787 cached_archive_path = os.path.join(
778 CONFIG['archive_cache_dir'], archive_name)
788 CONFIG['archive_cache_dir'], archive_name)
779 if os.path.isfile(cached_archive_path):
789 if os.path.isfile(cached_archive_path):
780 log.debug('Found cached archive in %s', cached_archive_path)
790 log.debug('Found cached archive in %s', cached_archive_path)
781 fd, archive = None, cached_archive_path
791 fd, archive = None, cached_archive_path
782 use_cached_archive = True
792 use_cached_archive = True
783 else:
793 else:
784 log.debug('Archive %s is not yet cached', archive_name)
794 log.debug('Archive %s is not yet cached', archive_name)
785
795
786 if not use_cached_archive:
796 if not use_cached_archive:
787 # generate new archive
797 # generate new archive
788 fd, archive = tempfile.mkstemp()
798 fd, archive = tempfile.mkstemp()
789 log.debug('Creating new temp archive in %s' % (archive,))
799 log.debug('Creating new temp archive in %s' % (archive,))
790 try:
800 try:
791 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
801 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
792 except ImproperArchiveTypeError:
802 except ImproperArchiveTypeError:
793 return _('Unknown archive type')
803 return _('Unknown archive type')
794 if archive_cache_enabled:
804 if archive_cache_enabled:
795 # if we generated the archive and we have cache enabled
805 # if we generated the archive and we have cache enabled
796 # let's use this for future
806 # let's use this for future
797 log.debug('Storing new archive in %s' % (cached_archive_path,))
807 log.debug('Storing new archive in %s' % (cached_archive_path,))
798 shutil.move(archive, cached_archive_path)
808 shutil.move(archive, cached_archive_path)
799 archive = cached_archive_path
809 archive = cached_archive_path
800
810
801 def get_chunked_archive(archive):
811 def get_chunked_archive(archive):
802 with open(archive, 'rb') as stream:
812 with open(archive, 'rb') as stream:
803 while True:
813 while True:
804 data = stream.read(16 * 1024)
814 data = stream.read(16 * 1024)
805 if not data:
815 if not data:
806 if fd: # fd means we used temporary file
816 if fd: # fd means we used temporary file
807 os.close(fd)
817 os.close(fd)
808 if not archive_cache_enabled:
818 if not archive_cache_enabled:
809 log.debug('Destroying temp archive %s', archive)
819 log.debug('Destroying temp archive %s', archive)
810 os.remove(archive)
820 os.remove(archive)
811 break
821 break
812 yield data
822 yield data
813
823
814 # store download action
824 # store download action
815 action_logger(user=c.rhodecode_user,
825 action_logger(user=c.rhodecode_user,
816 action='user_downloaded_archive:%s' % archive_name,
826 action='user_downloaded_archive:%s' % archive_name,
817 repo=repo_name, ipaddr=self.ip_addr, commit=True)
827 repo=repo_name, ipaddr=self.ip_addr, commit=True)
818 response.content_disposition = str(
828 response.content_disposition = str(
819 'attachment; filename=%s' % archive_name)
829 'attachment; filename=%s' % archive_name)
820 response.content_type = str(content_type)
830 response.content_type = str(content_type)
821
831
822 return get_chunked_archive(archive)
832 return get_chunked_archive(archive)
823
833
824 @LoginRequired()
834 @LoginRequired()
825 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
835 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
826 'repository.admin')
836 'repository.admin')
827 def diff(self, repo_name, f_path):
837 def diff(self, repo_name, f_path):
828
838
829 c.action = request.GET.get('diff')
839 c.action = request.GET.get('diff')
830 diff1 = request.GET.get('diff1', '')
840 diff1 = request.GET.get('diff1', '')
831 diff2 = request.GET.get('diff2', '')
841 diff2 = request.GET.get('diff2', '')
832
842
833 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
843 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
834
844
835 ignore_whitespace = str2bool(request.GET.get('ignorews'))
845 ignore_whitespace = str2bool(request.GET.get('ignorews'))
836 line_context = request.GET.get('context', 3)
846 line_context = request.GET.get('context', 3)
837
847
838 if not any((diff1, diff2)):
848 if not any((diff1, diff2)):
839 h.flash(
849 h.flash(
840 'Need query parameter "diff1" or "diff2" to generate a diff.',
850 'Need query parameter "diff1" or "diff2" to generate a diff.',
841 category='error')
851 category='error')
842 raise HTTPBadRequest()
852 raise HTTPBadRequest()
843
853
844 if c.action not in ['download', 'raw']:
854 if c.action not in ['download', 'raw']:
845 # redirect to new view if we render diff
855 # redirect to new view if we render diff
846 return redirect(
856 return redirect(
847 url('compare_url', repo_name=repo_name,
857 url('compare_url', repo_name=repo_name,
848 source_ref_type='rev',
858 source_ref_type='rev',
849 source_ref=diff1,
859 source_ref=diff1,
850 target_repo=c.repo_name,
860 target_repo=c.repo_name,
851 target_ref_type='rev',
861 target_ref_type='rev',
852 target_ref=diff2,
862 target_ref=diff2,
853 f_path=f_path))
863 f_path=f_path))
854
864
855 try:
865 try:
856 node1 = self._get_file_node(diff1, path1)
866 node1 = self._get_file_node(diff1, path1)
857 node2 = self._get_file_node(diff2, f_path)
867 node2 = self._get_file_node(diff2, f_path)
858 except (RepositoryError, NodeError):
868 except (RepositoryError, NodeError):
859 log.exception("Exception while trying to get node from repository")
869 log.exception("Exception while trying to get node from repository")
860 return redirect(url(
870 return redirect(url(
861 'files_home', repo_name=c.repo_name, f_path=f_path))
871 'files_home', repo_name=c.repo_name, f_path=f_path))
862
872
863 if all(isinstance(node.commit, EmptyCommit)
873 if all(isinstance(node.commit, EmptyCommit)
864 for node in (node1, node2)):
874 for node in (node1, node2)):
865 raise HTTPNotFound
875 raise HTTPNotFound
866
876
867 c.commit_1 = node1.commit
877 c.commit_1 = node1.commit
868 c.commit_2 = node2.commit
878 c.commit_2 = node2.commit
869
879
870 if c.action == 'download':
880 if c.action == 'download':
871 _diff = diffs.get_gitdiff(node1, node2,
881 _diff = diffs.get_gitdiff(node1, node2,
872 ignore_whitespace=ignore_whitespace,
882 ignore_whitespace=ignore_whitespace,
873 context=line_context)
883 context=line_context)
874 diff = diffs.DiffProcessor(_diff, format='gitdiff')
884 diff = diffs.DiffProcessor(_diff, format='gitdiff')
875
885
876 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
886 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
877 response.content_type = 'text/plain'
887 response.content_type = 'text/plain'
878 response.content_disposition = (
888 response.content_disposition = (
879 'attachment; filename=%s' % (diff_name,)
889 'attachment; filename=%s' % (diff_name,)
880 )
890 )
881 charset = self._get_default_encoding()
891 charset = self._get_default_encoding()
882 if charset:
892 if charset:
883 response.charset = charset
893 response.charset = charset
884 return diff.as_raw()
894 return diff.as_raw()
885
895
886 elif c.action == 'raw':
896 elif c.action == 'raw':
887 _diff = diffs.get_gitdiff(node1, node2,
897 _diff = diffs.get_gitdiff(node1, node2,
888 ignore_whitespace=ignore_whitespace,
898 ignore_whitespace=ignore_whitespace,
889 context=line_context)
899 context=line_context)
890 diff = diffs.DiffProcessor(_diff, format='gitdiff')
900 diff = diffs.DiffProcessor(_diff, format='gitdiff')
891 response.content_type = 'text/plain'
901 response.content_type = 'text/plain'
892 charset = self._get_default_encoding()
902 charset = self._get_default_encoding()
893 if charset:
903 if charset:
894 response.charset = charset
904 response.charset = charset
895 return diff.as_raw()
905 return diff.as_raw()
896
906
897 else:
907 else:
898 return redirect(
908 return redirect(
899 url('compare_url', repo_name=repo_name,
909 url('compare_url', repo_name=repo_name,
900 source_ref_type='rev',
910 source_ref_type='rev',
901 source_ref=diff1,
911 source_ref=diff1,
902 target_repo=c.repo_name,
912 target_repo=c.repo_name,
903 target_ref_type='rev',
913 target_ref_type='rev',
904 target_ref=diff2,
914 target_ref=diff2,
905 f_path=f_path))
915 f_path=f_path))
906
916
907 @LoginRequired()
917 @LoginRequired()
908 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
918 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
909 'repository.admin')
919 'repository.admin')
910 def diff_2way(self, repo_name, f_path):
920 def diff_2way(self, repo_name, f_path):
911 """
921 """
912 Kept only to make OLD links work
922 Kept only to make OLD links work
913 """
923 """
914 diff1 = request.GET.get('diff1', '')
924 diff1 = request.GET.get('diff1', '')
915 diff2 = request.GET.get('diff2', '')
925 diff2 = request.GET.get('diff2', '')
916
926
917 if not any((diff1, diff2)):
927 if not any((diff1, diff2)):
918 h.flash(
928 h.flash(
919 'Need query parameter "diff1" or "diff2" to generate a diff.',
929 'Need query parameter "diff1" or "diff2" to generate a diff.',
920 category='error')
930 category='error')
921 raise HTTPBadRequest()
931 raise HTTPBadRequest()
922
932
923 return redirect(
933 return redirect(
924 url('compare_url', repo_name=repo_name,
934 url('compare_url', repo_name=repo_name,
925 source_ref_type='rev',
935 source_ref_type='rev',
926 source_ref=diff1,
936 source_ref=diff1,
927 target_repo=c.repo_name,
937 target_repo=c.repo_name,
928 target_ref_type='rev',
938 target_ref_type='rev',
929 target_ref=diff2,
939 target_ref=diff2,
930 f_path=f_path,
940 f_path=f_path,
931 diffmode='sideside'))
941 diffmode='sideside'))
932
942
933 def _get_file_node(self, commit_id, f_path):
943 def _get_file_node(self, commit_id, f_path):
934 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
944 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
935 commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
945 commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
936 try:
946 try:
937 node = commit.get_node(f_path)
947 node = commit.get_node(f_path)
938 if node.is_dir():
948 if node.is_dir():
939 raise NodeError('%s path is a %s not a file'
949 raise NodeError('%s path is a %s not a file'
940 % (node, type(node)))
950 % (node, type(node)))
941 except NodeDoesNotExistError:
951 except NodeDoesNotExistError:
942 commit = EmptyCommit(
952 commit = EmptyCommit(
943 commit_id=commit_id,
953 commit_id=commit_id,
944 idx=commit.idx,
954 idx=commit.idx,
945 repo=commit.repository,
955 repo=commit.repository,
946 alias=commit.repository.alias,
956 alias=commit.repository.alias,
947 message=commit.message,
957 message=commit.message,
948 author=commit.author,
958 author=commit.author,
949 date=commit.date)
959 date=commit.date)
950 node = FileNode(f_path, '', commit=commit)
960 node = FileNode(f_path, '', commit=commit)
951 else:
961 else:
952 commit = EmptyCommit(
962 commit = EmptyCommit(
953 repo=c.rhodecode_repo,
963 repo=c.rhodecode_repo,
954 alias=c.rhodecode_repo.alias)
964 alias=c.rhodecode_repo.alias)
955 node = FileNode(f_path, '', commit=commit)
965 node = FileNode(f_path, '', commit=commit)
956 return node
966 return node
957
967
958 def _get_node_history(self, commit, f_path, commits=None):
968 def _get_node_history(self, commit, f_path, commits=None):
959 """
969 """
960 get commit history for given node
970 get commit history for given node
961
971
962 :param commit: commit to calculate history
972 :param commit: commit to calculate history
963 :param f_path: path for node to calculate history for
973 :param f_path: path for node to calculate history for
964 :param commits: if passed don't calculate history and take
974 :param commits: if passed don't calculate history and take
965 commits defined in this list
975 commits defined in this list
966 """
976 """
967 # calculate history based on tip
977 # calculate history based on tip
968 tip = c.rhodecode_repo.get_commit()
978 tip = c.rhodecode_repo.get_commit()
969 if commits is None:
979 if commits is None:
970 pre_load = ["author", "branch"]
980 pre_load = ["author", "branch"]
971 try:
981 try:
972 commits = tip.get_file_history(f_path, pre_load=pre_load)
982 commits = tip.get_file_history(f_path, pre_load=pre_load)
973 except (NodeDoesNotExistError, CommitError):
983 except (NodeDoesNotExistError, CommitError):
974 # this node is not present at tip!
984 # this node is not present at tip!
975 commits = commit.get_file_history(f_path, pre_load=pre_load)
985 commits = commit.get_file_history(f_path, pre_load=pre_load)
976
986
977 history = []
987 history = []
978 commits_group = ([], _("Changesets"))
988 commits_group = ([], _("Changesets"))
979 for commit in commits:
989 for commit in commits:
980 branch = ' (%s)' % commit.branch if commit.branch else ''
990 branch = ' (%s)' % commit.branch if commit.branch else ''
981 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
991 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
982 commits_group[0].append((commit.raw_id, n_desc,))
992 commits_group[0].append((commit.raw_id, n_desc,))
983 history.append(commits_group)
993 history.append(commits_group)
984
994
985 symbolic_reference = self._symbolic_reference
995 symbolic_reference = self._symbolic_reference
986
996
987 if c.rhodecode_repo.alias == 'svn':
997 if c.rhodecode_repo.alias == 'svn':
988 adjusted_f_path = self._adjust_file_path_for_svn(
998 adjusted_f_path = self._adjust_file_path_for_svn(
989 f_path, c.rhodecode_repo)
999 f_path, c.rhodecode_repo)
990 if adjusted_f_path != f_path:
1000 if adjusted_f_path != f_path:
991 log.debug(
1001 log.debug(
992 'Recognized svn tag or branch in file "%s", using svn '
1002 'Recognized svn tag or branch in file "%s", using svn '
993 'specific symbolic references', f_path)
1003 'specific symbolic references', f_path)
994 f_path = adjusted_f_path
1004 f_path = adjusted_f_path
995 symbolic_reference = self._symbolic_reference_svn
1005 symbolic_reference = self._symbolic_reference_svn
996
1006
997 branches = self._create_references(
1007 branches = self._create_references(
998 c.rhodecode_repo.branches, symbolic_reference, f_path)
1008 c.rhodecode_repo.branches, symbolic_reference, f_path)
999 branches_group = (branches, _("Branches"))
1009 branches_group = (branches, _("Branches"))
1000
1010
1001 tags = self._create_references(
1011 tags = self._create_references(
1002 c.rhodecode_repo.tags, symbolic_reference, f_path)
1012 c.rhodecode_repo.tags, symbolic_reference, f_path)
1003 tags_group = (tags, _("Tags"))
1013 tags_group = (tags, _("Tags"))
1004
1014
1005 history.append(branches_group)
1015 history.append(branches_group)
1006 history.append(tags_group)
1016 history.append(tags_group)
1007
1017
1008 return history, commits
1018 return history, commits
1009
1019
1010 def _adjust_file_path_for_svn(self, f_path, repo):
1020 def _adjust_file_path_for_svn(self, f_path, repo):
1011 """
1021 """
1012 Computes the relative path of `f_path`.
1022 Computes the relative path of `f_path`.
1013
1023
1014 This is mainly based on prefix matching of the recognized tags and
1024 This is mainly based on prefix matching of the recognized tags and
1015 branches in the underlying repository.
1025 branches in the underlying repository.
1016 """
1026 """
1017 tags_and_branches = itertools.chain(
1027 tags_and_branches = itertools.chain(
1018 repo.branches.iterkeys(),
1028 repo.branches.iterkeys(),
1019 repo.tags.iterkeys())
1029 repo.tags.iterkeys())
1020 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
1030 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
1021
1031
1022 for name in tags_and_branches:
1032 for name in tags_and_branches:
1023 if f_path.startswith(name + '/'):
1033 if f_path.startswith(name + '/'):
1024 f_path = vcspath.relpath(f_path, name)
1034 f_path = vcspath.relpath(f_path, name)
1025 break
1035 break
1026 return f_path
1036 return f_path
1027
1037
1028 def _create_references(
1038 def _create_references(
1029 self, branches_or_tags, symbolic_reference, f_path):
1039 self, branches_or_tags, symbolic_reference, f_path):
1030 items = []
1040 items = []
1031 for name, commit_id in branches_or_tags.items():
1041 for name, commit_id in branches_or_tags.items():
1032 sym_ref = symbolic_reference(commit_id, name, f_path)
1042 sym_ref = symbolic_reference(commit_id, name, f_path)
1033 items.append((sym_ref, name))
1043 items.append((sym_ref, name))
1034 return items
1044 return items
1035
1045
1036 def _symbolic_reference(self, commit_id, name, f_path):
1046 def _symbolic_reference(self, commit_id, name, f_path):
1037 return commit_id
1047 return commit_id
1038
1048
1039 def _symbolic_reference_svn(self, commit_id, name, f_path):
1049 def _symbolic_reference_svn(self, commit_id, name, f_path):
1040 new_f_path = vcspath.join(name, f_path)
1050 new_f_path = vcspath.join(name, f_path)
1041 return u'%s@%s' % (new_f_path, commit_id)
1051 return u'%s@%s' % (new_f_path, commit_id)
1042
1052
1043 @LoginRequired()
1053 @LoginRequired()
1044 @XHRRequired()
1054 @XHRRequired()
1045 @HasRepoPermissionAnyDecorator(
1055 @HasRepoPermissionAnyDecorator(
1046 'repository.read', 'repository.write', 'repository.admin')
1056 'repository.read', 'repository.write', 'repository.admin')
1047 @jsonify
1057 @jsonify
1048 def nodelist(self, repo_name, revision, f_path):
1058 def nodelist(self, repo_name, revision, f_path):
1049 commit = self.__get_commit_or_redirect(revision, repo_name)
1059 commit = self.__get_commit_or_redirect(revision, repo_name)
1050
1060
1051 metadata = self._get_nodelist_at_commit(
1061 metadata = self._get_nodelist_at_commit(
1052 repo_name, commit.raw_id, f_path)
1062 repo_name, commit.raw_id, f_path)
1053 return {'nodes': metadata}
1063 return {'nodes': metadata}
1054
1064
1055 @LoginRequired()
1065 @LoginRequired()
1056 @XHRRequired()
1066 @XHRRequired()
1057 @HasRepoPermissionAnyDecorator(
1067 @HasRepoPermissionAnyDecorator(
1058 'repository.read', 'repository.write', 'repository.admin')
1068 'repository.read', 'repository.write', 'repository.admin')
1059 def nodetree_full(self, repo_name, commit_id, f_path):
1069 def nodetree_full(self, repo_name, commit_id, f_path):
1060 """
1070 """
1061 Returns rendered html of file tree that contains commit date,
1071 Returns rendered html of file tree that contains commit date,
1062 author, revision for the specified combination of
1072 author, revision for the specified combination of
1063 repo, commit_id and file path
1073 repo, commit_id and file path
1064
1074
1065 :param repo_name: name of the repository
1075 :param repo_name: name of the repository
1066 :param commit_id: commit_id of file tree
1076 :param commit_id: commit_id of file tree
1067 :param f_path: file path of the requested directory
1077 :param f_path: file path of the requested directory
1068 """
1078 """
1069
1079
1070 commit = self.__get_commit_or_redirect(commit_id, repo_name)
1080 commit = self.__get_commit_or_redirect(commit_id, repo_name)
1071 try:
1081 try:
1072 dir_node = commit.get_node(f_path)
1082 dir_node = commit.get_node(f_path)
1073 except RepositoryError as e:
1083 except RepositoryError as e:
1074 return 'error {}'.format(safe_str(e))
1084 return 'error {}'.format(safe_str(e))
1075
1085
1076 if dir_node.is_file():
1086 if dir_node.is_file():
1077 return ''
1087 return ''
1078
1088
1079 c.file = dir_node
1089 c.file = dir_node
1080 c.commit = commit
1090 c.commit = commit
1081
1091
1082 # using force=True here, make a little trick. We flush the cache and
1092 # using force=True here, make a little trick. We flush the cache and
1083 # compute it using the same key as without full_load, so the fully
1093 # compute it using the same key as without full_load, so the fully
1084 # loaded cached tree is now returned instead of partial
1094 # loaded cached tree is now returned instead of partial
1085 return self._get_tree_at_commit(
1095 return self._get_tree_at_commit(
1086 repo_name, commit.raw_id, dir_node.path, full_load=True,
1096 repo_name, commit.raw_id, dir_node.path, full_load=True,
1087 force=True)
1097 force=True)
@@ -1,1587 +1,1588 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import itertools
27 import itertools
28 import logging
28 import logging
29 import os
29 import os
30 import time
30 import time
31 import warnings
31 import warnings
32
32
33 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
34
34
35 from rhodecode.lib.utils2 import safe_str, safe_unicode
35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 from rhodecode.lib.vcs import connection
36 from rhodecode.lib.vcs import connection
37 from rhodecode.lib.vcs.utils import author_name, author_email
37 from rhodecode.lib.vcs.utils import author_name, author_email
38 from rhodecode.lib.vcs.conf import settings
38 from rhodecode.lib.vcs.conf import settings
39 from rhodecode.lib.vcs.exceptions import (
39 from rhodecode.lib.vcs.exceptions import (
40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 RepositoryError)
44 RepositoryError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 FILEMODE_DEFAULT = 0100644
50 FILEMODE_DEFAULT = 0100644
51 FILEMODE_EXECUTABLE = 0100755
51 FILEMODE_EXECUTABLE = 0100755
52
52
53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
54 MergeResponse = collections.namedtuple(
54 MergeResponse = collections.namedtuple(
55 'MergeResponse',
55 'MergeResponse',
56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
57
57
58
58
59 class MergeFailureReason(object):
59 class MergeFailureReason(object):
60 """
60 """
61 Enumeration with all the reasons why the server side merge could fail.
61 Enumeration with all the reasons why the server side merge could fail.
62
62
63 DO NOT change the number of the reasons, as they may be stored in the
63 DO NOT change the number of the reasons, as they may be stored in the
64 database.
64 database.
65
65
66 Changing the name of a reason is acceptable and encouraged to deprecate old
66 Changing the name of a reason is acceptable and encouraged to deprecate old
67 reasons.
67 reasons.
68 """
68 """
69
69
70 # Everything went well.
70 # Everything went well.
71 NONE = 0
71 NONE = 0
72
72
73 # An unexpected exception was raised. Check the logs for more details.
73 # An unexpected exception was raised. Check the logs for more details.
74 UNKNOWN = 1
74 UNKNOWN = 1
75
75
76 # The merge was not successful, there are conflicts.
76 # The merge was not successful, there are conflicts.
77 MERGE_FAILED = 2
77 MERGE_FAILED = 2
78
78
79 # The merge succeeded but we could not push it to the target repository.
79 # The merge succeeded but we could not push it to the target repository.
80 PUSH_FAILED = 3
80 PUSH_FAILED = 3
81
81
82 # The specified target is not a head in the target repository.
82 # The specified target is not a head in the target repository.
83 TARGET_IS_NOT_HEAD = 4
83 TARGET_IS_NOT_HEAD = 4
84
84
85 # The source repository contains more branches than the target. Pushing
85 # The source repository contains more branches than the target. Pushing
86 # the merge will create additional branches in the target.
86 # the merge will create additional branches in the target.
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
88
88
89 # The target reference has multiple heads. That does not allow to correctly
89 # The target reference has multiple heads. That does not allow to correctly
90 # identify the target location. This could only happen for mercurial
90 # identify the target location. This could only happen for mercurial
91 # branches.
91 # branches.
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
93
93
94 # The target repository is locked
94 # The target repository is locked
95 TARGET_IS_LOCKED = 7
95 TARGET_IS_LOCKED = 7
96
96
97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
98 # A involved commit could not be found.
98 # A involved commit could not be found.
99 _DEPRECATED_MISSING_COMMIT = 8
99 _DEPRECATED_MISSING_COMMIT = 8
100
100
101 # The target repo reference is missing.
101 # The target repo reference is missing.
102 MISSING_TARGET_REF = 9
102 MISSING_TARGET_REF = 9
103
103
104 # The source repo reference is missing.
104 # The source repo reference is missing.
105 MISSING_SOURCE_REF = 10
105 MISSING_SOURCE_REF = 10
106
106
107 # The merge was not successful, there are conflicts related to sub
107 # The merge was not successful, there are conflicts related to sub
108 # repositories.
108 # repositories.
109 SUBREPO_MERGE_FAILED = 11
109 SUBREPO_MERGE_FAILED = 11
110
110
111
111
112 class UpdateFailureReason(object):
112 class UpdateFailureReason(object):
113 """
113 """
114 Enumeration with all the reasons why the pull request update could fail.
114 Enumeration with all the reasons why the pull request update could fail.
115
115
116 DO NOT change the number of the reasons, as they may be stored in the
116 DO NOT change the number of the reasons, as they may be stored in the
117 database.
117 database.
118
118
119 Changing the name of a reason is acceptable and encouraged to deprecate old
119 Changing the name of a reason is acceptable and encouraged to deprecate old
120 reasons.
120 reasons.
121 """
121 """
122
122
123 # Everything went well.
123 # Everything went well.
124 NONE = 0
124 NONE = 0
125
125
126 # An unexpected exception was raised. Check the logs for more details.
126 # An unexpected exception was raised. Check the logs for more details.
127 UNKNOWN = 1
127 UNKNOWN = 1
128
128
129 # The pull request is up to date.
129 # The pull request is up to date.
130 NO_CHANGE = 2
130 NO_CHANGE = 2
131
131
132 # The pull request has a reference type that is not supported for update.
132 # The pull request has a reference type that is not supported for update.
133 WRONG_REF_TPYE = 3
133 WRONG_REF_TPYE = 3
134
134
135 # Update failed because the target reference is missing.
135 # Update failed because the target reference is missing.
136 MISSING_TARGET_REF = 4
136 MISSING_TARGET_REF = 4
137
137
138 # Update failed because the source reference is missing.
138 # Update failed because the source reference is missing.
139 MISSING_SOURCE_REF = 5
139 MISSING_SOURCE_REF = 5
140
140
141
141
142 class BaseRepository(object):
142 class BaseRepository(object):
143 """
143 """
144 Base Repository for final backends
144 Base Repository for final backends
145
145
146 .. attribute:: DEFAULT_BRANCH_NAME
146 .. attribute:: DEFAULT_BRANCH_NAME
147
147
148 name of default branch (i.e. "trunk" for svn, "master" for git etc.
148 name of default branch (i.e. "trunk" for svn, "master" for git etc.
149
149
150 .. attribute:: commit_ids
150 .. attribute:: commit_ids
151
151
152 list of all available commit ids, in ascending order
152 list of all available commit ids, in ascending order
153
153
154 .. attribute:: path
154 .. attribute:: path
155
155
156 absolute path to the repository
156 absolute path to the repository
157
157
158 .. attribute:: bookmarks
158 .. attribute:: bookmarks
159
159
160 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
160 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
161 there are no bookmarks or the backend implementation does not support
161 there are no bookmarks or the backend implementation does not support
162 bookmarks.
162 bookmarks.
163
163
164 .. attribute:: tags
164 .. attribute:: tags
165
165
166 Mapping from name to :term:`Commit ID` of the tag.
166 Mapping from name to :term:`Commit ID` of the tag.
167
167
168 """
168 """
169
169
170 DEFAULT_BRANCH_NAME = None
170 DEFAULT_BRANCH_NAME = None
171 DEFAULT_CONTACT = u"Unknown"
171 DEFAULT_CONTACT = u"Unknown"
172 DEFAULT_DESCRIPTION = u"unknown"
172 DEFAULT_DESCRIPTION = u"unknown"
173 EMPTY_COMMIT_ID = '0' * 40
173 EMPTY_COMMIT_ID = '0' * 40
174
174
175 path = None
175 path = None
176
176
177 def __init__(self, repo_path, config=None, create=False, **kwargs):
177 def __init__(self, repo_path, config=None, create=False, **kwargs):
178 """
178 """
179 Initializes repository. Raises RepositoryError if repository could
179 Initializes repository. Raises RepositoryError if repository could
180 not be find at the given ``repo_path`` or directory at ``repo_path``
180 not be find at the given ``repo_path`` or directory at ``repo_path``
181 exists and ``create`` is set to True.
181 exists and ``create`` is set to True.
182
182
183 :param repo_path: local path of the repository
183 :param repo_path: local path of the repository
184 :param config: repository configuration
184 :param config: repository configuration
185 :param create=False: if set to True, would try to create repository.
185 :param create=False: if set to True, would try to create repository.
186 :param src_url=None: if set, should be proper url from which repository
186 :param src_url=None: if set, should be proper url from which repository
187 would be cloned; requires ``create`` parameter to be set to True -
187 would be cloned; requires ``create`` parameter to be set to True -
188 raises RepositoryError if src_url is set and create evaluates to
188 raises RepositoryError if src_url is set and create evaluates to
189 False
189 False
190 """
190 """
191 raise NotImplementedError
191 raise NotImplementedError
192
192
193 def __repr__(self):
193 def __repr__(self):
194 return '<%s at %s>' % (self.__class__.__name__, self.path)
194 return '<%s at %s>' % (self.__class__.__name__, self.path)
195
195
196 def __len__(self):
196 def __len__(self):
197 return self.count()
197 return self.count()
198
198
199 def __eq__(self, other):
199 def __eq__(self, other):
200 same_instance = isinstance(other, self.__class__)
200 same_instance = isinstance(other, self.__class__)
201 return same_instance and other.path == self.path
201 return same_instance and other.path == self.path
202
202
203 def __ne__(self, other):
203 def __ne__(self, other):
204 return not self.__eq__(other)
204 return not self.__eq__(other)
205
205
206 @LazyProperty
206 @LazyProperty
207 def EMPTY_COMMIT(self):
207 def EMPTY_COMMIT(self):
208 return EmptyCommit(self.EMPTY_COMMIT_ID)
208 return EmptyCommit(self.EMPTY_COMMIT_ID)
209
209
210 @LazyProperty
210 @LazyProperty
211 def alias(self):
211 def alias(self):
212 for k, v in settings.BACKENDS.items():
212 for k, v in settings.BACKENDS.items():
213 if v.split('.')[-1] == str(self.__class__.__name__):
213 if v.split('.')[-1] == str(self.__class__.__name__):
214 return k
214 return k
215
215
216 @LazyProperty
216 @LazyProperty
217 def name(self):
217 def name(self):
218 return safe_unicode(os.path.basename(self.path))
218 return safe_unicode(os.path.basename(self.path))
219
219
220 @LazyProperty
220 @LazyProperty
221 def description(self):
221 def description(self):
222 raise NotImplementedError
222 raise NotImplementedError
223
223
224 def refs(self):
224 def refs(self):
225 """
225 """
226 returns a `dict` with branches, bookmarks, tags, and closed_branches
226 returns a `dict` with branches, bookmarks, tags, and closed_branches
227 for this repository
227 for this repository
228 """
228 """
229 return dict(
229 return dict(
230 branches=self.branches,
230 branches=self.branches,
231 branches_closed=self.branches_closed,
231 branches_closed=self.branches_closed,
232 tags=self.tags,
232 tags=self.tags,
233 bookmarks=self.bookmarks
233 bookmarks=self.bookmarks
234 )
234 )
235
235
236 @LazyProperty
236 @LazyProperty
237 def branches(self):
237 def branches(self):
238 """
238 """
239 A `dict` which maps branch names to commit ids.
239 A `dict` which maps branch names to commit ids.
240 """
240 """
241 raise NotImplementedError
241 raise NotImplementedError
242
242
243 @LazyProperty
243 @LazyProperty
244 def tags(self):
244 def tags(self):
245 """
245 """
246 A `dict` which maps tags names to commit ids.
246 A `dict` which maps tags names to commit ids.
247 """
247 """
248 raise NotImplementedError
248 raise NotImplementedError
249
249
250 @LazyProperty
250 @LazyProperty
251 def size(self):
251 def size(self):
252 """
252 """
253 Returns combined size in bytes for all repository files
253 Returns combined size in bytes for all repository files
254 """
254 """
255 tip = self.get_commit()
255 tip = self.get_commit()
256 return tip.size
256 return tip.size
257
257
258 def size_at_commit(self, commit_id):
258 def size_at_commit(self, commit_id):
259 commit = self.get_commit(commit_id)
259 commit = self.get_commit(commit_id)
260 return commit.size
260 return commit.size
261
261
262 def is_empty(self):
262 def is_empty(self):
263 return not bool(self.commit_ids)
263 return not bool(self.commit_ids)
264
264
265 @staticmethod
265 @staticmethod
266 def check_url(url, config):
266 def check_url(url, config):
267 """
267 """
268 Function will check given url and try to verify if it's a valid
268 Function will check given url and try to verify if it's a valid
269 link.
269 link.
270 """
270 """
271 raise NotImplementedError
271 raise NotImplementedError
272
272
273 @staticmethod
273 @staticmethod
274 def is_valid_repository(path):
274 def is_valid_repository(path):
275 """
275 """
276 Check if given `path` contains a valid repository of this backend
276 Check if given `path` contains a valid repository of this backend
277 """
277 """
278 raise NotImplementedError
278 raise NotImplementedError
279
279
280 # ==========================================================================
280 # ==========================================================================
281 # COMMITS
281 # COMMITS
282 # ==========================================================================
282 # ==========================================================================
283
283
284 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
284 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
285 """
285 """
286 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
286 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
287 are both None, most recent commit is returned.
287 are both None, most recent commit is returned.
288
288
289 :param pre_load: Optional. List of commit attributes to load.
289 :param pre_load: Optional. List of commit attributes to load.
290
290
291 :raises ``EmptyRepositoryError``: if there are no commits
291 :raises ``EmptyRepositoryError``: if there are no commits
292 """
292 """
293 raise NotImplementedError
293 raise NotImplementedError
294
294
295 def __iter__(self):
295 def __iter__(self):
296 for commit_id in self.commit_ids:
296 for commit_id in self.commit_ids:
297 yield self.get_commit(commit_id=commit_id)
297 yield self.get_commit(commit_id=commit_id)
298
298
299 def get_commits(
299 def get_commits(
300 self, start_id=None, end_id=None, start_date=None, end_date=None,
300 self, start_id=None, end_id=None, start_date=None, end_date=None,
301 branch_name=None, pre_load=None):
301 branch_name=None, pre_load=None):
302 """
302 """
303 Returns iterator of `BaseCommit` objects from start to end
303 Returns iterator of `BaseCommit` objects from start to end
304 not inclusive. This should behave just like a list, ie. end is not
304 not inclusive. This should behave just like a list, ie. end is not
305 inclusive.
305 inclusive.
306
306
307 :param start_id: None or str, must be a valid commit id
307 :param start_id: None or str, must be a valid commit id
308 :param end_id: None or str, must be a valid commit id
308 :param end_id: None or str, must be a valid commit id
309 :param start_date:
309 :param start_date:
310 :param end_date:
310 :param end_date:
311 :param branch_name:
311 :param branch_name:
312 :param pre_load:
312 :param pre_load:
313 """
313 """
314 raise NotImplementedError
314 raise NotImplementedError
315
315
316 def __getitem__(self, key):
316 def __getitem__(self, key):
317 """
317 """
318 Allows index based access to the commit objects of this repository.
318 Allows index based access to the commit objects of this repository.
319 """
319 """
320 pre_load = ["author", "branch", "date", "message", "parents"]
320 pre_load = ["author", "branch", "date", "message", "parents"]
321 if isinstance(key, slice):
321 if isinstance(key, slice):
322 return self._get_range(key, pre_load)
322 return self._get_range(key, pre_load)
323 return self.get_commit(commit_idx=key, pre_load=pre_load)
323 return self.get_commit(commit_idx=key, pre_load=pre_load)
324
324
325 def _get_range(self, slice_obj, pre_load):
325 def _get_range(self, slice_obj, pre_load):
326 for commit_id in self.commit_ids.__getitem__(slice_obj):
326 for commit_id in self.commit_ids.__getitem__(slice_obj):
327 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
327 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
328
328
329 def count(self):
329 def count(self):
330 return len(self.commit_ids)
330 return len(self.commit_ids)
331
331
332 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
332 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
333 """
333 """
334 Creates and returns a tag for the given ``commit_id``.
334 Creates and returns a tag for the given ``commit_id``.
335
335
336 :param name: name for new tag
336 :param name: name for new tag
337 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
337 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
338 :param commit_id: commit id for which new tag would be created
338 :param commit_id: commit id for which new tag would be created
339 :param message: message of the tag's commit
339 :param message: message of the tag's commit
340 :param date: date of tag's commit
340 :param date: date of tag's commit
341
341
342 :raises TagAlreadyExistError: if tag with same name already exists
342 :raises TagAlreadyExistError: if tag with same name already exists
343 """
343 """
344 raise NotImplementedError
344 raise NotImplementedError
345
345
346 def remove_tag(self, name, user, message=None, date=None):
346 def remove_tag(self, name, user, message=None, date=None):
347 """
347 """
348 Removes tag with the given ``name``.
348 Removes tag with the given ``name``.
349
349
350 :param name: name of the tag to be removed
350 :param name: name of the tag to be removed
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 :param message: message of the tag's removal commit
352 :param message: message of the tag's removal commit
353 :param date: date of tag's removal commit
353 :param date: date of tag's removal commit
354
354
355 :raises TagDoesNotExistError: if tag with given name does not exists
355 :raises TagDoesNotExistError: if tag with given name does not exists
356 """
356 """
357 raise NotImplementedError
357 raise NotImplementedError
358
358
359 def get_diff(
359 def get_diff(
360 self, commit1, commit2, path=None, ignore_whitespace=False,
360 self, commit1, commit2, path=None, ignore_whitespace=False,
361 context=3, path1=None):
361 context=3, path1=None):
362 """
362 """
363 Returns (git like) *diff*, as plain text. Shows changes introduced by
363 Returns (git like) *diff*, as plain text. Shows changes introduced by
364 `commit2` since `commit1`.
364 `commit2` since `commit1`.
365
365
366 :param commit1: Entry point from which diff is shown. Can be
366 :param commit1: Entry point from which diff is shown. Can be
367 ``self.EMPTY_COMMIT`` - in this case, patch showing all
367 ``self.EMPTY_COMMIT`` - in this case, patch showing all
368 the changes since empty state of the repository until `commit2`
368 the changes since empty state of the repository until `commit2`
369 :param commit2: Until which commit changes should be shown.
369 :param commit2: Until which commit changes should be shown.
370 :param path: Can be set to a path of a file to create a diff of that
370 :param path: Can be set to a path of a file to create a diff of that
371 file. If `path1` is also set, this value is only associated to
371 file. If `path1` is also set, this value is only associated to
372 `commit2`.
372 `commit2`.
373 :param ignore_whitespace: If set to ``True``, would not show whitespace
373 :param ignore_whitespace: If set to ``True``, would not show whitespace
374 changes. Defaults to ``False``.
374 changes. Defaults to ``False``.
375 :param context: How many lines before/after changed lines should be
375 :param context: How many lines before/after changed lines should be
376 shown. Defaults to ``3``.
376 shown. Defaults to ``3``.
377 :param path1: Can be set to a path to associate with `commit1`. This
377 :param path1: Can be set to a path to associate with `commit1`. This
378 parameter works only for backends which support diff generation for
378 parameter works only for backends which support diff generation for
379 different paths. Other backends will raise a `ValueError` if `path1`
379 different paths. Other backends will raise a `ValueError` if `path1`
380 is set and has a different value than `path`.
380 is set and has a different value than `path`.
381 :param file_path: filter this diff by given path pattern
381 :param file_path: filter this diff by given path pattern
382 """
382 """
383 raise NotImplementedError
383 raise NotImplementedError
384
384
385 def strip(self, commit_id, branch=None):
385 def strip(self, commit_id, branch=None):
386 """
386 """
387 Strip given commit_id from the repository
387 Strip given commit_id from the repository
388 """
388 """
389 raise NotImplementedError
389 raise NotImplementedError
390
390
391 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
391 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
392 """
392 """
393 Return a latest common ancestor commit if one exists for this repo
393 Return a latest common ancestor commit if one exists for this repo
394 `commit_id1` vs `commit_id2` from `repo2`.
394 `commit_id1` vs `commit_id2` from `repo2`.
395
395
396 :param commit_id1: Commit it from this repository to use as a
396 :param commit_id1: Commit it from this repository to use as a
397 target for the comparison.
397 target for the comparison.
398 :param commit_id2: Source commit id to use for comparison.
398 :param commit_id2: Source commit id to use for comparison.
399 :param repo2: Source repository to use for comparison.
399 :param repo2: Source repository to use for comparison.
400 """
400 """
401 raise NotImplementedError
401 raise NotImplementedError
402
402
403 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
403 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
404 """
404 """
405 Compare this repository's revision `commit_id1` with `commit_id2`.
405 Compare this repository's revision `commit_id1` with `commit_id2`.
406
406
407 Returns a tuple(commits, ancestor) that would be merged from
407 Returns a tuple(commits, ancestor) that would be merged from
408 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
408 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
409 will be returned as ancestor.
409 will be returned as ancestor.
410
410
411 :param commit_id1: Commit it from this repository to use as a
411 :param commit_id1: Commit it from this repository to use as a
412 target for the comparison.
412 target for the comparison.
413 :param commit_id2: Source commit id to use for comparison.
413 :param commit_id2: Source commit id to use for comparison.
414 :param repo2: Source repository to use for comparison.
414 :param repo2: Source repository to use for comparison.
415 :param merge: If set to ``True`` will do a merge compare which also
415 :param merge: If set to ``True`` will do a merge compare which also
416 returns the common ancestor.
416 returns the common ancestor.
417 :param pre_load: Optional. List of commit attributes to load.
417 :param pre_load: Optional. List of commit attributes to load.
418 """
418 """
419 raise NotImplementedError
419 raise NotImplementedError
420
420
421 def merge(self, target_ref, source_repo, source_ref, workspace_id,
421 def merge(self, target_ref, source_repo, source_ref, workspace_id,
422 user_name='', user_email='', message='', dry_run=False,
422 user_name='', user_email='', message='', dry_run=False,
423 use_rebase=False):
423 use_rebase=False):
424 """
424 """
425 Merge the revisions specified in `source_ref` from `source_repo`
425 Merge the revisions specified in `source_ref` from `source_repo`
426 onto the `target_ref` of this repository.
426 onto the `target_ref` of this repository.
427
427
428 `source_ref` and `target_ref` are named tupls with the following
428 `source_ref` and `target_ref` are named tupls with the following
429 fields `type`, `name` and `commit_id`.
429 fields `type`, `name` and `commit_id`.
430
430
431 Returns a MergeResponse named tuple with the following fields
431 Returns a MergeResponse named tuple with the following fields
432 'possible', 'executed', 'source_commit', 'target_commit',
432 'possible', 'executed', 'source_commit', 'target_commit',
433 'merge_commit'.
433 'merge_commit'.
434
434
435 :param target_ref: `target_ref` points to the commit on top of which
435 :param target_ref: `target_ref` points to the commit on top of which
436 the `source_ref` should be merged.
436 the `source_ref` should be merged.
437 :param source_repo: The repository that contains the commits to be
437 :param source_repo: The repository that contains the commits to be
438 merged.
438 merged.
439 :param source_ref: `source_ref` points to the topmost commit from
439 :param source_ref: `source_ref` points to the topmost commit from
440 the `source_repo` which should be merged.
440 the `source_repo` which should be merged.
441 :param workspace_id: `workspace_id` unique identifier.
441 :param workspace_id: `workspace_id` unique identifier.
442 :param user_name: Merge commit `user_name`.
442 :param user_name: Merge commit `user_name`.
443 :param user_email: Merge commit `user_email`.
443 :param user_email: Merge commit `user_email`.
444 :param message: Merge commit `message`.
444 :param message: Merge commit `message`.
445 :param dry_run: If `True` the merge will not take place.
445 :param dry_run: If `True` the merge will not take place.
446 :param use_rebase: If `True` commits from the source will be rebased
446 :param use_rebase: If `True` commits from the source will be rebased
447 on top of the target instead of being merged.
447 on top of the target instead of being merged.
448 """
448 """
449 if dry_run:
449 if dry_run:
450 message = message or 'dry_run_merge_message'
450 message = message or 'dry_run_merge_message'
451 user_email = user_email or 'dry-run-merge@rhodecode.com'
451 user_email = user_email or 'dry-run-merge@rhodecode.com'
452 user_name = user_name or 'Dry-Run User'
452 user_name = user_name or 'Dry-Run User'
453 else:
453 else:
454 if not user_name:
454 if not user_name:
455 raise ValueError('user_name cannot be empty')
455 raise ValueError('user_name cannot be empty')
456 if not user_email:
456 if not user_email:
457 raise ValueError('user_email cannot be empty')
457 raise ValueError('user_email cannot be empty')
458 if not message:
458 if not message:
459 raise ValueError('message cannot be empty')
459 raise ValueError('message cannot be empty')
460
460
461 shadow_repository_path = self._maybe_prepare_merge_workspace(
461 shadow_repository_path = self._maybe_prepare_merge_workspace(
462 workspace_id, target_ref)
462 workspace_id, target_ref)
463
463
464 try:
464 try:
465 return self._merge_repo(
465 return self._merge_repo(
466 shadow_repository_path, target_ref, source_repo,
466 shadow_repository_path, target_ref, source_repo,
467 source_ref, message, user_name, user_email, dry_run=dry_run,
467 source_ref, message, user_name, user_email, dry_run=dry_run,
468 use_rebase=use_rebase)
468 use_rebase=use_rebase)
469 except RepositoryError:
469 except RepositoryError:
470 log.exception(
470 log.exception(
471 'Unexpected failure when running merge, dry-run=%s',
471 'Unexpected failure when running merge, dry-run=%s',
472 dry_run)
472 dry_run)
473 return MergeResponse(
473 return MergeResponse(
474 False, False, None, MergeFailureReason.UNKNOWN)
474 False, False, None, MergeFailureReason.UNKNOWN)
475
475
476 def _merge_repo(self, shadow_repository_path, target_ref,
476 def _merge_repo(self, shadow_repository_path, target_ref,
477 source_repo, source_ref, merge_message,
477 source_repo, source_ref, merge_message,
478 merger_name, merger_email, dry_run=False, use_rebase=False):
478 merger_name, merger_email, dry_run=False, use_rebase=False):
479 """Internal implementation of merge."""
479 """Internal implementation of merge."""
480 raise NotImplementedError
480 raise NotImplementedError
481
481
482 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
482 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
483 """
483 """
484 Create the merge workspace.
484 Create the merge workspace.
485
485
486 :param workspace_id: `workspace_id` unique identifier.
486 :param workspace_id: `workspace_id` unique identifier.
487 """
487 """
488 raise NotImplementedError
488 raise NotImplementedError
489
489
490 def cleanup_merge_workspace(self, workspace_id):
490 def cleanup_merge_workspace(self, workspace_id):
491 """
491 """
492 Remove merge workspace.
492 Remove merge workspace.
493
493
494 This function MUST not fail in case there is no workspace associated to
494 This function MUST not fail in case there is no workspace associated to
495 the given `workspace_id`.
495 the given `workspace_id`.
496
496
497 :param workspace_id: `workspace_id` unique identifier.
497 :param workspace_id: `workspace_id` unique identifier.
498 """
498 """
499 raise NotImplementedError
499 raise NotImplementedError
500
500
501 # ========== #
501 # ========== #
502 # COMMIT API #
502 # COMMIT API #
503 # ========== #
503 # ========== #
504
504
505 @LazyProperty
505 @LazyProperty
506 def in_memory_commit(self):
506 def in_memory_commit(self):
507 """
507 """
508 Returns :class:`InMemoryCommit` object for this repository.
508 Returns :class:`InMemoryCommit` object for this repository.
509 """
509 """
510 raise NotImplementedError
510 raise NotImplementedError
511
511
512 # ======================== #
512 # ======================== #
513 # UTILITIES FOR SUBCLASSES #
513 # UTILITIES FOR SUBCLASSES #
514 # ======================== #
514 # ======================== #
515
515
516 def _validate_diff_commits(self, commit1, commit2):
516 def _validate_diff_commits(self, commit1, commit2):
517 """
517 """
518 Validates that the given commits are related to this repository.
518 Validates that the given commits are related to this repository.
519
519
520 Intended as a utility for sub classes to have a consistent validation
520 Intended as a utility for sub classes to have a consistent validation
521 of input parameters in methods like :meth:`get_diff`.
521 of input parameters in methods like :meth:`get_diff`.
522 """
522 """
523 self._validate_commit(commit1)
523 self._validate_commit(commit1)
524 self._validate_commit(commit2)
524 self._validate_commit(commit2)
525 if (isinstance(commit1, EmptyCommit) and
525 if (isinstance(commit1, EmptyCommit) and
526 isinstance(commit2, EmptyCommit)):
526 isinstance(commit2, EmptyCommit)):
527 raise ValueError("Cannot compare two empty commits")
527 raise ValueError("Cannot compare two empty commits")
528
528
529 def _validate_commit(self, commit):
529 def _validate_commit(self, commit):
530 if not isinstance(commit, BaseCommit):
530 if not isinstance(commit, BaseCommit):
531 raise TypeError(
531 raise TypeError(
532 "%s is not of type BaseCommit" % repr(commit))
532 "%s is not of type BaseCommit" % repr(commit))
533 if commit.repository != self and not isinstance(commit, EmptyCommit):
533 if commit.repository != self and not isinstance(commit, EmptyCommit):
534 raise ValueError(
534 raise ValueError(
535 "Commit %s must be a valid commit from this repository %s, "
535 "Commit %s must be a valid commit from this repository %s, "
536 "related to this repository instead %s." %
536 "related to this repository instead %s." %
537 (commit, self, commit.repository))
537 (commit, self, commit.repository))
538
538
539 def _validate_commit_id(self, commit_id):
539 def _validate_commit_id(self, commit_id):
540 if not isinstance(commit_id, basestring):
540 if not isinstance(commit_id, basestring):
541 raise TypeError("commit_id must be a string value")
541 raise TypeError("commit_id must be a string value")
542
542
543 def _validate_commit_idx(self, commit_idx):
543 def _validate_commit_idx(self, commit_idx):
544 if not isinstance(commit_idx, (int, long)):
544 if not isinstance(commit_idx, (int, long)):
545 raise TypeError("commit_idx must be a numeric value")
545 raise TypeError("commit_idx must be a numeric value")
546
546
547 def _validate_branch_name(self, branch_name):
547 def _validate_branch_name(self, branch_name):
548 if branch_name and branch_name not in self.branches_all:
548 if branch_name and branch_name not in self.branches_all:
549 msg = ("Branch %s not found in %s" % (branch_name, self))
549 msg = ("Branch %s not found in %s" % (branch_name, self))
550 raise BranchDoesNotExistError(msg)
550 raise BranchDoesNotExistError(msg)
551
551
552 #
552 #
553 # Supporting deprecated API parts
553 # Supporting deprecated API parts
554 # TODO: johbo: consider to move this into a mixin
554 # TODO: johbo: consider to move this into a mixin
555 #
555 #
556
556
557 @property
557 @property
558 def EMPTY_CHANGESET(self):
558 def EMPTY_CHANGESET(self):
559 warnings.warn(
559 warnings.warn(
560 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
560 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
561 return self.EMPTY_COMMIT_ID
561 return self.EMPTY_COMMIT_ID
562
562
563 @property
563 @property
564 def revisions(self):
564 def revisions(self):
565 warnings.warn("Use commits attribute instead", DeprecationWarning)
565 warnings.warn("Use commits attribute instead", DeprecationWarning)
566 return self.commit_ids
566 return self.commit_ids
567
567
568 @revisions.setter
568 @revisions.setter
569 def revisions(self, value):
569 def revisions(self, value):
570 warnings.warn("Use commits attribute instead", DeprecationWarning)
570 warnings.warn("Use commits attribute instead", DeprecationWarning)
571 self.commit_ids = value
571 self.commit_ids = value
572
572
573 def get_changeset(self, revision=None, pre_load=None):
573 def get_changeset(self, revision=None, pre_load=None):
574 warnings.warn("Use get_commit instead", DeprecationWarning)
574 warnings.warn("Use get_commit instead", DeprecationWarning)
575 commit_id = None
575 commit_id = None
576 commit_idx = None
576 commit_idx = None
577 if isinstance(revision, basestring):
577 if isinstance(revision, basestring):
578 commit_id = revision
578 commit_id = revision
579 else:
579 else:
580 commit_idx = revision
580 commit_idx = revision
581 return self.get_commit(
581 return self.get_commit(
582 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
582 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
583
583
584 def get_changesets(
584 def get_changesets(
585 self, start=None, end=None, start_date=None, end_date=None,
585 self, start=None, end=None, start_date=None, end_date=None,
586 branch_name=None, pre_load=None):
586 branch_name=None, pre_load=None):
587 warnings.warn("Use get_commits instead", DeprecationWarning)
587 warnings.warn("Use get_commits instead", DeprecationWarning)
588 start_id = self._revision_to_commit(start)
588 start_id = self._revision_to_commit(start)
589 end_id = self._revision_to_commit(end)
589 end_id = self._revision_to_commit(end)
590 return self.get_commits(
590 return self.get_commits(
591 start_id=start_id, end_id=end_id, start_date=start_date,
591 start_id=start_id, end_id=end_id, start_date=start_date,
592 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
592 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
593
593
594 def _revision_to_commit(self, revision):
594 def _revision_to_commit(self, revision):
595 """
595 """
596 Translates a revision to a commit_id
596 Translates a revision to a commit_id
597
597
598 Helps to support the old changeset based API which allows to use
598 Helps to support the old changeset based API which allows to use
599 commit ids and commit indices interchangeable.
599 commit ids and commit indices interchangeable.
600 """
600 """
601 if revision is None:
601 if revision is None:
602 return revision
602 return revision
603
603
604 if isinstance(revision, basestring):
604 if isinstance(revision, basestring):
605 commit_id = revision
605 commit_id = revision
606 else:
606 else:
607 commit_id = self.commit_ids[revision]
607 commit_id = self.commit_ids[revision]
608 return commit_id
608 return commit_id
609
609
610 @property
610 @property
611 def in_memory_changeset(self):
611 def in_memory_changeset(self):
612 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
612 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
613 return self.in_memory_commit
613 return self.in_memory_commit
614
614
615
615
616 class BaseCommit(object):
616 class BaseCommit(object):
617 """
617 """
618 Each backend should implement it's commit representation.
618 Each backend should implement it's commit representation.
619
619
620 **Attributes**
620 **Attributes**
621
621
622 ``repository``
622 ``repository``
623 repository object within which commit exists
623 repository object within which commit exists
624
624
625 ``id``
625 ``id``
626 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
626 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
627 just ``tip``.
627 just ``tip``.
628
628
629 ``raw_id``
629 ``raw_id``
630 raw commit representation (i.e. full 40 length sha for git
630 raw commit representation (i.e. full 40 length sha for git
631 backend)
631 backend)
632
632
633 ``short_id``
633 ``short_id``
634 shortened (if apply) version of ``raw_id``; it would be simple
634 shortened (if apply) version of ``raw_id``; it would be simple
635 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
635 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
636 as ``raw_id`` for subversion
636 as ``raw_id`` for subversion
637
637
638 ``idx``
638 ``idx``
639 commit index
639 commit index
640
640
641 ``files``
641 ``files``
642 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
642 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
643
643
644 ``dirs``
644 ``dirs``
645 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
645 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
646
646
647 ``nodes``
647 ``nodes``
648 combined list of ``Node`` objects
648 combined list of ``Node`` objects
649
649
650 ``author``
650 ``author``
651 author of the commit, as unicode
651 author of the commit, as unicode
652
652
653 ``message``
653 ``message``
654 message of the commit, as unicode
654 message of the commit, as unicode
655
655
656 ``parents``
656 ``parents``
657 list of parent commits
657 list of parent commits
658
658
659 """
659 """
660
660
661 branch = None
661 branch = None
662 """
662 """
663 Depending on the backend this should be set to the branch name of the
663 Depending on the backend this should be set to the branch name of the
664 commit. Backends not supporting branches on commits should leave this
664 commit. Backends not supporting branches on commits should leave this
665 value as ``None``.
665 value as ``None``.
666 """
666 """
667
667
668 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
668 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
669 """
669 """
670 This template is used to generate a default prefix for repository archives
670 This template is used to generate a default prefix for repository archives
671 if no prefix has been specified.
671 if no prefix has been specified.
672 """
672 """
673
673
674 def __str__(self):
674 def __str__(self):
675 return '<%s at %s:%s>' % (
675 return '<%s at %s:%s>' % (
676 self.__class__.__name__, self.idx, self.short_id)
676 self.__class__.__name__, self.idx, self.short_id)
677
677
678 def __repr__(self):
678 def __repr__(self):
679 return self.__str__()
679 return self.__str__()
680
680
681 def __unicode__(self):
681 def __unicode__(self):
682 return u'%s:%s' % (self.idx, self.short_id)
682 return u'%s:%s' % (self.idx, self.short_id)
683
683
684 def __eq__(self, other):
684 def __eq__(self, other):
685 same_instance = isinstance(other, self.__class__)
685 same_instance = isinstance(other, self.__class__)
686 return same_instance and self.raw_id == other.raw_id
686 return same_instance and self.raw_id == other.raw_id
687
687
688 def __json__(self):
688 def __json__(self):
689 parents = []
689 parents = []
690 try:
690 try:
691 for parent in self.parents:
691 for parent in self.parents:
692 parents.append({'raw_id': parent.raw_id})
692 parents.append({'raw_id': parent.raw_id})
693 except NotImplementedError:
693 except NotImplementedError:
694 # empty commit doesn't have parents implemented
694 # empty commit doesn't have parents implemented
695 pass
695 pass
696
696
697 return {
697 return {
698 'short_id': self.short_id,
698 'short_id': self.short_id,
699 'raw_id': self.raw_id,
699 'raw_id': self.raw_id,
700 'revision': self.idx,
700 'revision': self.idx,
701 'message': self.message,
701 'message': self.message,
702 'date': self.date,
702 'date': self.date,
703 'author': self.author,
703 'author': self.author,
704 'parents': parents,
704 'parents': parents,
705 'branch': self.branch
705 'branch': self.branch
706 }
706 }
707
707
708 @LazyProperty
708 @LazyProperty
709 def last(self):
709 def last(self):
710 """
710 """
711 ``True`` if this is last commit in repository, ``False``
711 ``True`` if this is last commit in repository, ``False``
712 otherwise; trying to access this attribute while there is no
712 otherwise; trying to access this attribute while there is no
713 commits would raise `EmptyRepositoryError`
713 commits would raise `EmptyRepositoryError`
714 """
714 """
715 if self.repository is None:
715 if self.repository is None:
716 raise CommitError("Cannot check if it's most recent commit")
716 raise CommitError("Cannot check if it's most recent commit")
717 return self.raw_id == self.repository.commit_ids[-1]
717 return self.raw_id == self.repository.commit_ids[-1]
718
718
719 @LazyProperty
719 @LazyProperty
720 def parents(self):
720 def parents(self):
721 """
721 """
722 Returns list of parent commits.
722 Returns list of parent commits.
723 """
723 """
724 raise NotImplementedError
724 raise NotImplementedError
725
725
726 @property
726 @property
727 def merge(self):
727 def merge(self):
728 """
728 """
729 Returns boolean if commit is a merge.
729 Returns boolean if commit is a merge.
730 """
730 """
731 return len(self.parents) > 1
731 return len(self.parents) > 1
732
732
733 @LazyProperty
733 @LazyProperty
734 def children(self):
734 def children(self):
735 """
735 """
736 Returns list of child commits.
736 Returns list of child commits.
737 """
737 """
738 raise NotImplementedError
738 raise NotImplementedError
739
739
740 @LazyProperty
740 @LazyProperty
741 def id(self):
741 def id(self):
742 """
742 """
743 Returns string identifying this commit.
743 Returns string identifying this commit.
744 """
744 """
745 raise NotImplementedError
745 raise NotImplementedError
746
746
747 @LazyProperty
747 @LazyProperty
748 def raw_id(self):
748 def raw_id(self):
749 """
749 """
750 Returns raw string identifying this commit.
750 Returns raw string identifying this commit.
751 """
751 """
752 raise NotImplementedError
752 raise NotImplementedError
753
753
754 @LazyProperty
754 @LazyProperty
755 def short_id(self):
755 def short_id(self):
756 """
756 """
757 Returns shortened version of ``raw_id`` attribute, as string,
757 Returns shortened version of ``raw_id`` attribute, as string,
758 identifying this commit, useful for presentation to users.
758 identifying this commit, useful for presentation to users.
759 """
759 """
760 raise NotImplementedError
760 raise NotImplementedError
761
761
762 @LazyProperty
762 @LazyProperty
763 def idx(self):
763 def idx(self):
764 """
764 """
765 Returns integer identifying this commit.
765 Returns integer identifying this commit.
766 """
766 """
767 raise NotImplementedError
767 raise NotImplementedError
768
768
769 @LazyProperty
769 @LazyProperty
770 def committer(self):
770 def committer(self):
771 """
771 """
772 Returns committer for this commit
772 Returns committer for this commit
773 """
773 """
774 raise NotImplementedError
774 raise NotImplementedError
775
775
776 @LazyProperty
776 @LazyProperty
777 def committer_name(self):
777 def committer_name(self):
778 """
778 """
779 Returns committer name for this commit
779 Returns committer name for this commit
780 """
780 """
781
781
782 return author_name(self.committer)
782 return author_name(self.committer)
783
783
784 @LazyProperty
784 @LazyProperty
785 def committer_email(self):
785 def committer_email(self):
786 """
786 """
787 Returns committer email address for this commit
787 Returns committer email address for this commit
788 """
788 """
789
789
790 return author_email(self.committer)
790 return author_email(self.committer)
791
791
792 @LazyProperty
792 @LazyProperty
793 def author(self):
793 def author(self):
794 """
794 """
795 Returns author for this commit
795 Returns author for this commit
796 """
796 """
797
797
798 raise NotImplementedError
798 raise NotImplementedError
799
799
800 @LazyProperty
800 @LazyProperty
801 def author_name(self):
801 def author_name(self):
802 """
802 """
803 Returns author name for this commit
803 Returns author name for this commit
804 """
804 """
805
805
806 return author_name(self.author)
806 return author_name(self.author)
807
807
808 @LazyProperty
808 @LazyProperty
809 def author_email(self):
809 def author_email(self):
810 """
810 """
811 Returns author email address for this commit
811 Returns author email address for this commit
812 """
812 """
813
813
814 return author_email(self.author)
814 return author_email(self.author)
815
815
816 def get_file_mode(self, path):
816 def get_file_mode(self, path):
817 """
817 """
818 Returns stat mode of the file at `path`.
818 Returns stat mode of the file at `path`.
819 """
819 """
820 raise NotImplementedError
820 raise NotImplementedError
821
821
822 def is_link(self, path):
822 def is_link(self, path):
823 """
823 """
824 Returns ``True`` if given `path` is a symlink
824 Returns ``True`` if given `path` is a symlink
825 """
825 """
826 raise NotImplementedError
826 raise NotImplementedError
827
827
828 def get_file_content(self, path):
828 def get_file_content(self, path):
829 """
829 """
830 Returns content of the file at the given `path`.
830 Returns content of the file at the given `path`.
831 """
831 """
832 raise NotImplementedError
832 raise NotImplementedError
833
833
834 def get_file_size(self, path):
834 def get_file_size(self, path):
835 """
835 """
836 Returns size of the file at the given `path`.
836 Returns size of the file at the given `path`.
837 """
837 """
838 raise NotImplementedError
838 raise NotImplementedError
839
839
840 def get_file_commit(self, path, pre_load=None):
840 def get_file_commit(self, path, pre_load=None):
841 """
841 """
842 Returns last commit of the file at the given `path`.
842 Returns last commit of the file at the given `path`.
843
843
844 :param pre_load: Optional. List of commit attributes to load.
844 :param pre_load: Optional. List of commit attributes to load.
845 """
845 """
846 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
846 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
847 if not commits:
847 if not commits:
848 raise RepositoryError(
848 raise RepositoryError(
849 'Failed to fetch history for path {}. '
849 'Failed to fetch history for path {}. '
850 'Please check if such path exists in your repository'.format(
850 'Please check if such path exists in your repository'.format(
851 path))
851 path))
852 return commits[0]
852 return commits[0]
853
853
854 def get_file_history(self, path, limit=None, pre_load=None):
854 def get_file_history(self, path, limit=None, pre_load=None):
855 """
855 """
856 Returns history of file as reversed list of :class:`BaseCommit`
856 Returns history of file as reversed list of :class:`BaseCommit`
857 objects for which file at given `path` has been modified.
857 objects for which file at given `path` has been modified.
858
858
859 :param limit: Optional. Allows to limit the size of the returned
859 :param limit: Optional. Allows to limit the size of the returned
860 history. This is intended as a hint to the underlying backend, so
860 history. This is intended as a hint to the underlying backend, so
861 that it can apply optimizations depending on the limit.
861 that it can apply optimizations depending on the limit.
862 :param pre_load: Optional. List of commit attributes to load.
862 :param pre_load: Optional. List of commit attributes to load.
863 """
863 """
864 raise NotImplementedError
864 raise NotImplementedError
865
865
866 def get_file_annotate(self, path, pre_load=None):
866 def get_file_annotate(self, path, pre_load=None):
867 """
867 """
868 Returns a generator of four element tuples with
868 Returns a generator of four element tuples with
869 lineno, sha, commit lazy loader and line
869 lineno, sha, commit lazy loader and line
870
870
871 :param pre_load: Optional. List of commit attributes to load.
871 :param pre_load: Optional. List of commit attributes to load.
872 """
872 """
873 raise NotImplementedError
873 raise NotImplementedError
874
874
875 def get_nodes(self, path):
875 def get_nodes(self, path):
876 """
876 """
877 Returns combined ``DirNode`` and ``FileNode`` objects list representing
877 Returns combined ``DirNode`` and ``FileNode`` objects list representing
878 state of commit at the given ``path``.
878 state of commit at the given ``path``.
879
879
880 :raises ``CommitError``: if node at the given ``path`` is not
880 :raises ``CommitError``: if node at the given ``path`` is not
881 instance of ``DirNode``
881 instance of ``DirNode``
882 """
882 """
883 raise NotImplementedError
883 raise NotImplementedError
884
884
885 def get_node(self, path):
885 def get_node(self, path):
886 """
886 """
887 Returns ``Node`` object from the given ``path``.
887 Returns ``Node`` object from the given ``path``.
888
888
889 :raises ``NodeDoesNotExistError``: if there is no node at the given
889 :raises ``NodeDoesNotExistError``: if there is no node at the given
890 ``path``
890 ``path``
891 """
891 """
892 raise NotImplementedError
892 raise NotImplementedError
893
893
894 def get_largefile_node(self, path):
894 def get_largefile_node(self, path):
895 """
895 """
896 Returns the path to largefile from Mercurial storage.
896 Returns the path to largefile from Mercurial/Git-lfs storage.
897 or None if it's not a largefile node
897 """
898 """
898 raise NotImplementedError
899 return None
899
900
900 def archive_repo(self, file_path, kind='tgz', subrepos=None,
901 def archive_repo(self, file_path, kind='tgz', subrepos=None,
901 prefix=None, write_metadata=False, mtime=None):
902 prefix=None, write_metadata=False, mtime=None):
902 """
903 """
903 Creates an archive containing the contents of the repository.
904 Creates an archive containing the contents of the repository.
904
905
905 :param file_path: path to the file which to create the archive.
906 :param file_path: path to the file which to create the archive.
906 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
907 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
907 :param prefix: name of root directory in archive.
908 :param prefix: name of root directory in archive.
908 Default is repository name and commit's short_id joined with dash:
909 Default is repository name and commit's short_id joined with dash:
909 ``"{repo_name}-{short_id}"``.
910 ``"{repo_name}-{short_id}"``.
910 :param write_metadata: write a metadata file into archive.
911 :param write_metadata: write a metadata file into archive.
911 :param mtime: custom modification time for archive creation, defaults
912 :param mtime: custom modification time for archive creation, defaults
912 to time.time() if not given.
913 to time.time() if not given.
913
914
914 :raise VCSError: If prefix has a problem.
915 :raise VCSError: If prefix has a problem.
915 """
916 """
916 allowed_kinds = settings.ARCHIVE_SPECS.keys()
917 allowed_kinds = settings.ARCHIVE_SPECS.keys()
917 if kind not in allowed_kinds:
918 if kind not in allowed_kinds:
918 raise ImproperArchiveTypeError(
919 raise ImproperArchiveTypeError(
919 'Archive kind (%s) not supported use one of %s' %
920 'Archive kind (%s) not supported use one of %s' %
920 (kind, allowed_kinds))
921 (kind, allowed_kinds))
921
922
922 prefix = self._validate_archive_prefix(prefix)
923 prefix = self._validate_archive_prefix(prefix)
923
924
924 mtime = mtime or time.mktime(self.date.timetuple())
925 mtime = mtime or time.mktime(self.date.timetuple())
925
926
926 file_info = []
927 file_info = []
927 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
928 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
928 for _r, _d, files in cur_rev.walk('/'):
929 for _r, _d, files in cur_rev.walk('/'):
929 for f in files:
930 for f in files:
930 f_path = os.path.join(prefix, f.path)
931 f_path = os.path.join(prefix, f.path)
931 file_info.append(
932 file_info.append(
932 (f_path, f.mode, f.is_link(), f.raw_bytes))
933 (f_path, f.mode, f.is_link(), f.raw_bytes))
933
934
934 if write_metadata:
935 if write_metadata:
935 metadata = [
936 metadata = [
936 ('repo_name', self.repository.name),
937 ('repo_name', self.repository.name),
937 ('rev', self.raw_id),
938 ('rev', self.raw_id),
938 ('create_time', mtime),
939 ('create_time', mtime),
939 ('branch', self.branch),
940 ('branch', self.branch),
940 ('tags', ','.join(self.tags)),
941 ('tags', ','.join(self.tags)),
941 ]
942 ]
942 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
943 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
943 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
944 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
944
945
945 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
946 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
946
947
947 def _validate_archive_prefix(self, prefix):
948 def _validate_archive_prefix(self, prefix):
948 if prefix is None:
949 if prefix is None:
949 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
950 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
950 repo_name=safe_str(self.repository.name),
951 repo_name=safe_str(self.repository.name),
951 short_id=self.short_id)
952 short_id=self.short_id)
952 elif not isinstance(prefix, str):
953 elif not isinstance(prefix, str):
953 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
954 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
954 elif prefix.startswith('/'):
955 elif prefix.startswith('/'):
955 raise VCSError("Prefix cannot start with leading slash")
956 raise VCSError("Prefix cannot start with leading slash")
956 elif prefix.strip() == '':
957 elif prefix.strip() == '':
957 raise VCSError("Prefix cannot be empty")
958 raise VCSError("Prefix cannot be empty")
958 return prefix
959 return prefix
959
960
960 @LazyProperty
961 @LazyProperty
961 def root(self):
962 def root(self):
962 """
963 """
963 Returns ``RootNode`` object for this commit.
964 Returns ``RootNode`` object for this commit.
964 """
965 """
965 return self.get_node('')
966 return self.get_node('')
966
967
967 def next(self, branch=None):
968 def next(self, branch=None):
968 """
969 """
969 Returns next commit from current, if branch is gives it will return
970 Returns next commit from current, if branch is gives it will return
970 next commit belonging to this branch
971 next commit belonging to this branch
971
972
972 :param branch: show commits within the given named branch
973 :param branch: show commits within the given named branch
973 """
974 """
974 indexes = xrange(self.idx + 1, self.repository.count())
975 indexes = xrange(self.idx + 1, self.repository.count())
975 return self._find_next(indexes, branch)
976 return self._find_next(indexes, branch)
976
977
977 def prev(self, branch=None):
978 def prev(self, branch=None):
978 """
979 """
979 Returns previous commit from current, if branch is gives it will
980 Returns previous commit from current, if branch is gives it will
980 return previous commit belonging to this branch
981 return previous commit belonging to this branch
981
982
982 :param branch: show commit within the given named branch
983 :param branch: show commit within the given named branch
983 """
984 """
984 indexes = xrange(self.idx - 1, -1, -1)
985 indexes = xrange(self.idx - 1, -1, -1)
985 return self._find_next(indexes, branch)
986 return self._find_next(indexes, branch)
986
987
987 def _find_next(self, indexes, branch=None):
988 def _find_next(self, indexes, branch=None):
988 if branch and self.branch != branch:
989 if branch and self.branch != branch:
989 raise VCSError('Branch option used on commit not belonging '
990 raise VCSError('Branch option used on commit not belonging '
990 'to that branch')
991 'to that branch')
991
992
992 for next_idx in indexes:
993 for next_idx in indexes:
993 commit = self.repository.get_commit(commit_idx=next_idx)
994 commit = self.repository.get_commit(commit_idx=next_idx)
994 if branch and branch != commit.branch:
995 if branch and branch != commit.branch:
995 continue
996 continue
996 return commit
997 return commit
997 raise CommitDoesNotExistError
998 raise CommitDoesNotExistError
998
999
999 def diff(self, ignore_whitespace=True, context=3):
1000 def diff(self, ignore_whitespace=True, context=3):
1000 """
1001 """
1001 Returns a `Diff` object representing the change made by this commit.
1002 Returns a `Diff` object representing the change made by this commit.
1002 """
1003 """
1003 parent = (
1004 parent = (
1004 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1005 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1005 diff = self.repository.get_diff(
1006 diff = self.repository.get_diff(
1006 parent, self,
1007 parent, self,
1007 ignore_whitespace=ignore_whitespace,
1008 ignore_whitespace=ignore_whitespace,
1008 context=context)
1009 context=context)
1009 return diff
1010 return diff
1010
1011
1011 @LazyProperty
1012 @LazyProperty
1012 def added(self):
1013 def added(self):
1013 """
1014 """
1014 Returns list of added ``FileNode`` objects.
1015 Returns list of added ``FileNode`` objects.
1015 """
1016 """
1016 raise NotImplementedError
1017 raise NotImplementedError
1017
1018
1018 @LazyProperty
1019 @LazyProperty
1019 def changed(self):
1020 def changed(self):
1020 """
1021 """
1021 Returns list of modified ``FileNode`` objects.
1022 Returns list of modified ``FileNode`` objects.
1022 """
1023 """
1023 raise NotImplementedError
1024 raise NotImplementedError
1024
1025
1025 @LazyProperty
1026 @LazyProperty
1026 def removed(self):
1027 def removed(self):
1027 """
1028 """
1028 Returns list of removed ``FileNode`` objects.
1029 Returns list of removed ``FileNode`` objects.
1029 """
1030 """
1030 raise NotImplementedError
1031 raise NotImplementedError
1031
1032
1032 @LazyProperty
1033 @LazyProperty
1033 def size(self):
1034 def size(self):
1034 """
1035 """
1035 Returns total number of bytes from contents of all filenodes.
1036 Returns total number of bytes from contents of all filenodes.
1036 """
1037 """
1037 return sum((node.size for node in self.get_filenodes_generator()))
1038 return sum((node.size for node in self.get_filenodes_generator()))
1038
1039
1039 def walk(self, topurl=''):
1040 def walk(self, topurl=''):
1040 """
1041 """
1041 Similar to os.walk method. Insted of filesystem it walks through
1042 Similar to os.walk method. Insted of filesystem it walks through
1042 commit starting at given ``topurl``. Returns generator of tuples
1043 commit starting at given ``topurl``. Returns generator of tuples
1043 (topnode, dirnodes, filenodes).
1044 (topnode, dirnodes, filenodes).
1044 """
1045 """
1045 topnode = self.get_node(topurl)
1046 topnode = self.get_node(topurl)
1046 if not topnode.is_dir():
1047 if not topnode.is_dir():
1047 return
1048 return
1048 yield (topnode, topnode.dirs, topnode.files)
1049 yield (topnode, topnode.dirs, topnode.files)
1049 for dirnode in topnode.dirs:
1050 for dirnode in topnode.dirs:
1050 for tup in self.walk(dirnode.path):
1051 for tup in self.walk(dirnode.path):
1051 yield tup
1052 yield tup
1052
1053
1053 def get_filenodes_generator(self):
1054 def get_filenodes_generator(self):
1054 """
1055 """
1055 Returns generator that yields *all* file nodes.
1056 Returns generator that yields *all* file nodes.
1056 """
1057 """
1057 for topnode, dirs, files in self.walk():
1058 for topnode, dirs, files in self.walk():
1058 for node in files:
1059 for node in files:
1059 yield node
1060 yield node
1060
1061
1061 #
1062 #
1062 # Utilities for sub classes to support consistent behavior
1063 # Utilities for sub classes to support consistent behavior
1063 #
1064 #
1064
1065
1065 def no_node_at_path(self, path):
1066 def no_node_at_path(self, path):
1066 return NodeDoesNotExistError(
1067 return NodeDoesNotExistError(
1067 "There is no file nor directory at the given path: "
1068 "There is no file nor directory at the given path: "
1068 "'%s' at commit %s" % (path, self.short_id))
1069 "'%s' at commit %s" % (path, self.short_id))
1069
1070
1070 def _fix_path(self, path):
1071 def _fix_path(self, path):
1071 """
1072 """
1072 Paths are stored without trailing slash so we need to get rid off it if
1073 Paths are stored without trailing slash so we need to get rid off it if
1073 needed.
1074 needed.
1074 """
1075 """
1075 return path.rstrip('/')
1076 return path.rstrip('/')
1076
1077
1077 #
1078 #
1078 # Deprecated API based on changesets
1079 # Deprecated API based on changesets
1079 #
1080 #
1080
1081
1081 @property
1082 @property
1082 def revision(self):
1083 def revision(self):
1083 warnings.warn("Use idx instead", DeprecationWarning)
1084 warnings.warn("Use idx instead", DeprecationWarning)
1084 return self.idx
1085 return self.idx
1085
1086
1086 @revision.setter
1087 @revision.setter
1087 def revision(self, value):
1088 def revision(self, value):
1088 warnings.warn("Use idx instead", DeprecationWarning)
1089 warnings.warn("Use idx instead", DeprecationWarning)
1089 self.idx = value
1090 self.idx = value
1090
1091
1091 def get_file_changeset(self, path):
1092 def get_file_changeset(self, path):
1092 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1093 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1093 return self.get_file_commit(path)
1094 return self.get_file_commit(path)
1094
1095
1095
1096
1096 class BaseChangesetClass(type):
1097 class BaseChangesetClass(type):
1097
1098
1098 def __instancecheck__(self, instance):
1099 def __instancecheck__(self, instance):
1099 return isinstance(instance, BaseCommit)
1100 return isinstance(instance, BaseCommit)
1100
1101
1101
1102
1102 class BaseChangeset(BaseCommit):
1103 class BaseChangeset(BaseCommit):
1103
1104
1104 __metaclass__ = BaseChangesetClass
1105 __metaclass__ = BaseChangesetClass
1105
1106
1106 def __new__(cls, *args, **kwargs):
1107 def __new__(cls, *args, **kwargs):
1107 warnings.warn(
1108 warnings.warn(
1108 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1109 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1109 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1110 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1110
1111
1111
1112
1112 class BaseInMemoryCommit(object):
1113 class BaseInMemoryCommit(object):
1113 """
1114 """
1114 Represents differences between repository's state (most recent head) and
1115 Represents differences between repository's state (most recent head) and
1115 changes made *in place*.
1116 changes made *in place*.
1116
1117
1117 **Attributes**
1118 **Attributes**
1118
1119
1119 ``repository``
1120 ``repository``
1120 repository object for this in-memory-commit
1121 repository object for this in-memory-commit
1121
1122
1122 ``added``
1123 ``added``
1123 list of ``FileNode`` objects marked as *added*
1124 list of ``FileNode`` objects marked as *added*
1124
1125
1125 ``changed``
1126 ``changed``
1126 list of ``FileNode`` objects marked as *changed*
1127 list of ``FileNode`` objects marked as *changed*
1127
1128
1128 ``removed``
1129 ``removed``
1129 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1130 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1130 *removed*
1131 *removed*
1131
1132
1132 ``parents``
1133 ``parents``
1133 list of :class:`BaseCommit` instances representing parents of
1134 list of :class:`BaseCommit` instances representing parents of
1134 in-memory commit. Should always be 2-element sequence.
1135 in-memory commit. Should always be 2-element sequence.
1135
1136
1136 """
1137 """
1137
1138
1138 def __init__(self, repository):
1139 def __init__(self, repository):
1139 self.repository = repository
1140 self.repository = repository
1140 self.added = []
1141 self.added = []
1141 self.changed = []
1142 self.changed = []
1142 self.removed = []
1143 self.removed = []
1143 self.parents = []
1144 self.parents = []
1144
1145
1145 def add(self, *filenodes):
1146 def add(self, *filenodes):
1146 """
1147 """
1147 Marks given ``FileNode`` objects as *to be committed*.
1148 Marks given ``FileNode`` objects as *to be committed*.
1148
1149
1149 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1150 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1150 latest commit
1151 latest commit
1151 :raises ``NodeAlreadyAddedError``: if node with same path is already
1152 :raises ``NodeAlreadyAddedError``: if node with same path is already
1152 marked as *added*
1153 marked as *added*
1153 """
1154 """
1154 # Check if not already marked as *added* first
1155 # Check if not already marked as *added* first
1155 for node in filenodes:
1156 for node in filenodes:
1156 if node.path in (n.path for n in self.added):
1157 if node.path in (n.path for n in self.added):
1157 raise NodeAlreadyAddedError(
1158 raise NodeAlreadyAddedError(
1158 "Such FileNode %s is already marked for addition"
1159 "Such FileNode %s is already marked for addition"
1159 % node.path)
1160 % node.path)
1160 for node in filenodes:
1161 for node in filenodes:
1161 self.added.append(node)
1162 self.added.append(node)
1162
1163
1163 def change(self, *filenodes):
1164 def change(self, *filenodes):
1164 """
1165 """
1165 Marks given ``FileNode`` objects to be *changed* in next commit.
1166 Marks given ``FileNode`` objects to be *changed* in next commit.
1166
1167
1167 :raises ``EmptyRepositoryError``: if there are no commits yet
1168 :raises ``EmptyRepositoryError``: if there are no commits yet
1168 :raises ``NodeAlreadyExistsError``: if node with same path is already
1169 :raises ``NodeAlreadyExistsError``: if node with same path is already
1169 marked to be *changed*
1170 marked to be *changed*
1170 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1171 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1171 marked to be *removed*
1172 marked to be *removed*
1172 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1173 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1173 commit
1174 commit
1174 :raises ``NodeNotChangedError``: if node hasn't really be changed
1175 :raises ``NodeNotChangedError``: if node hasn't really be changed
1175 """
1176 """
1176 for node in filenodes:
1177 for node in filenodes:
1177 if node.path in (n.path for n in self.removed):
1178 if node.path in (n.path for n in self.removed):
1178 raise NodeAlreadyRemovedError(
1179 raise NodeAlreadyRemovedError(
1179 "Node at %s is already marked as removed" % node.path)
1180 "Node at %s is already marked as removed" % node.path)
1180 try:
1181 try:
1181 self.repository.get_commit()
1182 self.repository.get_commit()
1182 except EmptyRepositoryError:
1183 except EmptyRepositoryError:
1183 raise EmptyRepositoryError(
1184 raise EmptyRepositoryError(
1184 "Nothing to change - try to *add* new nodes rather than "
1185 "Nothing to change - try to *add* new nodes rather than "
1185 "changing them")
1186 "changing them")
1186 for node in filenodes:
1187 for node in filenodes:
1187 if node.path in (n.path for n in self.changed):
1188 if node.path in (n.path for n in self.changed):
1188 raise NodeAlreadyChangedError(
1189 raise NodeAlreadyChangedError(
1189 "Node at '%s' is already marked as changed" % node.path)
1190 "Node at '%s' is already marked as changed" % node.path)
1190 self.changed.append(node)
1191 self.changed.append(node)
1191
1192
1192 def remove(self, *filenodes):
1193 def remove(self, *filenodes):
1193 """
1194 """
1194 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1195 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1195 *removed* in next commit.
1196 *removed* in next commit.
1196
1197
1197 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1198 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1198 be *removed*
1199 be *removed*
1199 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1200 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1200 be *changed*
1201 be *changed*
1201 """
1202 """
1202 for node in filenodes:
1203 for node in filenodes:
1203 if node.path in (n.path for n in self.removed):
1204 if node.path in (n.path for n in self.removed):
1204 raise NodeAlreadyRemovedError(
1205 raise NodeAlreadyRemovedError(
1205 "Node is already marked to for removal at %s" % node.path)
1206 "Node is already marked to for removal at %s" % node.path)
1206 if node.path in (n.path for n in self.changed):
1207 if node.path in (n.path for n in self.changed):
1207 raise NodeAlreadyChangedError(
1208 raise NodeAlreadyChangedError(
1208 "Node is already marked to be changed at %s" % node.path)
1209 "Node is already marked to be changed at %s" % node.path)
1209 # We only mark node as *removed* - real removal is done by
1210 # We only mark node as *removed* - real removal is done by
1210 # commit method
1211 # commit method
1211 self.removed.append(node)
1212 self.removed.append(node)
1212
1213
1213 def reset(self):
1214 def reset(self):
1214 """
1215 """
1215 Resets this instance to initial state (cleans ``added``, ``changed``
1216 Resets this instance to initial state (cleans ``added``, ``changed``
1216 and ``removed`` lists).
1217 and ``removed`` lists).
1217 """
1218 """
1218 self.added = []
1219 self.added = []
1219 self.changed = []
1220 self.changed = []
1220 self.removed = []
1221 self.removed = []
1221 self.parents = []
1222 self.parents = []
1222
1223
1223 def get_ipaths(self):
1224 def get_ipaths(self):
1224 """
1225 """
1225 Returns generator of paths from nodes marked as added, changed or
1226 Returns generator of paths from nodes marked as added, changed or
1226 removed.
1227 removed.
1227 """
1228 """
1228 for node in itertools.chain(self.added, self.changed, self.removed):
1229 for node in itertools.chain(self.added, self.changed, self.removed):
1229 yield node.path
1230 yield node.path
1230
1231
1231 def get_paths(self):
1232 def get_paths(self):
1232 """
1233 """
1233 Returns list of paths from nodes marked as added, changed or removed.
1234 Returns list of paths from nodes marked as added, changed or removed.
1234 """
1235 """
1235 return list(self.get_ipaths())
1236 return list(self.get_ipaths())
1236
1237
1237 def check_integrity(self, parents=None):
1238 def check_integrity(self, parents=None):
1238 """
1239 """
1239 Checks in-memory commit's integrity. Also, sets parents if not
1240 Checks in-memory commit's integrity. Also, sets parents if not
1240 already set.
1241 already set.
1241
1242
1242 :raises CommitError: if any error occurs (i.e.
1243 :raises CommitError: if any error occurs (i.e.
1243 ``NodeDoesNotExistError``).
1244 ``NodeDoesNotExistError``).
1244 """
1245 """
1245 if not self.parents:
1246 if not self.parents:
1246 parents = parents or []
1247 parents = parents or []
1247 if len(parents) == 0:
1248 if len(parents) == 0:
1248 try:
1249 try:
1249 parents = [self.repository.get_commit(), None]
1250 parents = [self.repository.get_commit(), None]
1250 except EmptyRepositoryError:
1251 except EmptyRepositoryError:
1251 parents = [None, None]
1252 parents = [None, None]
1252 elif len(parents) == 1:
1253 elif len(parents) == 1:
1253 parents += [None]
1254 parents += [None]
1254 self.parents = parents
1255 self.parents = parents
1255
1256
1256 # Local parents, only if not None
1257 # Local parents, only if not None
1257 parents = [p for p in self.parents if p]
1258 parents = [p for p in self.parents if p]
1258
1259
1259 # Check nodes marked as added
1260 # Check nodes marked as added
1260 for p in parents:
1261 for p in parents:
1261 for node in self.added:
1262 for node in self.added:
1262 try:
1263 try:
1263 p.get_node(node.path)
1264 p.get_node(node.path)
1264 except NodeDoesNotExistError:
1265 except NodeDoesNotExistError:
1265 pass
1266 pass
1266 else:
1267 else:
1267 raise NodeAlreadyExistsError(
1268 raise NodeAlreadyExistsError(
1268 "Node `%s` already exists at %s" % (node.path, p))
1269 "Node `%s` already exists at %s" % (node.path, p))
1269
1270
1270 # Check nodes marked as changed
1271 # Check nodes marked as changed
1271 missing = set(self.changed)
1272 missing = set(self.changed)
1272 not_changed = set(self.changed)
1273 not_changed = set(self.changed)
1273 if self.changed and not parents:
1274 if self.changed and not parents:
1274 raise NodeDoesNotExistError(str(self.changed[0].path))
1275 raise NodeDoesNotExistError(str(self.changed[0].path))
1275 for p in parents:
1276 for p in parents:
1276 for node in self.changed:
1277 for node in self.changed:
1277 try:
1278 try:
1278 old = p.get_node(node.path)
1279 old = p.get_node(node.path)
1279 missing.remove(node)
1280 missing.remove(node)
1280 # if content actually changed, remove node from not_changed
1281 # if content actually changed, remove node from not_changed
1281 if old.content != node.content:
1282 if old.content != node.content:
1282 not_changed.remove(node)
1283 not_changed.remove(node)
1283 except NodeDoesNotExistError:
1284 except NodeDoesNotExistError:
1284 pass
1285 pass
1285 if self.changed and missing:
1286 if self.changed and missing:
1286 raise NodeDoesNotExistError(
1287 raise NodeDoesNotExistError(
1287 "Node `%s` marked as modified but missing in parents: %s"
1288 "Node `%s` marked as modified but missing in parents: %s"
1288 % (node.path, parents))
1289 % (node.path, parents))
1289
1290
1290 if self.changed and not_changed:
1291 if self.changed and not_changed:
1291 raise NodeNotChangedError(
1292 raise NodeNotChangedError(
1292 "Node `%s` wasn't actually changed (parents: %s)"
1293 "Node `%s` wasn't actually changed (parents: %s)"
1293 % (not_changed.pop().path, parents))
1294 % (not_changed.pop().path, parents))
1294
1295
1295 # Check nodes marked as removed
1296 # Check nodes marked as removed
1296 if self.removed and not parents:
1297 if self.removed and not parents:
1297 raise NodeDoesNotExistError(
1298 raise NodeDoesNotExistError(
1298 "Cannot remove node at %s as there "
1299 "Cannot remove node at %s as there "
1299 "were no parents specified" % self.removed[0].path)
1300 "were no parents specified" % self.removed[0].path)
1300 really_removed = set()
1301 really_removed = set()
1301 for p in parents:
1302 for p in parents:
1302 for node in self.removed:
1303 for node in self.removed:
1303 try:
1304 try:
1304 p.get_node(node.path)
1305 p.get_node(node.path)
1305 really_removed.add(node)
1306 really_removed.add(node)
1306 except CommitError:
1307 except CommitError:
1307 pass
1308 pass
1308 not_removed = set(self.removed) - really_removed
1309 not_removed = set(self.removed) - really_removed
1309 if not_removed:
1310 if not_removed:
1310 # TODO: johbo: This code branch does not seem to be covered
1311 # TODO: johbo: This code branch does not seem to be covered
1311 raise NodeDoesNotExistError(
1312 raise NodeDoesNotExistError(
1312 "Cannot remove node at %s from "
1313 "Cannot remove node at %s from "
1313 "following parents: %s" % (not_removed, parents))
1314 "following parents: %s" % (not_removed, parents))
1314
1315
1315 def commit(
1316 def commit(
1316 self, message, author, parents=None, branch=None, date=None,
1317 self, message, author, parents=None, branch=None, date=None,
1317 **kwargs):
1318 **kwargs):
1318 """
1319 """
1319 Performs in-memory commit (doesn't check workdir in any way) and
1320 Performs in-memory commit (doesn't check workdir in any way) and
1320 returns newly created :class:`BaseCommit`. Updates repository's
1321 returns newly created :class:`BaseCommit`. Updates repository's
1321 attribute `commits`.
1322 attribute `commits`.
1322
1323
1323 .. note::
1324 .. note::
1324
1325
1325 While overriding this method each backend's should call
1326 While overriding this method each backend's should call
1326 ``self.check_integrity(parents)`` in the first place.
1327 ``self.check_integrity(parents)`` in the first place.
1327
1328
1328 :param message: message of the commit
1329 :param message: message of the commit
1329 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1330 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1330 :param parents: single parent or sequence of parents from which commit
1331 :param parents: single parent or sequence of parents from which commit
1331 would be derived
1332 would be derived
1332 :param date: ``datetime.datetime`` instance. Defaults to
1333 :param date: ``datetime.datetime`` instance. Defaults to
1333 ``datetime.datetime.now()``.
1334 ``datetime.datetime.now()``.
1334 :param branch: branch name, as string. If none given, default backend's
1335 :param branch: branch name, as string. If none given, default backend's
1335 branch would be used.
1336 branch would be used.
1336
1337
1337 :raises ``CommitError``: if any error occurs while committing
1338 :raises ``CommitError``: if any error occurs while committing
1338 """
1339 """
1339 raise NotImplementedError
1340 raise NotImplementedError
1340
1341
1341
1342
1342 class BaseInMemoryChangesetClass(type):
1343 class BaseInMemoryChangesetClass(type):
1343
1344
1344 def __instancecheck__(self, instance):
1345 def __instancecheck__(self, instance):
1345 return isinstance(instance, BaseInMemoryCommit)
1346 return isinstance(instance, BaseInMemoryCommit)
1346
1347
1347
1348
1348 class BaseInMemoryChangeset(BaseInMemoryCommit):
1349 class BaseInMemoryChangeset(BaseInMemoryCommit):
1349
1350
1350 __metaclass__ = BaseInMemoryChangesetClass
1351 __metaclass__ = BaseInMemoryChangesetClass
1351
1352
1352 def __new__(cls, *args, **kwargs):
1353 def __new__(cls, *args, **kwargs):
1353 warnings.warn(
1354 warnings.warn(
1354 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1355 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1355 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1356 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1356
1357
1357
1358
1358 class EmptyCommit(BaseCommit):
1359 class EmptyCommit(BaseCommit):
1359 """
1360 """
1360 An dummy empty commit. It's possible to pass hash when creating
1361 An dummy empty commit. It's possible to pass hash when creating
1361 an EmptyCommit
1362 an EmptyCommit
1362 """
1363 """
1363
1364
1364 def __init__(
1365 def __init__(
1365 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1366 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1366 message='', author='', date=None):
1367 message='', author='', date=None):
1367 self._empty_commit_id = commit_id
1368 self._empty_commit_id = commit_id
1368 # TODO: johbo: Solve idx parameter, default value does not make
1369 # TODO: johbo: Solve idx parameter, default value does not make
1369 # too much sense
1370 # too much sense
1370 self.idx = idx
1371 self.idx = idx
1371 self.message = message
1372 self.message = message
1372 self.author = author
1373 self.author = author
1373 self.date = date or datetime.datetime.fromtimestamp(0)
1374 self.date = date or datetime.datetime.fromtimestamp(0)
1374 self.repository = repo
1375 self.repository = repo
1375 self.alias = alias
1376 self.alias = alias
1376
1377
1377 @LazyProperty
1378 @LazyProperty
1378 def raw_id(self):
1379 def raw_id(self):
1379 """
1380 """
1380 Returns raw string identifying this commit, useful for web
1381 Returns raw string identifying this commit, useful for web
1381 representation.
1382 representation.
1382 """
1383 """
1383
1384
1384 return self._empty_commit_id
1385 return self._empty_commit_id
1385
1386
1386 @LazyProperty
1387 @LazyProperty
1387 def branch(self):
1388 def branch(self):
1388 if self.alias:
1389 if self.alias:
1389 from rhodecode.lib.vcs.backends import get_backend
1390 from rhodecode.lib.vcs.backends import get_backend
1390 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1391 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1391
1392
1392 @LazyProperty
1393 @LazyProperty
1393 def short_id(self):
1394 def short_id(self):
1394 return self.raw_id[:12]
1395 return self.raw_id[:12]
1395
1396
1396 @LazyProperty
1397 @LazyProperty
1397 def id(self):
1398 def id(self):
1398 return self.raw_id
1399 return self.raw_id
1399
1400
1400 def get_file_commit(self, path):
1401 def get_file_commit(self, path):
1401 return self
1402 return self
1402
1403
1403 def get_file_content(self, path):
1404 def get_file_content(self, path):
1404 return u''
1405 return u''
1405
1406
1406 def get_file_size(self, path):
1407 def get_file_size(self, path):
1407 return 0
1408 return 0
1408
1409
1409
1410
1410 class EmptyChangesetClass(type):
1411 class EmptyChangesetClass(type):
1411
1412
1412 def __instancecheck__(self, instance):
1413 def __instancecheck__(self, instance):
1413 return isinstance(instance, EmptyCommit)
1414 return isinstance(instance, EmptyCommit)
1414
1415
1415
1416
1416 class EmptyChangeset(EmptyCommit):
1417 class EmptyChangeset(EmptyCommit):
1417
1418
1418 __metaclass__ = EmptyChangesetClass
1419 __metaclass__ = EmptyChangesetClass
1419
1420
1420 def __new__(cls, *args, **kwargs):
1421 def __new__(cls, *args, **kwargs):
1421 warnings.warn(
1422 warnings.warn(
1422 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1423 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1423 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1424 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1424
1425
1425 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1426 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1426 alias=None, revision=-1, message='', author='', date=None):
1427 alias=None, revision=-1, message='', author='', date=None):
1427 if requested_revision is not None:
1428 if requested_revision is not None:
1428 warnings.warn(
1429 warnings.warn(
1429 "Parameter requested_revision not supported anymore",
1430 "Parameter requested_revision not supported anymore",
1430 DeprecationWarning)
1431 DeprecationWarning)
1431 super(EmptyChangeset, self).__init__(
1432 super(EmptyChangeset, self).__init__(
1432 commit_id=cs, repo=repo, alias=alias, idx=revision,
1433 commit_id=cs, repo=repo, alias=alias, idx=revision,
1433 message=message, author=author, date=date)
1434 message=message, author=author, date=date)
1434
1435
1435 @property
1436 @property
1436 def revision(self):
1437 def revision(self):
1437 warnings.warn("Use idx instead", DeprecationWarning)
1438 warnings.warn("Use idx instead", DeprecationWarning)
1438 return self.idx
1439 return self.idx
1439
1440
1440 @revision.setter
1441 @revision.setter
1441 def revision(self, value):
1442 def revision(self, value):
1442 warnings.warn("Use idx instead", DeprecationWarning)
1443 warnings.warn("Use idx instead", DeprecationWarning)
1443 self.idx = value
1444 self.idx = value
1444
1445
1445
1446
1446 class EmptyRepository(BaseRepository):
1447 class EmptyRepository(BaseRepository):
1447 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1448 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1448 pass
1449 pass
1449
1450
1450 def get_diff(self, *args, **kwargs):
1451 def get_diff(self, *args, **kwargs):
1451 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1452 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1452 return GitDiff('')
1453 return GitDiff('')
1453
1454
1454
1455
1455 class CollectionGenerator(object):
1456 class CollectionGenerator(object):
1456
1457
1457 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1458 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1458 self.repo = repo
1459 self.repo = repo
1459 self.commit_ids = commit_ids
1460 self.commit_ids = commit_ids
1460 # TODO: (oliver) this isn't currently hooked up
1461 # TODO: (oliver) this isn't currently hooked up
1461 self.collection_size = None
1462 self.collection_size = None
1462 self.pre_load = pre_load
1463 self.pre_load = pre_load
1463
1464
1464 def __len__(self):
1465 def __len__(self):
1465 if self.collection_size is not None:
1466 if self.collection_size is not None:
1466 return self.collection_size
1467 return self.collection_size
1467 return self.commit_ids.__len__()
1468 return self.commit_ids.__len__()
1468
1469
1469 def __iter__(self):
1470 def __iter__(self):
1470 for commit_id in self.commit_ids:
1471 for commit_id in self.commit_ids:
1471 # TODO: johbo: Mercurial passes in commit indices or commit ids
1472 # TODO: johbo: Mercurial passes in commit indices or commit ids
1472 yield self._commit_factory(commit_id)
1473 yield self._commit_factory(commit_id)
1473
1474
1474 def _commit_factory(self, commit_id):
1475 def _commit_factory(self, commit_id):
1475 """
1476 """
1476 Allows backends to override the way commits are generated.
1477 Allows backends to override the way commits are generated.
1477 """
1478 """
1478 return self.repo.get_commit(commit_id=commit_id,
1479 return self.repo.get_commit(commit_id=commit_id,
1479 pre_load=self.pre_load)
1480 pre_load=self.pre_load)
1480
1481
1481 def __getslice__(self, i, j):
1482 def __getslice__(self, i, j):
1482 """
1483 """
1483 Returns an iterator of sliced repository
1484 Returns an iterator of sliced repository
1484 """
1485 """
1485 commit_ids = self.commit_ids[i:j]
1486 commit_ids = self.commit_ids[i:j]
1486 return self.__class__(
1487 return self.__class__(
1487 self.repo, commit_ids, pre_load=self.pre_load)
1488 self.repo, commit_ids, pre_load=self.pre_load)
1488
1489
1489 def __repr__(self):
1490 def __repr__(self):
1490 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1491 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1491
1492
1492
1493
1493 class Config(object):
1494 class Config(object):
1494 """
1495 """
1495 Represents the configuration for a repository.
1496 Represents the configuration for a repository.
1496
1497
1497 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1498 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1498 standard library. It implements only the needed subset.
1499 standard library. It implements only the needed subset.
1499 """
1500 """
1500
1501
1501 def __init__(self):
1502 def __init__(self):
1502 self._values = {}
1503 self._values = {}
1503
1504
1504 def copy(self):
1505 def copy(self):
1505 clone = Config()
1506 clone = Config()
1506 for section, values in self._values.items():
1507 for section, values in self._values.items():
1507 clone._values[section] = values.copy()
1508 clone._values[section] = values.copy()
1508 return clone
1509 return clone
1509
1510
1510 def __repr__(self):
1511 def __repr__(self):
1511 return '<Config(%s sections) at %s>' % (
1512 return '<Config(%s sections) at %s>' % (
1512 len(self._values), hex(id(self)))
1513 len(self._values), hex(id(self)))
1513
1514
1514 def items(self, section):
1515 def items(self, section):
1515 return self._values.get(section, {}).iteritems()
1516 return self._values.get(section, {}).iteritems()
1516
1517
1517 def get(self, section, option):
1518 def get(self, section, option):
1518 return self._values.get(section, {}).get(option)
1519 return self._values.get(section, {}).get(option)
1519
1520
1520 def set(self, section, option, value):
1521 def set(self, section, option, value):
1521 section_values = self._values.setdefault(section, {})
1522 section_values = self._values.setdefault(section, {})
1522 section_values[option] = value
1523 section_values[option] = value
1523
1524
1524 def clear_section(self, section):
1525 def clear_section(self, section):
1525 self._values[section] = {}
1526 self._values[section] = {}
1526
1527
1527 def serialize(self):
1528 def serialize(self):
1528 """
1529 """
1529 Creates a list of three tuples (section, key, value) representing
1530 Creates a list of three tuples (section, key, value) representing
1530 this config object.
1531 this config object.
1531 """
1532 """
1532 items = []
1533 items = []
1533 for section in self._values:
1534 for section in self._values:
1534 for option, value in self._values[section].items():
1535 for option, value in self._values[section].items():
1535 items.append(
1536 items.append(
1536 (safe_str(section), safe_str(option), safe_str(value)))
1537 (safe_str(section), safe_str(option), safe_str(value)))
1537 return items
1538 return items
1538
1539
1539
1540
1540 class Diff(object):
1541 class Diff(object):
1541 """
1542 """
1542 Represents a diff result from a repository backend.
1543 Represents a diff result from a repository backend.
1543
1544
1544 Subclasses have to provide a backend specific value for
1545 Subclasses have to provide a backend specific value for
1545 :attr:`_header_re` and :attr:`_meta_re`.
1546 :attr:`_header_re` and :attr:`_meta_re`.
1546 """
1547 """
1547 _meta_re = None
1548 _meta_re = None
1548 _header_re = None
1549 _header_re = None
1549
1550
1550 def __init__(self, raw_diff):
1551 def __init__(self, raw_diff):
1551 self.raw = raw_diff
1552 self.raw = raw_diff
1552
1553
1553 def chunks(self):
1554 def chunks(self):
1554 """
1555 """
1555 split the diff in chunks of separate --git a/file b/file chunks
1556 split the diff in chunks of separate --git a/file b/file chunks
1556 to make diffs consistent we must prepend with \n, and make sure
1557 to make diffs consistent we must prepend with \n, and make sure
1557 we can detect last chunk as this was also has special rule
1558 we can detect last chunk as this was also has special rule
1558 """
1559 """
1559
1560
1560 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1561 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1561 header = diff_parts[0]
1562 header = diff_parts[0]
1562
1563
1563 if self._meta_re:
1564 if self._meta_re:
1564 match = self._meta_re.match(header)
1565 match = self._meta_re.match(header)
1565
1566
1566 chunks = diff_parts[1:]
1567 chunks = diff_parts[1:]
1567 total_chunks = len(chunks)
1568 total_chunks = len(chunks)
1568
1569
1569 return (
1570 return (
1570 DiffChunk(chunk, self, cur_chunk == total_chunks)
1571 DiffChunk(chunk, self, cur_chunk == total_chunks)
1571 for cur_chunk, chunk in enumerate(chunks, start=1))
1572 for cur_chunk, chunk in enumerate(chunks, start=1))
1572
1573
1573
1574
1574 class DiffChunk(object):
1575 class DiffChunk(object):
1575
1576
1576 def __init__(self, chunk, diff, last_chunk):
1577 def __init__(self, chunk, diff, last_chunk):
1577 self._diff = diff
1578 self._diff = diff
1578
1579
1579 # since we split by \ndiff --git that part is lost from original diff
1580 # since we split by \ndiff --git that part is lost from original diff
1580 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1581 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1581 if not last_chunk:
1582 if not last_chunk:
1582 chunk += '\n'
1583 chunk += '\n'
1583
1584
1584 match = self._diff._header_re.match(chunk)
1585 match = self._diff._header_re.match(chunk)
1585 self.header = match.groupdict()
1586 self.header = match.groupdict()
1586 self.diff = chunk[match.end():]
1587 self.diff = chunk[match.end():]
1587 self.raw = chunk
1588 self.raw = chunk
@@ -1,527 +1,538 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from ConfigParser import ConfigParser
27 from ConfigParser import ConfigParser
28 from itertools import chain
28 from itertools import chain
29 from StringIO import StringIO
29 from StringIO import StringIO
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.utils2 import safe_int
36 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.conf import settings
37 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends import base
38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 from rhodecode.lib.vcs.nodes import (
39 from rhodecode.lib.vcs.nodes import (
40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 RemovedFileNodesGenerator)
42 RemovedFileNodesGenerator, LargeFileNode)
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents state of the repository at single commit id.
47 Represents state of the repository at single commit id.
48 """
48 """
49 _author_property = 'author'
49 _author_property = 'author'
50 _committer_property = 'committer'
50 _committer_property = 'committer'
51 _date_property = 'commit_time'
51 _date_property = 'commit_time'
52 _date_tz_property = 'commit_timezone'
52 _date_tz_property = 'commit_timezone'
53 _message_property = 'message'
53 _message_property = 'message'
54 _parents_property = 'parents'
54 _parents_property = 'parents'
55
55
56 _filter_pre_load = [
56 _filter_pre_load = [
57 # done through a more complex tree walk on parents
57 # done through a more complex tree walk on parents
58 "affected_files",
58 "affected_files",
59 # based on repository cached property
59 # based on repository cached property
60 "branch",
60 "branch",
61 # done through subprocess not remote call
61 # done through subprocess not remote call
62 "children",
62 "children",
63 # done through a more complex tree walk on parents
63 # done through a more complex tree walk on parents
64 "status",
64 "status",
65 # mercurial specific property not supported here
65 # mercurial specific property not supported here
66 "_file_paths",
66 "_file_paths",
67 ]
67 ]
68
68
69 def __init__(self, repository, raw_id, idx, pre_load=None):
69 def __init__(self, repository, raw_id, idx, pre_load=None):
70 self.repository = repository
70 self.repository = repository
71 self._remote = repository._remote
71 self._remote = repository._remote
72 # TODO: johbo: Tweak of raw_id should not be necessary
72 # TODO: johbo: Tweak of raw_id should not be necessary
73 self.raw_id = safe_str(raw_id)
73 self.raw_id = safe_str(raw_id)
74 self.idx = idx
74 self.idx = idx
75
75
76 self._set_bulk_properties(pre_load)
76 self._set_bulk_properties(pre_load)
77
77
78 # caches
78 # caches
79 self._stat_modes = {} # stat info for paths
79 self._stat_modes = {} # stat info for paths
80 self._paths = {} # path processed with parse_tree
80 self._paths = {} # path processed with parse_tree
81 self.nodes = {}
81 self.nodes = {}
82 self._submodules = None
82 self._submodules = None
83
83
84 def _set_bulk_properties(self, pre_load):
84 def _set_bulk_properties(self, pre_load):
85 if not pre_load:
85 if not pre_load:
86 return
86 return
87 pre_load = [entry for entry in pre_load
87 pre_load = [entry for entry in pre_load
88 if entry not in self._filter_pre_load]
88 if entry not in self._filter_pre_load]
89 if not pre_load:
89 if not pre_load:
90 return
90 return
91
91
92 result = self._remote.bulk_request(self.raw_id, pre_load)
92 result = self._remote.bulk_request(self.raw_id, pre_load)
93 for attr, value in result.items():
93 for attr, value in result.items():
94 if attr in ["author", "message"]:
94 if attr in ["author", "message"]:
95 if value:
95 if value:
96 value = safe_unicode(value)
96 value = safe_unicode(value)
97 elif attr == "date":
97 elif attr == "date":
98 value = utcdate_fromtimestamp(*value)
98 value = utcdate_fromtimestamp(*value)
99 elif attr == "parents":
99 elif attr == "parents":
100 value = self._make_commits(value)
100 value = self._make_commits(value)
101 self.__dict__[attr] = value
101 self.__dict__[attr] = value
102
102
@LazyProperty
def _commit(self):
    # Raw commit object, fetched lazily from the remote vcsserver.
    return self._remote[self.raw_id]

@LazyProperty
def _tree_id(self):
    # Id of the root tree object of this commit.
    return self._remote[self._commit['tree']]['id']

@LazyProperty
def id(self):
    """Full sha of this commit."""
    return self.raw_id

@LazyProperty
def short_id(self):
    """Abbreviated (12 character) sha."""
    return self.raw_id[:12]

@LazyProperty
def message(self):
    """Commit message as unicode."""
    raw = self._remote.commit_attribute(self.id, self._message_property)
    return safe_unicode(raw)

@LazyProperty
def committer(self):
    """Committer line as unicode."""
    raw = self._remote.commit_attribute(self.id, self._committer_property)
    return safe_unicode(raw)

@LazyProperty
def author(self):
    """Author line as unicode."""
    raw = self._remote.commit_attribute(self.id, self._author_property)
    return safe_unicode(raw)

@LazyProperty
def date(self):
    """Commit date as an UTC ``datetime``."""
    timestamp, tz_offset = self._remote.get_object_attrs(
        self.raw_id, self._date_property, self._date_tz_property)
    return utcdate_fromtimestamp(timestamp, tz_offset)
139
139
@LazyProperty
def status(self):
    """
    Returns a three element tuple of (changed, added, removed) file
    node collections for the current commit.

    Note: the previous docstring advertised a separate "deleted" entry,
    but the implementation returns exactly three elements; removals are
    the third element.
    """
    return self.changed, self.added, self.removed
146
146
@LazyProperty
def tags(self):
    """List of unicode tag names pointing at this commit."""
    return [
        safe_unicode(name)
        for name, commit_id in self.repository.tags.iteritems()
        if commit_id == self.raw_id]

@LazyProperty
def branch(self):
    """
    Unicode name of the branch whose head is this commit, or ``None``
    when no branch points directly at it.
    """
    matches = (
        safe_unicode(name)
        for name, commit_id in self.repository.branches.iteritems()
        if commit_id == self.raw_id)
    return next(matches, None)
160
160
def _get_id_for_path(self, path):
    """
    Resolves `path` to a ``[object_id, object_type]`` pair by walking
    the commit tree one directory level at a time, caching every
    visited entry in ``self._paths`` along the way.

    :raises CommitError: when a path component is missing or is not a
        directory.
    """
    path = safe_str(path)
    if path in self._paths:
        return self._paths[path]

    tree_id = self._tree_id

    path = path.strip('/')
    if path == '':
        data = [tree_id, "tree"]
        self._paths[''] = data
        return data

    parts = path.split('/')
    dirs, name = parts[:-1], parts[-1]
    cur_dir = ''

    # initially extract things from root dir
    tree_items = self._remote.tree_items(tree_id)
    self._process_tree_items(tree_items, cur_dir)

    # renamed loop variable: the original shadowed the builtin ``dir``
    for dir_name in dirs:
        if cur_dir:
            cur_dir = '/'.join((cur_dir, dir_name))
        else:
            cur_dir = dir_name
        dir_id = None
        for item, stat_, id_, type_ in tree_items:
            if item == dir_name:
                dir_id = id_
                break
        if dir_id:
            # type_ is bound to the matched entry thanks to the break above
            if type_ != "tree":
                raise CommitError('%s is not a directory' % cur_dir)
            # update tree
            tree_items = self._remote.tree_items(dir_id)
        else:
            # message grammar fixed ("have" -> "has")
            raise CommitError('%s has not been found' % cur_dir)

    # cache all items from the given traversed tree
    self._process_tree_items(tree_items, cur_dir)

    if path not in self._paths:
        raise self.no_node_at_path(path)

    return self._paths[path]
207
207
208 def _process_tree_items(self, items, cur_dir):
208 def _process_tree_items(self, items, cur_dir):
209 for item, stat_, id_, type_ in items:
209 for item, stat_, id_, type_ in items:
210 if cur_dir:
210 if cur_dir:
211 name = '/'.join((cur_dir, item))
211 name = '/'.join((cur_dir, item))
212 else:
212 else:
213 name = item
213 name = item
214 self._paths[name] = [id_, type_]
214 self._paths[name] = [id_, type_]
215 self._stat_modes[name] = stat_
215 self._stat_modes[name] = stat_
216
216
def _get_kind(self, path):
    """
    Maps the git object type at `path` onto a ``NodeKind``.

    Returns ``NodeKind.FILE`` for blobs, ``NodeKind.DIR`` for trees,
    ``NodeKind.SUBMODULE`` for gitlinks and ``None`` otherwise.
    """
    path_id, type_ = self._get_id_for_path(path)
    if type_ == 'blob':
        return NodeKind.FILE
    elif type_ == 'tree':
        return NodeKind.DIR
    elif type_ == 'link':
        # bugfix: the original compared the *builtin* ``type`` against
        # 'link' (always False), so submodules were never detected here
        return NodeKind.SUBMODULE
    return None
226
226
def _get_filectx(self, path):
    """
    Validates that `path` points at a file in this commit and returns
    the normalized path.

    :raises CommitError: when there is no file at `path`.
    """
    path = self._fix_path(path)
    if self._get_kind(path) != NodeKind.FILE:
        raise CommitError(
            "File does not exist for commit %s at '%s'" %
            (self.raw_id, path))
    return path

def _get_file_nodes(self):
    # Flattens the (dir, dirs, files) triples of walk() into one
    # iterable of file nodes.
    return chain(*(t[2] for t in self.walk()))

@LazyProperty
def parents(self):
    """
    Returns list of parent commits.
    """
    parent_ids = self._remote.commit_attribute(
        self.id, self._parents_property)
    return self._make_commits(parent_ids)

@LazyProperty
def children(self):
    """
    Returns list of child commits.
    """
    rev_filter = settings.GIT_REV_FILTER
    output, __ = self.repository.run_git_command(
        ['rev-list', '--children'] + rev_filter)

    # each output line is "<sha> <child sha> <child sha> ..."; collect
    # children from the lines describing this very commit
    child_ids = []
    matcher = re.compile(r'^%s' % self.raw_id)
    for line in output.splitlines():
        if matcher.match(line):
            child_ids.extend(line.split(' ')[1:])
    return self._make_commits(child_ids)

def _make_commits(self, commit_ids):
    # Materializes raw ids into commit objects via the repository.
    return [self.repository.get_commit(commit_id=commit_id)
            for commit_id in commit_ids]
267
267
def get_file_mode(self, path):
    """
    Returns stat mode of the file at the given `path`.
    """
    path = safe_str(path)
    # ensure path is traversed
    self._get_id_for_path(path)
    return self._stat_modes[path]

def is_link(self, path):
    # True when the stat mode of `path` marks a symbolic link.
    return stat.S_ISLNK(self.get_file_mode(path))

def get_file_content(self, path):
    """
    Returns content of the file at given `path`.
    """
    blob_id, _ = self._get_id_for_path(path)
    return self._remote.blob_as_pretty_string(blob_id)

def get_file_size(self, path):
    """
    Returns size of the file at given `path`.
    """
    blob_id, _ = self._get_id_for_path(path)
    return self._remote.blob_raw_length(blob_id)

def get_file_history(self, path, limit=None, pre_load=None):
    """
    Returns history of file as reversed list of `GitCommit` objects for
    which file at given `path` has been modified.

    TODO: This function now uses an underlying 'git' command which works
    quickly but ideally we should replace with an algorithm.
    """
    self._get_filectx(path)
    f_path = safe_str(path)

    cmd = ['log']
    if limit:
        cmd.extend(['-n', str(safe_int(limit, 0))])
    cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])

    output, __ = self.repository.run_git_command(cmd)
    found_ids = re.findall(r'[0-9a-fA-F]{40}', output)

    return [self.repository.get_commit(commit_id=cid, pre_load=pre_load)
            for cid in found_ids]

# TODO: unused for now potential replacement for subprocess
def get_file_history_2(self, path, limit=None, pre_load=None):
    """
    Returns history of file as reversed list of `Commit` objects for
    which file at given `path` has been modified.
    """
    self._get_filectx(path)
    f_path = safe_str(path)

    found_ids = self._remote.get_file_history(f_path, self.id, limit)

    return [self.repository.get_commit(commit_id=cid, pre_load=pre_load)
            for cid in found_ids]
331
331
def get_file_annotate(self, path, pre_load=None):
    """
    Returns a generator of four element tuples with
    lineno, commit_id, commit lazy loader and line

    TODO: This function now uses os underlying 'git' command which is
    generally not good. Should be replaced with algorithm iterating
    commits.
    """
    cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
    # -l ==> outputs long shas (and we need all 40 characters)
    # --root ==> doesn't put '^' character for bounderies
    # -r commit_id ==> blames for the given commit
    output, __ = self.repository.run_git_command(cmd)

    for i, blame_line in enumerate(output.split('\n')[:-1]):
        line_no = i + 1
        commit_id, line = re.split(r' ', blame_line, 1)
        # bugfix: bind commit_id as a default argument. The previous
        # code captured the loop variable by reference, so every lazy
        # loader resolved to whatever blame line the iteration had
        # advanced to by the time it was called.
        yield (
            line_no, commit_id,
            lambda commit_id=commit_id: self.repository.get_commit(
                commit_id=commit_id, pre_load=pre_load),
            line)
355
355
def get_nodes(self, path):
    """
    Returns a combined, sorted list of ``DirNode`` and ``FileNode``
    objects for the directory at `path`; gitlinks are returned as
    ``SubModuleNode`` objects. All returned nodes are cached in
    ``self.nodes``.

    :raises CommitError: when `path` is not a directory or the tree
        contains an unexpected object type.
    """
    if self._get_kind(path) != NodeKind.DIR:
        raise CommitError(
            "Directory does not exist for commit %s at "
            " '%s'" % (self.raw_id, path))
    path = self._fix_path(path)
    id_, _ = self._get_id_for_path(path)
    tree_id = self._remote[id_]['id']
    dirnodes = []
    filenodes = []
    alias = self.repository.alias
    for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
        if type_ == 'link':
            url = self._get_submodule_url('/'.join((path, name)))
            dirnodes.append(SubModuleNode(
                name, url=url, commit=id_, alias=alias))
            continue

        if path != '':
            obj_path = '/'.join((path, name))
        else:
            obj_path = name
        if obj_path not in self._stat_modes:
            self._stat_modes[obj_path] = stat_

        if type_ == 'tree':
            dirnodes.append(DirNode(obj_path, commit=self))
        elif type_ == 'blob':
            filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
        else:
            # bugfix: interpolate the type into the message; previously
            # the format string and the value were passed as two
            # separate exception arguments, leaving "%s" unexpanded
            raise CommitError(
                "Requested object should be Tree or Blob, is %s" % type_)

    nodes = dirnodes + filenodes
    for node in nodes:
        if node.path not in self.nodes:
            self.nodes[node.path] = node
    nodes.sort()
    return nodes
395
395
def get_node(self, path, pre_load=None):
    """
    Returns (and caches) the node living at `path` in this commit.

    :raises NodeDoesNotExistError: when the path cannot be resolved.
    """
    if isinstance(path, unicode):
        path = path.encode('utf-8')
    path = self._fix_path(path)
    if path not in self.nodes:
        try:
            id_, type_ = self._get_id_for_path(path)
        except CommitError:
            raise NodeDoesNotExistError(
                "Cannot find one of parents' directories for a given "
                "path: %s" % path)

        if type_ == 'link':
            url = self._get_submodule_url(path)
            node = SubModuleNode(path, url=url, commit=id_,
                                 alias=self.repository.alias)
        elif type_ == 'tree':
            if path == '':
                node = RootNode(commit=self)
            else:
                node = DirNode(path, commit=self)
        elif type_ == 'blob':
            node = FileNode(path, commit=self, pre_load=pre_load)
        else:
            raise self.no_node_at_path(path)

        # cache node
        self.nodes[path] = node
    return self.nodes[path]

def get_largefile_node(self, path):
    """
    Returns a ``LargeFileNode`` for `path` when the blob there is a
    largefile pointer and the referenced object is present in the local
    largefiles store; otherwise returns ``None``.
    """
    blob_id, _ = self._get_id_for_path(path)
    pointer_spec = self._remote.is_large_file(blob_id)
    if not pointer_spec:
        return None

    # content of that regular FileNode is the hash of the largefile
    file_id = pointer_spec.get('oid_hash')
    if not self._remote.in_largefiles_store(file_id):
        # pointer exists but the object was never downloaded
        return None
    lf_path = self._remote.store_path(file_id)
    return LargeFileNode(lf_path, commit=self, org_path=path)
436
@LazyProperty
def affected_files(self):
    """
    Gets a fast accessible file changes for given commit
    """
    added, modified, deleted = self._changes_cache
    return list(added | modified | deleted)

@LazyProperty
def _changes_cache(self):
    # Computes the (added, modified, deleted) path sets by diffing this
    # commit's tree against each parent (or the empty tree for roots).
    added = set()
    modified = set()
    deleted = set()
    _r = self._remote

    parents = self.parents or [base.EmptyCommit()]
    for parent in parents:
        if isinstance(parent, base.EmptyCommit):
            oid = None
        else:
            oid = parent.raw_id
        for (oldpath, newpath), (_, _), (_, _) in _r.tree_changes(
                oid, self.raw_id):
            if oldpath and newpath:
                modified.add(newpath)
            elif newpath:
                added.add(newpath)
            elif oldpath:
                deleted.add(oldpath)
    return added, modified, deleted

def _get_paths_for_status(self, status):
    """
    Returns sorted list of paths for given ``status``.

    :param status: one of: *added*, *modified* or *deleted*
    """
    added, modified, deleted = self._changes_cache
    by_status = {
        'added': list(added),
        'modified': list(modified),
        'deleted': list(deleted),
    }
    return sorted(by_status[status])

@LazyProperty
def added(self):
    """
    Returns list of added ``FileNode`` objects.
    """
    if not self.parents:
        # root commit: everything it contains counts as added
        return list(self._get_file_nodes())
    paths = list(self._get_paths_for_status('added'))
    return AddedFileNodesGenerator(paths, self)

@LazyProperty
def changed(self):
    """
    Returns list of modified ``FileNode`` objects.
    """
    if not self.parents:
        return []
    paths = list(self._get_paths_for_status('modified'))
    return ChangedFileNodesGenerator(paths, self)

@LazyProperty
def removed(self):
    """
    Returns list of removed ``FileNode`` objects.
    """
    if not self.parents:
        return []
    paths = list(self._get_paths_for_status('deleted'))
    return RemovedFileNodesGenerator(paths, self)
512
def _get_submodule_url(self, submodule_path):
    """
    Resolves the remote url of the submodule registered at
    `submodule_path` by parsing ``.gitmodules``; the parsed mapping is
    cached on first use. Returns ``None`` when no ``.gitmodules`` file
    exists in this commit.
    """
    git_modules_path = '.gitmodules'

    if self._submodules is None:
        self._submodules = {}

        try:
            submodules_node = self.get_node(git_modules_path)
        except NodeDoesNotExistError:
            return None

        # ConfigParser fails if there are whitespaces
        content = '\n'.join(
            line.strip() for line in submodules_node.content.split('\n'))

        parser = ConfigParser()
        parser.readfp(StringIO(content))

        for section in parser.sections():
            path = parser.get(section, 'path')
            url = parser.get(section, 'url')
            if path and url:
                self._submodules[path.strip('/')] = url

    return self._submodules.get(submodule_path.strip('/'))
@@ -1,362 +1,362 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.utils import safe_str, safe_unicode
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
34 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
38 LargeFileNode, LARGEFILE_PREFIX)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
40
41
41
42 class MercurialCommit(base.BaseCommit):
42 class MercurialCommit(base.BaseCommit):
43 """
43 """
44 Represents state of the repository at the single commit.
44 Represents state of the repository at the single commit.
45 """
45 """
46
46
47 _filter_pre_load = [
47 _filter_pre_load = [
48 # git specific property not supported here
48 # git specific property not supported here
49 "_commit",
49 "_commit",
50 ]
50 ]
51
51
def __init__(self, repository, raw_id, idx, pre_load=None):
    # Bind the owning repository and its remote proxy first.
    self.repository = repository
    self._remote = repository._remote

    self.raw_id = safe_str(raw_id)
    self.idx = repository._sanitize_commit_idx(idx)

    self._set_bulk_properties(pre_load)

    # node cache, filled lazily by get_node()
    self.nodes = {}

def _set_bulk_properties(self, pre_load):
    """
    Pre-populates lazy attributes with a single ``bulk_request`` call,
    skipping entries listed in ``_filter_pre_load`` (git-only
    properties that Mercurial does not support).
    """
    if not pre_load:
        return
    wanted = [name for name in pre_load
              if name not in self._filter_pre_load]
    if not wanted:
        return

    fetched = self._remote.bulk_request(self.idx, wanted)
    for name, raw in fetched.items():
        if name in ("author", "branch", "message"):
            raw = safe_unicode(raw)
        elif name == "affected_files":
            raw = map(safe_unicode, raw)
        elif name == "date":
            raw = utcdate_fromtimestamp(*raw)
        elif name in ("children", "parents"):
            raw = self._make_commits(raw)
        self.__dict__[name] = raw
85
85
@LazyProperty
def tags(self):
    """List of tag names pointing at this commit."""
    return [name for name, commit_id in self.repository.tags.iteritems()
            if commit_id == self.raw_id]

@LazyProperty
def branch(self):
    """Unicode branch name of this changeset."""
    return safe_unicode(self._remote.ctx_branch(self.idx))

@LazyProperty
def bookmarks(self):
    """List of bookmark names pointing at this commit."""
    return [
        name for name, commit_id in self.repository.bookmarks.iteritems()
        if commit_id == self.raw_id]

@LazyProperty
def message(self):
    """Changeset description as unicode."""
    return safe_unicode(self._remote.ctx_description(self.idx))

@LazyProperty
def committer(self):
    # Mercurial does not track a separate committer; reuse the author.
    return safe_unicode(self.author)

@LazyProperty
def author(self):
    """Author line as unicode."""
    return safe_unicode(self._remote.ctx_user(self.idx))

@LazyProperty
def date(self):
    """Commit date as an UTC ``datetime``."""
    return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))

@LazyProperty
def status(self):
    """
    Returns modified, added, removed, deleted files for current commit
    """
    return self._remote.ctx_status(self.idx)
125
125
@LazyProperty
def _file_paths(self):
    # All file paths tracked in this changeset.
    return self._remote.ctx_list(self.idx)

@LazyProperty
def _dir_paths(self):
    # Unique directory paths derived from the file list, with the
    # repository root ('') always first.
    dirs = list(set(get_dirs_for_path(*self._file_paths)))
    dirs.insert(0, '')
    return dirs

@LazyProperty
def _paths(self):
    # Directories first, then files.
    return self._dir_paths + self._file_paths

@LazyProperty
def id(self):
    """Symbolic id: ``tip`` for the last commit, short sha otherwise."""
    return u'tip' if self.last else self.short_id

@LazyProperty
def short_id(self):
    """Abbreviated (12 character) sha."""
    return self.raw_id[:12]

def _make_commits(self, indexes):
    # Negative indexes mark missing parents/children and are skipped.
    return [self.repository.get_commit(commit_idx=idx)
            for idx in indexes if idx >= 0]

@LazyProperty
def parents(self):
    """
    Returns list of parent commits.
    """
    return self._make_commits(self._remote.ctx_parents(self.idx))

@LazyProperty
def children(self):
    """
    Returns list of child commits.
    """
    return self._make_commits(self._remote.ctx_children(self.idx))
169
169
170 def diff(self, ignore_whitespace=True, context=3):
170 def diff(self, ignore_whitespace=True, context=3):
171 result = self._remote.ctx_diff(
171 result = self._remote.ctx_diff(
172 self.idx,
172 self.idx,
173 git=True, ignore_whitespace=ignore_whitespace, context=context)
173 git=True, ignore_whitespace=ignore_whitespace, context=context)
174 diff = ''.join(result)
174 diff = ''.join(result)
175 return MercurialDiff(diff)
175 return MercurialDiff(diff)
176
176
177 def _fix_path(self, path):
177 def _fix_path(self, path):
178 """
178 """
179 Mercurial keeps filenodes as str so we need to encode from unicode
179 Mercurial keeps filenodes as str so we need to encode from unicode
180 to str.
180 to str.
181 """
181 """
182 return safe_str(super(MercurialCommit, self)._fix_path(path))
182 return safe_str(super(MercurialCommit, self)._fix_path(path))
183
183
184 def _get_kind(self, path):
184 def _get_kind(self, path):
185 path = self._fix_path(path)
185 path = self._fix_path(path)
186 if path in self._file_paths:
186 if path in self._file_paths:
187 return NodeKind.FILE
187 return NodeKind.FILE
188 elif path in self._dir_paths:
188 elif path in self._dir_paths:
189 return NodeKind.DIR
189 return NodeKind.DIR
190 else:
190 else:
191 raise CommitError(
191 raise CommitError(
192 "Node does not exist at the given path '%s'" % (path, ))
192 "Node does not exist at the given path '%s'" % (path, ))
193
193
194 def _get_filectx(self, path):
194 def _get_filectx(self, path):
195 path = self._fix_path(path)
195 path = self._fix_path(path)
196 if self._get_kind(path) != NodeKind.FILE:
196 if self._get_kind(path) != NodeKind.FILE:
197 raise CommitError(
197 raise CommitError(
198 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
198 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
199 return path
199 return path
200
200
201 def get_file_mode(self, path):
201 def get_file_mode(self, path):
202 """
202 """
203 Returns stat mode of the file at the given ``path``.
203 Returns stat mode of the file at the given ``path``.
204 """
204 """
205 path = self._get_filectx(path)
205 path = self._get_filectx(path)
206 if 'x' in self._remote.fctx_flags(self.idx, path):
206 if 'x' in self._remote.fctx_flags(self.idx, path):
207 return base.FILEMODE_EXECUTABLE
207 return base.FILEMODE_EXECUTABLE
208 else:
208 else:
209 return base.FILEMODE_DEFAULT
209 return base.FILEMODE_DEFAULT
210
210
211 def is_link(self, path):
211 def is_link(self, path):
212 path = self._get_filectx(path)
212 path = self._get_filectx(path)
213 return 'l' in self._remote.fctx_flags(self.idx, path)
213 return 'l' in self._remote.fctx_flags(self.idx, path)
214
214
215 def get_file_content(self, path):
215 def get_file_content(self, path):
216 """
216 """
217 Returns content of the file at given ``path``.
217 Returns content of the file at given ``path``.
218 """
218 """
219 path = self._get_filectx(path)
219 path = self._get_filectx(path)
220 return self._remote.fctx_data(self.idx, path)
220 return self._remote.fctx_data(self.idx, path)
221
221
222 def get_file_size(self, path):
222 def get_file_size(self, path):
223 """
223 """
224 Returns size of the file at given ``path``.
224 Returns size of the file at given ``path``.
225 """
225 """
226 path = self._get_filectx(path)
226 path = self._get_filectx(path)
227 return self._remote.fctx_size(self.idx, path)
227 return self._remote.fctx_size(self.idx, path)
228
228
229 def get_file_history(self, path, limit=None, pre_load=None):
229 def get_file_history(self, path, limit=None, pre_load=None):
230 """
230 """
231 Returns history of file as reversed list of `MercurialCommit` objects
231 Returns history of file as reversed list of `MercurialCommit` objects
232 for which file at given ``path`` has been modified.
232 for which file at given ``path`` has been modified.
233 """
233 """
234 path = self._get_filectx(path)
234 path = self._get_filectx(path)
235 hist = self._remote.file_history(self.idx, path, limit)
235 hist = self._remote.file_history(self.idx, path, limit)
236 return [
236 return [
237 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
237 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
238 for commit_id in hist]
238 for commit_id in hist]
239
239
240 def get_file_annotate(self, path, pre_load=None):
240 def get_file_annotate(self, path, pre_load=None):
241 """
241 """
242 Returns a generator of four element tuples with
242 Returns a generator of four element tuples with
243 lineno, commit_id, commit lazy loader and line
243 lineno, commit_id, commit lazy loader and line
244 """
244 """
245 result = self._remote.fctx_annotate(self.idx, path)
245 result = self._remote.fctx_annotate(self.idx, path)
246
246
247 for ln_no, commit_id, content in result:
247 for ln_no, commit_id, content in result:
248 yield (
248 yield (
249 ln_no, commit_id,
249 ln_no, commit_id,
250 lambda: self.repository.get_commit(commit_id=commit_id,
250 lambda: self.repository.get_commit(commit_id=commit_id,
251 pre_load=pre_load),
251 pre_load=pre_load),
252 content)
252 content)
253
253
254 def get_nodes(self, path):
254 def get_nodes(self, path):
255 """
255 """
256 Returns combined ``DirNode`` and ``FileNode`` objects list representing
256 Returns combined ``DirNode`` and ``FileNode`` objects list representing
257 state of commit at the given ``path``. If node at the given ``path``
257 state of commit at the given ``path``. If node at the given ``path``
258 is not instance of ``DirNode``, CommitError would be raised.
258 is not instance of ``DirNode``, CommitError would be raised.
259 """
259 """
260
260
261 if self._get_kind(path) != NodeKind.DIR:
261 if self._get_kind(path) != NodeKind.DIR:
262 raise CommitError(
262 raise CommitError(
263 "Directory does not exist for idx %s at '%s'" %
263 "Directory does not exist for idx %s at '%s'" %
264 (self.idx, path))
264 (self.idx, path))
265 path = self._fix_path(path)
265 path = self._fix_path(path)
266
266
267 filenodes = [
267 filenodes = [
268 FileNode(f, commit=self) for f in self._file_paths
268 FileNode(f, commit=self) for f in self._file_paths
269 if os.path.dirname(f) == path]
269 if os.path.dirname(f) == path]
270 # TODO: johbo: Check if this can be done in a more obvious way
270 # TODO: johbo: Check if this can be done in a more obvious way
271 dirs = path == '' and '' or [
271 dirs = path == '' and '' or [
272 d for d in self._dir_paths
272 d for d in self._dir_paths
273 if d and vcspath.dirname(d) == path]
273 if d and vcspath.dirname(d) == path]
274 dirnodes = [
274 dirnodes = [
275 DirNode(d, commit=self) for d in dirs
275 DirNode(d, commit=self) for d in dirs
276 if os.path.dirname(d) == path]
276 if os.path.dirname(d) == path]
277
277
278 alias = self.repository.alias
278 alias = self.repository.alias
279 for k, vals in self._submodules.iteritems():
279 for k, vals in self._submodules.iteritems():
280 loc = vals[0]
280 loc = vals[0]
281 commit = vals[1]
281 commit = vals[1]
282 dirnodes.append(
282 dirnodes.append(
283 SubModuleNode(k, url=loc, commit=commit, alias=alias))
283 SubModuleNode(k, url=loc, commit=commit, alias=alias))
284 nodes = dirnodes + filenodes
284 nodes = dirnodes + filenodes
285 # cache nodes
285 # cache nodes
286 for node in nodes:
286 for node in nodes:
287 self.nodes[node.path] = node
287 self.nodes[node.path] = node
288 nodes.sort()
288 nodes.sort()
289
289
290 return nodes
290 return nodes
291
291
292 def get_node(self, path, pre_load=None):
292 def get_node(self, path, pre_load=None):
293 """
293 """
294 Returns `Node` object from the given `path`. If there is no node at
294 Returns `Node` object from the given `path`. If there is no node at
295 the given `path`, `NodeDoesNotExistError` would be raised.
295 the given `path`, `NodeDoesNotExistError` would be raised.
296 """
296 """
297 path = self._fix_path(path)
297 path = self._fix_path(path)
298
298
299 if path not in self.nodes:
299 if path not in self.nodes:
300 if path in self._file_paths:
300 if path in self._file_paths:
301 node = FileNode(path, commit=self, pre_load=pre_load)
301 node = FileNode(path, commit=self, pre_load=pre_load)
302 elif path in self._dir_paths:
302 elif path in self._dir_paths:
303 if path == '':
303 if path == '':
304 node = RootNode(commit=self)
304 node = RootNode(commit=self)
305 else:
305 else:
306 node = DirNode(path, commit=self)
306 node = DirNode(path, commit=self)
307 else:
307 else:
308 raise self.no_node_at_path(path)
308 raise self.no_node_at_path(path)
309
309
310 # cache node
310 # cache node
311 self.nodes[path] = node
311 self.nodes[path] = node
312 return self.nodes[path]
312 return self.nodes[path]
313
313
314 def get_largefile_node(self, path):
314 def get_largefile_node(self, path):
315 path = os.path.join(LARGEFILE_PREFIX, path)
316
315
317 if self._remote.is_large_file(path):
316 if self._remote.is_large_file(path):
318 # content of that file regular FileNode is the hash of largefile
317 # content of that file regular FileNode is the hash of largefile
319 file_id = self.get_file_content(path).strip()
318 file_id = self.get_file_content(path).strip()
320 if self._remote.in_store(file_id):
319
321 path = self._remote.store_path(file_id)
320 if self._remote.in_largefiles_store(file_id):
322 return LargeFileNode(path, commit=self)
321 lf_path = self._remote.store_path(file_id)
322 return LargeFileNode(lf_path, commit=self, org_path=path)
323 elif self._remote.in_user_cache(file_id):
323 elif self._remote.in_user_cache(file_id):
324 path = self._remote.store_path(file_id)
324 lf_path = self._remote.store_path(file_id)
325 self._remote.link(file_id, path)
325 self._remote.link(file_id, path)
326 return LargeFileNode(path, commit=self)
326 return LargeFileNode(lf_path, commit=self, org_path=path)
327
327
328 @LazyProperty
328 @LazyProperty
329 def _submodules(self):
329 def _submodules(self):
330 """
330 """
331 Returns a dictionary with submodule information from substate file
331 Returns a dictionary with submodule information from substate file
332 of hg repository.
332 of hg repository.
333 """
333 """
334 return self._remote.ctx_substate(self.idx)
334 return self._remote.ctx_substate(self.idx)
335
335
336 @LazyProperty
336 @LazyProperty
337 def affected_files(self):
337 def affected_files(self):
338 """
338 """
339 Gets a fast accessible file changes for given commit
339 Gets a fast accessible file changes for given commit
340 """
340 """
341 return self._remote.ctx_files(self.idx)
341 return self._remote.ctx_files(self.idx)
342
342
343 @property
343 @property
344 def added(self):
344 def added(self):
345 """
345 """
346 Returns list of added ``FileNode`` objects.
346 Returns list of added ``FileNode`` objects.
347 """
347 """
348 return AddedFileNodesGenerator([n for n in self.status[1]], self)
348 return AddedFileNodesGenerator([n for n in self.status[1]], self)
349
349
350 @property
350 @property
351 def changed(self):
351 def changed(self):
352 """
352 """
353 Returns list of modified ``FileNode`` objects.
353 Returns list of modified ``FileNode`` objects.
354 """
354 """
355 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
355 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
356
356
357 @property
357 @property
358 def removed(self):
358 def removed(self):
359 """
359 """
360 Returns list of removed ``FileNode`` objects.
360 Returns list of removed ``FileNode`` objects.
361 """
361 """
362 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
362 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,779 +1,800 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module holding everything related to vcs nodes, with vcs2 architecture.
22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 """
23 """
24
24
25
25 import os
26 import stat
26 import stat
27
27
28 from zope.cachedescriptors.property import Lazy as LazyProperty
28 from zope.cachedescriptors.property import Lazy as LazyProperty
29
29
30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 from rhodecode.lib.utils import safe_unicode, safe_str
31 from rhodecode.lib.utils import safe_unicode, safe_str
32 from rhodecode.lib.utils2 import md5
32 from rhodecode.lib.utils2 import md5
33 from rhodecode.lib.vcs import path as vcspath
33 from rhodecode.lib.vcs import path as vcspath
34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37
37
38 LARGEFILE_PREFIX = '.hglf'
38 LARGEFILE_PREFIX = '.hglf'
39
39
40
40
41 class NodeKind:
41 class NodeKind:
42 SUBMODULE = -1
42 SUBMODULE = -1
43 DIR = 1
43 DIR = 1
44 FILE = 2
44 FILE = 2
45 LARGEFILE = 3
45 LARGEFILE = 3
46
46
47
47
48 class NodeState:
48 class NodeState:
49 ADDED = u'added'
49 ADDED = u'added'
50 CHANGED = u'changed'
50 CHANGED = u'changed'
51 NOT_CHANGED = u'not changed'
51 NOT_CHANGED = u'not changed'
52 REMOVED = u'removed'
52 REMOVED = u'removed'
53
53
54
54
55 class NodeGeneratorBase(object):
55 class NodeGeneratorBase(object):
56 """
56 """
57 Base class for removed added and changed filenodes, it's a lazy generator
57 Base class for removed added and changed filenodes, it's a lazy generator
58 class that will create filenodes only on iteration or call
58 class that will create filenodes only on iteration or call
59
59
60 The len method doesn't need to create filenodes at all
60 The len method doesn't need to create filenodes at all
61 """
61 """
62
62
63 def __init__(self, current_paths, cs):
63 def __init__(self, current_paths, cs):
64 self.cs = cs
64 self.cs = cs
65 self.current_paths = current_paths
65 self.current_paths = current_paths
66
66
67 def __call__(self):
67 def __call__(self):
68 return [n for n in self]
68 return [n for n in self]
69
69
70 def __getslice__(self, i, j):
70 def __getslice__(self, i, j):
71 for p in self.current_paths[i:j]:
71 for p in self.current_paths[i:j]:
72 yield self.cs.get_node(p)
72 yield self.cs.get_node(p)
73
73
74 def __len__(self):
74 def __len__(self):
75 return len(self.current_paths)
75 return len(self.current_paths)
76
76
77 def __iter__(self):
77 def __iter__(self):
78 for p in self.current_paths:
78 for p in self.current_paths:
79 yield self.cs.get_node(p)
79 yield self.cs.get_node(p)
80
80
81
81
82 class AddedFileNodesGenerator(NodeGeneratorBase):
82 class AddedFileNodesGenerator(NodeGeneratorBase):
83 """
83 """
84 Class holding added files for current commit
84 Class holding added files for current commit
85 """
85 """
86
86
87
87
88 class ChangedFileNodesGenerator(NodeGeneratorBase):
88 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 """
89 """
90 Class holding changed files for current commit
90 Class holding changed files for current commit
91 """
91 """
92
92
93
93
94 class RemovedFileNodesGenerator(NodeGeneratorBase):
94 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 """
95 """
96 Class holding removed files for current commit
96 Class holding removed files for current commit
97 """
97 """
98 def __iter__(self):
98 def __iter__(self):
99 for p in self.current_paths:
99 for p in self.current_paths:
100 yield RemovedFileNode(path=p)
100 yield RemovedFileNode(path=p)
101
101
102 def __getslice__(self, i, j):
102 def __getslice__(self, i, j):
103 for p in self.current_paths[i:j]:
103 for p in self.current_paths[i:j]:
104 yield RemovedFileNode(path=p)
104 yield RemovedFileNode(path=p)
105
105
106
106
107 class Node(object):
107 class Node(object):
108 """
108 """
109 Simplest class representing file or directory on repository. SCM backends
109 Simplest class representing file or directory on repository. SCM backends
110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 directly.
111 directly.
112
112
113 Node's ``path`` cannot start with slash as we operate on *relative* paths
113 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 only. Moreover, every single node is identified by the ``path`` attribute,
114 only. Moreover, every single node is identified by the ``path`` attribute,
115 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
115 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
116 """
116 """
117
117
118 commit = None
118 commit = None
119
119
120 def __init__(self, path, kind):
120 def __init__(self, path, kind):
121 self._validate_path(path) # can throw exception if path is invalid
121 self._validate_path(path) # can throw exception if path is invalid
122 self.path = safe_str(path.rstrip('/')) # we store paths as str
122 self.path = safe_str(path.rstrip('/')) # we store paths as str
123 if path == '' and kind != NodeKind.DIR:
123 if path == '' and kind != NodeKind.DIR:
124 raise NodeError("Only DirNode and its subclasses may be "
124 raise NodeError("Only DirNode and its subclasses may be "
125 "initialized with empty path")
125 "initialized with empty path")
126 self.kind = kind
126 self.kind = kind
127
127
128 if self.is_root() and not self.is_dir():
128 if self.is_root() and not self.is_dir():
129 raise NodeError("Root node cannot be FILE kind")
129 raise NodeError("Root node cannot be FILE kind")
130
130
131 def _validate_path(self, path):
131 def _validate_path(self, path):
132 if path.startswith('/'):
132 if path.startswith('/'):
133 raise NodeError(
133 raise NodeError(
134 "Cannot initialize Node objects with slash at "
134 "Cannot initialize Node objects with slash at "
135 "the beginning as only relative paths are supported. "
135 "the beginning as only relative paths are supported. "
136 "Got %s" % (path,))
136 "Got %s" % (path,))
137
137
138 @LazyProperty
138 @LazyProperty
139 def parent(self):
139 def parent(self):
140 parent_path = self.get_parent_path()
140 parent_path = self.get_parent_path()
141 if parent_path:
141 if parent_path:
142 if self.commit:
142 if self.commit:
143 return self.commit.get_node(parent_path)
143 return self.commit.get_node(parent_path)
144 return DirNode(parent_path)
144 return DirNode(parent_path)
145 return None
145 return None
146
146
147 @LazyProperty
147 @LazyProperty
148 def unicode_path(self):
148 def unicode_path(self):
149 return safe_unicode(self.path)
149 return safe_unicode(self.path)
150
150
151 @LazyProperty
151 @LazyProperty
152 def dir_path(self):
152 def dir_path(self):
153 """
153 """
154 Returns name of the directory from full path of this vcs node. Empty
154 Returns name of the directory from full path of this vcs node. Empty
155 string is returned if there's no directory in the path
155 string is returned if there's no directory in the path
156 """
156 """
157 _parts = self.path.rstrip('/').rsplit('/', 1)
157 _parts = self.path.rstrip('/').rsplit('/', 1)
158 if len(_parts) == 2:
158 if len(_parts) == 2:
159 return safe_unicode(_parts[0])
159 return safe_unicode(_parts[0])
160 return u''
160 return u''
161
161
162 @LazyProperty
162 @LazyProperty
163 def name(self):
163 def name(self):
164 """
164 """
165 Returns name of the node so if its path
165 Returns name of the node so if its path
166 then only last part is returned.
166 then only last part is returned.
167 """
167 """
168 return safe_unicode(self.path.rstrip('/').split('/')[-1])
168 return safe_unicode(self.path.rstrip('/').split('/')[-1])
169
169
170 @property
170 @property
171 def kind(self):
171 def kind(self):
172 return self._kind
172 return self._kind
173
173
174 @kind.setter
174 @kind.setter
175 def kind(self, kind):
175 def kind(self, kind):
176 if hasattr(self, '_kind'):
176 if hasattr(self, '_kind'):
177 raise NodeError("Cannot change node's kind")
177 raise NodeError("Cannot change node's kind")
178 else:
178 else:
179 self._kind = kind
179 self._kind = kind
180 # Post setter check (path's trailing slash)
180 # Post setter check (path's trailing slash)
181 if self.path.endswith('/'):
181 if self.path.endswith('/'):
182 raise NodeError("Node's path cannot end with slash")
182 raise NodeError("Node's path cannot end with slash")
183
183
184 def __cmp__(self, other):
184 def __cmp__(self, other):
185 """
185 """
186 Comparator using name of the node, needed for quick list sorting.
186 Comparator using name of the node, needed for quick list sorting.
187 """
187 """
188 kind_cmp = cmp(self.kind, other.kind)
188 kind_cmp = cmp(self.kind, other.kind)
189 if kind_cmp:
189 if kind_cmp:
190 return kind_cmp
190 return kind_cmp
191 return cmp(self.name, other.name)
191 return cmp(self.name, other.name)
192
192
193 def __eq__(self, other):
193 def __eq__(self, other):
194 for attr in ['name', 'path', 'kind']:
194 for attr in ['name', 'path', 'kind']:
195 if getattr(self, attr) != getattr(other, attr):
195 if getattr(self, attr) != getattr(other, attr):
196 return False
196 return False
197 if self.is_file():
197 if self.is_file():
198 if self.content != other.content:
198 if self.content != other.content:
199 return False
199 return False
200 else:
200 else:
201 # For DirNode's check without entering each dir
201 # For DirNode's check without entering each dir
202 self_nodes_paths = list(sorted(n.path for n in self.nodes))
202 self_nodes_paths = list(sorted(n.path for n in self.nodes))
203 other_nodes_paths = list(sorted(n.path for n in self.nodes))
203 other_nodes_paths = list(sorted(n.path for n in self.nodes))
204 if self_nodes_paths != other_nodes_paths:
204 if self_nodes_paths != other_nodes_paths:
205 return False
205 return False
206 return True
206 return True
207
207
208 def __ne__(self, other):
208 def __ne__(self, other):
209 return not self.__eq__(other)
209 return not self.__eq__(other)
210
210
211 def __repr__(self):
211 def __repr__(self):
212 return '<%s %r>' % (self.__class__.__name__, self.path)
212 return '<%s %r>' % (self.__class__.__name__, self.path)
213
213
214 def __str__(self):
214 def __str__(self):
215 return self.__repr__()
215 return self.__repr__()
216
216
217 def __unicode__(self):
217 def __unicode__(self):
218 return self.name
218 return self.name
219
219
220 def get_parent_path(self):
220 def get_parent_path(self):
221 """
221 """
222 Returns node's parent path or empty string if node is root.
222 Returns node's parent path or empty string if node is root.
223 """
223 """
224 if self.is_root():
224 if self.is_root():
225 return ''
225 return ''
226 return vcspath.dirname(self.path.rstrip('/')) + '/'
226 return vcspath.dirname(self.path.rstrip('/')) + '/'
227
227
228 def is_file(self):
228 def is_file(self):
229 """
229 """
230 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
230 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
231 otherwise.
231 otherwise.
232 """
232 """
233 return self.kind == NodeKind.FILE
233 return self.kind == NodeKind.FILE
234
234
235 def is_dir(self):
235 def is_dir(self):
236 """
236 """
237 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
237 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
238 otherwise.
238 otherwise.
239 """
239 """
240 return self.kind == NodeKind.DIR
240 return self.kind == NodeKind.DIR
241
241
242 def is_root(self):
242 def is_root(self):
243 """
243 """
244 Returns ``True`` if node is a root node and ``False`` otherwise.
244 Returns ``True`` if node is a root node and ``False`` otherwise.
245 """
245 """
246 return self.kind == NodeKind.DIR and self.path == ''
246 return self.kind == NodeKind.DIR and self.path == ''
247
247
248 def is_submodule(self):
248 def is_submodule(self):
249 """
249 """
250 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
250 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
251 otherwise.
251 otherwise.
252 """
252 """
253 return self.kind == NodeKind.SUBMODULE
253 return self.kind == NodeKind.SUBMODULE
254
254
255 def is_largefile(self):
255 def is_largefile(self):
256 """
256 """
257 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
257 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
258 otherwise
258 otherwise
259 """
259 """
260 return self.kind == NodeKind.LARGEFILE
260 return self.kind == NodeKind.LARGEFILE
261
261
262 def is_link(self):
262 def is_link(self):
263 if self.commit:
263 if self.commit:
264 return self.commit.is_link(self.path)
264 return self.commit.is_link(self.path)
265 return False
265 return False
266
266
267 @LazyProperty
267 @LazyProperty
268 def added(self):
268 def added(self):
269 return self.state is NodeState.ADDED
269 return self.state is NodeState.ADDED
270
270
271 @LazyProperty
271 @LazyProperty
272 def changed(self):
272 def changed(self):
273 return self.state is NodeState.CHANGED
273 return self.state is NodeState.CHANGED
274
274
275 @LazyProperty
275 @LazyProperty
276 def not_changed(self):
276 def not_changed(self):
277 return self.state is NodeState.NOT_CHANGED
277 return self.state is NodeState.NOT_CHANGED
278
278
279 @LazyProperty
279 @LazyProperty
280 def removed(self):
280 def removed(self):
281 return self.state is NodeState.REMOVED
281 return self.state is NodeState.REMOVED
282
282
283
283
284 class FileNode(Node):
284 class FileNode(Node):
285 """
285 """
286 Class representing file nodes.
286 Class representing file nodes.
287
287
288 :attribute: path: path to the node, relative to repository's root
288 :attribute: path: path to the node, relative to repository's root
289 :attribute: content: if given arbitrary sets content of the file
289 :attribute: content: if given arbitrary sets content of the file
290 :attribute: commit: if given, first time content is accessed, callback
290 :attribute: commit: if given, first time content is accessed, callback
291 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
291 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
292 """
292 """
293 _filter_pre_load = []
293 _filter_pre_load = []
294
294
295 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
295 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
296 """
296 """
297 Only one of ``content`` and ``commit`` may be given. Passing both
297 Only one of ``content`` and ``commit`` may be given. Passing both
298 would raise ``NodeError`` exception.
298 would raise ``NodeError`` exception.
299
299
300 :param path: relative path to the node
300 :param path: relative path to the node
301 :param content: content may be passed to constructor
301 :param content: content may be passed to constructor
302 :param commit: if given, will use it to lazily fetch content
302 :param commit: if given, will use it to lazily fetch content
303 :param mode: ST_MODE (i.e. 0100644)
303 :param mode: ST_MODE (i.e. 0100644)
304 """
304 """
305 if content and commit:
305 if content and commit:
306 raise NodeError("Cannot use both content and commit")
306 raise NodeError("Cannot use both content and commit")
307 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
307 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
308 self.commit = commit
308 self.commit = commit
309 self._content = content
309 self._content = content
310 self._mode = mode or FILEMODE_DEFAULT
310 self._mode = mode or FILEMODE_DEFAULT
311
311
312 self._set_bulk_properties(pre_load)
312 self._set_bulk_properties(pre_load)
313
313
314 def _set_bulk_properties(self, pre_load):
314 def _set_bulk_properties(self, pre_load):
315 if not pre_load:
315 if not pre_load:
316 return
316 return
317 pre_load = [entry for entry in pre_load
317 pre_load = [entry for entry in pre_load
318 if entry not in self._filter_pre_load]
318 if entry not in self._filter_pre_load]
319 if not pre_load:
319 if not pre_load:
320 return
320 return
321
321
322 for attr_name in pre_load:
322 for attr_name in pre_load:
323 result = getattr(self, attr_name)
323 result = getattr(self, attr_name)
324 if callable(result):
324 if callable(result):
325 result = result()
325 result = result()
326 self.__dict__[attr_name] = result
326 self.__dict__[attr_name] = result
327
327
328 @LazyProperty
328 @LazyProperty
329 def mode(self):
329 def mode(self):
330 """
330 """
331 Returns lazily mode of the FileNode. If `commit` is not set, would
331 Returns lazily mode of the FileNode. If `commit` is not set, would
332 use value given at initialization or `FILEMODE_DEFAULT` (default).
332 use value given at initialization or `FILEMODE_DEFAULT` (default).
333 """
333 """
334 if self.commit:
334 if self.commit:
335 mode = self.commit.get_file_mode(self.path)
335 mode = self.commit.get_file_mode(self.path)
336 else:
336 else:
337 mode = self._mode
337 mode = self._mode
338 return mode
338 return mode
339
339
340 @LazyProperty
340 @LazyProperty
341 def raw_bytes(self):
341 def raw_bytes(self):
342 """
342 """
343 Returns lazily the raw bytes of the FileNode.
343 Returns lazily the raw bytes of the FileNode.
344 """
344 """
345 if self.commit:
345 if self.commit:
346 if self._content is None:
346 if self._content is None:
347 self._content = self.commit.get_file_content(self.path)
347 self._content = self.commit.get_file_content(self.path)
348 content = self._content
348 content = self._content
349 else:
349 else:
350 content = self._content
350 content = self._content
351 return content
351 return content
352
352
353 @LazyProperty
353 @LazyProperty
354 def md5(self):
354 def md5(self):
355 """
355 """
356 Returns md5 of the file node.
356 Returns md5 of the file node.
357 """
357 """
358 return md5(self.raw_bytes)
358 return md5(self.raw_bytes)
359
359
360 @LazyProperty
360 @LazyProperty
361 def content(self):
361 def content(self):
362 """
362 """
363 Returns lazily content of the FileNode. If possible, would try to
363 Returns lazily content of the FileNode. If possible, would try to
364 decode content from UTF-8.
364 decode content from UTF-8.
365 """
365 """
366 content = self.raw_bytes
366 content = self.raw_bytes
367
367
368 if self.is_binary:
368 if self.is_binary:
369 return content
369 return content
370 return safe_unicode(content)
370 return safe_unicode(content)
371
371
372 @LazyProperty
372 @LazyProperty
373 def size(self):
373 def size(self):
374 if self.commit:
374 if self.commit:
375 return self.commit.get_file_size(self.path)
375 return self.commit.get_file_size(self.path)
376 raise NodeError(
376 raise NodeError(
377 "Cannot retrieve size of the file without related "
377 "Cannot retrieve size of the file without related "
378 "commit attribute")
378 "commit attribute")
379
379
380 @LazyProperty
380 @LazyProperty
381 def message(self):
381 def message(self):
382 if self.commit:
382 if self.commit:
383 return self.last_commit.message
383 return self.last_commit.message
384 raise NodeError(
384 raise NodeError(
385 "Cannot retrieve message of the file without related "
385 "Cannot retrieve message of the file without related "
386 "commit attribute")
386 "commit attribute")
387
387
388 @LazyProperty
388 @LazyProperty
389 def last_commit(self):
389 def last_commit(self):
390 if self.commit:
390 if self.commit:
391 pre_load = ["author", "date", "message"]
391 pre_load = ["author", "date", "message"]
392 return self.commit.get_file_commit(self.path, pre_load=pre_load)
392 return self.commit.get_file_commit(self.path, pre_load=pre_load)
393 raise NodeError(
393 raise NodeError(
394 "Cannot retrieve last commit of the file without "
394 "Cannot retrieve last commit of the file without "
395 "related commit attribute")
395 "related commit attribute")
396
396
397 def get_mimetype(self):
397 def get_mimetype(self):
398 """
398 """
399 Mimetype is calculated based on the file's content. If ``_mimetype``
399 Mimetype is calculated based on the file's content. If ``_mimetype``
400 attribute is available, it will be returned (backends which store
400 attribute is available, it will be returned (backends which store
401 mimetypes or can easily recognize them, should set this private
401 mimetypes or can easily recognize them, should set this private
402 attribute to indicate that type should *NOT* be calculated).
402 attribute to indicate that type should *NOT* be calculated).
403 """
403 """
404
404
405 if hasattr(self, '_mimetype'):
405 if hasattr(self, '_mimetype'):
406 if (isinstance(self._mimetype, (tuple, list,)) and
406 if (isinstance(self._mimetype, (tuple, list,)) and
407 len(self._mimetype) == 2):
407 len(self._mimetype) == 2):
408 return self._mimetype
408 return self._mimetype
409 else:
409 else:
410 raise NodeError('given _mimetype attribute must be an 2 '
410 raise NodeError('given _mimetype attribute must be an 2 '
411 'element list or tuple')
411 'element list or tuple')
412
412
413 db = get_mimetypes_db()
413 db = get_mimetypes_db()
414 mtype, encoding = db.guess_type(self.name)
414 mtype, encoding = db.guess_type(self.name)
415
415
416 if mtype is None:
416 if mtype is None:
417 if self.is_binary:
417 if self.is_binary:
418 mtype = 'application/octet-stream'
418 mtype = 'application/octet-stream'
419 encoding = None
419 encoding = None
420 else:
420 else:
421 mtype = 'text/plain'
421 mtype = 'text/plain'
422 encoding = None
422 encoding = None
423
423
424 # try with pygments
424 # try with pygments
425 try:
425 try:
426 from pygments.lexers import get_lexer_for_filename
426 from pygments.lexers import get_lexer_for_filename
427 mt = get_lexer_for_filename(self.name).mimetypes
427 mt = get_lexer_for_filename(self.name).mimetypes
428 except Exception:
428 except Exception:
429 mt = None
429 mt = None
430
430
431 if mt:
431 if mt:
432 mtype = mt[0]
432 mtype = mt[0]
433
433
434 return mtype, encoding
434 return mtype, encoding
435
435
436 @LazyProperty
436 @LazyProperty
437 def mimetype(self):
437 def mimetype(self):
438 """
438 """
439 Wrapper around full mimetype info. It returns only type of fetched
439 Wrapper around full mimetype info. It returns only type of fetched
440 mimetype without the encoding part. use get_mimetype function to fetch
440 mimetype without the encoding part. use get_mimetype function to fetch
441 full set of (type,encoding)
441 full set of (type,encoding)
442 """
442 """
443 return self.get_mimetype()[0]
443 return self.get_mimetype()[0]
444
444
445 @LazyProperty
445 @LazyProperty
446 def mimetype_main(self):
446 def mimetype_main(self):
447 return self.mimetype.split('/')[0]
447 return self.mimetype.split('/')[0]
448
448
449 @classmethod
449 @classmethod
450 def get_lexer(cls, filename, content=None):
450 def get_lexer(cls, filename, content=None):
451 from pygments import lexers
451 from pygments import lexers
452
452
453 extension = filename.split('.')[-1]
453 extension = filename.split('.')[-1]
454 lexer = None
454 lexer = None
455
455
456 try:
456 try:
457 lexer = lexers.guess_lexer_for_filename(
457 lexer = lexers.guess_lexer_for_filename(
458 filename, content, stripnl=False)
458 filename, content, stripnl=False)
459 except lexers.ClassNotFound:
459 except lexers.ClassNotFound:
460 lexer = None
460 lexer = None
461
461
462 # try our EXTENSION_MAP
462 # try our EXTENSION_MAP
463 if not lexer:
463 if not lexer:
464 try:
464 try:
465 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
465 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
466 if lexer_class:
466 if lexer_class:
467 lexer = lexers.get_lexer_by_name(lexer_class[0])
467 lexer = lexers.get_lexer_by_name(lexer_class[0])
468 except lexers.ClassNotFound:
468 except lexers.ClassNotFound:
469 lexer = None
469 lexer = None
470
470
471 if not lexer:
471 if not lexer:
472 lexer = lexers.TextLexer(stripnl=False)
472 lexer = lexers.TextLexer(stripnl=False)
473
473
474 return lexer
474 return lexer
475
475
476 @LazyProperty
476 @LazyProperty
477 def lexer(self):
477 def lexer(self):
478 """
478 """
479 Returns pygment's lexer class. Would try to guess lexer taking file's
479 Returns pygment's lexer class. Would try to guess lexer taking file's
480 content, name and mimetype.
480 content, name and mimetype.
481 """
481 """
482 return self.get_lexer(self.name, self.content)
482 return self.get_lexer(self.name, self.content)
483
483
484 @LazyProperty
484 @LazyProperty
485 def lexer_alias(self):
485 def lexer_alias(self):
486 """
486 """
487 Returns first alias of the lexer guessed for this file.
487 Returns first alias of the lexer guessed for this file.
488 """
488 """
489 return self.lexer.aliases[0]
489 return self.lexer.aliases[0]
490
490
491 @LazyProperty
491 @LazyProperty
492 def history(self):
492 def history(self):
493 """
493 """
494 Returns a list of commit for this file in which the file was changed
494 Returns a list of commit for this file in which the file was changed
495 """
495 """
496 if self.commit is None:
496 if self.commit is None:
497 raise NodeError('Unable to get commit for this FileNode')
497 raise NodeError('Unable to get commit for this FileNode')
498 return self.commit.get_file_history(self.path)
498 return self.commit.get_file_history(self.path)
499
499
500 @LazyProperty
500 @LazyProperty
501 def annotate(self):
501 def annotate(self):
502 """
502 """
503 Returns a list of three element tuples with lineno, commit and line
503 Returns a list of three element tuples with lineno, commit and line
504 """
504 """
505 if self.commit is None:
505 if self.commit is None:
506 raise NodeError('Unable to get commit for this FileNode')
506 raise NodeError('Unable to get commit for this FileNode')
507 pre_load = ["author", "date", "message"]
507 pre_load = ["author", "date", "message"]
508 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
508 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
509
509
510 @LazyProperty
510 @LazyProperty
511 def state(self):
511 def state(self):
512 if not self.commit:
512 if not self.commit:
513 raise NodeError(
513 raise NodeError(
514 "Cannot check state of the node if it's not "
514 "Cannot check state of the node if it's not "
515 "linked with commit")
515 "linked with commit")
516 elif self.path in (node.path for node in self.commit.added):
516 elif self.path in (node.path for node in self.commit.added):
517 return NodeState.ADDED
517 return NodeState.ADDED
518 elif self.path in (node.path for node in self.commit.changed):
518 elif self.path in (node.path for node in self.commit.changed):
519 return NodeState.CHANGED
519 return NodeState.CHANGED
520 else:
520 else:
521 return NodeState.NOT_CHANGED
521 return NodeState.NOT_CHANGED
522
522
523 @LazyProperty
523 @LazyProperty
524 def is_binary(self):
524 def is_binary(self):
525 """
525 """
526 Returns True if file has binary content.
526 Returns True if file has binary content.
527 """
527 """
528 _bin = self.raw_bytes and '\0' in self.raw_bytes
528 _bin = self.raw_bytes and '\0' in self.raw_bytes
529 return _bin
529 return _bin
530
530
531 @LazyProperty
531 @LazyProperty
532 def extension(self):
532 def extension(self):
533 """Returns filenode extension"""
533 """Returns filenode extension"""
534 return self.name.split('.')[-1]
534 return self.name.split('.')[-1]
535
535
536 @property
536 @property
537 def is_executable(self):
537 def is_executable(self):
538 """
538 """
539 Returns ``True`` if file has executable flag turned on.
539 Returns ``True`` if file has executable flag turned on.
540 """
540 """
541 return bool(self.mode & stat.S_IXUSR)
541 return bool(self.mode & stat.S_IXUSR)
542
542
543 def get_largefile_node(self):
543 def get_largefile_node(self):
544 """
544 """
545 Try to return a Mercurial FileNode from this node. It does internal
545 Try to return a Mercurial FileNode from this node. It does internal
546 checks inside largefile store, if that file exist there it will
546 checks inside largefile store, if that file exist there it will
547 create special instance of LargeFileNode which can get content from
547 create special instance of LargeFileNode which can get content from
548 LF store.
548 LF store.
549 """
549 """
550 if self.commit and self.path.startswith(LARGEFILE_PREFIX):
550 if self.commit:
551 largefile_path = self.path.split(LARGEFILE_PREFIX)[-1].lstrip('/')
551 return self.commit.get_largefile_node(self.path)
552 return self.commit.get_largefile_node(largefile_path)
553
552
554 def lines(self, count_empty=False):
553 def lines(self, count_empty=False):
555 all_lines, empty_lines = 0, 0
554 all_lines, empty_lines = 0, 0
556
555
557 if not self.is_binary:
556 if not self.is_binary:
558 content = self.content
557 content = self.content
559 if count_empty:
558 if count_empty:
560 all_lines = 0
559 all_lines = 0
561 empty_lines = 0
560 empty_lines = 0
562 for line in content.splitlines(True):
561 for line in content.splitlines(True):
563 if line == '\n':
562 if line == '\n':
564 empty_lines += 1
563 empty_lines += 1
565 all_lines += 1
564 all_lines += 1
566
565
567 return all_lines, all_lines - empty_lines
566 return all_lines, all_lines - empty_lines
568 else:
567 else:
569 # fast method
568 # fast method
570 empty_lines = all_lines = content.count('\n')
569 empty_lines = all_lines = content.count('\n')
571 if all_lines == 0 and content:
570 if all_lines == 0 and content:
572 # one-line without a newline
571 # one-line without a newline
573 empty_lines = all_lines = 1
572 empty_lines = all_lines = 1
574
573
575 return all_lines, empty_lines
574 return all_lines, empty_lines
576
575
577 def __repr__(self):
576 def __repr__(self):
578 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
577 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
579 getattr(self.commit, 'short_id', ''))
578 getattr(self.commit, 'short_id', ''))
580
579
581
580
582 class RemovedFileNode(FileNode):
581 class RemovedFileNode(FileNode):
583 """
582 """
584 Dummy FileNode class - trying to access any public attribute except path,
583 Dummy FileNode class - trying to access any public attribute except path,
585 name, kind or state (or methods/attributes checking those two) would raise
584 name, kind or state (or methods/attributes checking those two) would raise
586 RemovedFileNodeError.
585 RemovedFileNodeError.
587 """
586 """
588 ALLOWED_ATTRIBUTES = [
587 ALLOWED_ATTRIBUTES = [
589 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
588 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
590 'added', 'changed', 'not_changed', 'removed'
589 'added', 'changed', 'not_changed', 'removed'
591 ]
590 ]
592
591
593 def __init__(self, path):
592 def __init__(self, path):
594 """
593 """
595 :param path: relative path to the node
594 :param path: relative path to the node
596 """
595 """
597 super(RemovedFileNode, self).__init__(path=path)
596 super(RemovedFileNode, self).__init__(path=path)
598
597
599 def __getattribute__(self, attr):
598 def __getattribute__(self, attr):
600 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
599 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
601 return super(RemovedFileNode, self).__getattribute__(attr)
600 return super(RemovedFileNode, self).__getattribute__(attr)
602 raise RemovedFileNodeError(
601 raise RemovedFileNodeError(
603 "Cannot access attribute %s on RemovedFileNode" % attr)
602 "Cannot access attribute %s on RemovedFileNode" % attr)
604
603
605 @LazyProperty
604 @LazyProperty
606 def state(self):
605 def state(self):
607 return NodeState.REMOVED
606 return NodeState.REMOVED
608
607
609
608
610 class DirNode(Node):
609 class DirNode(Node):
611 """
610 """
612 DirNode stores list of files and directories within this node.
611 DirNode stores list of files and directories within this node.
613 Nodes may be used standalone but within repository context they
612 Nodes may be used standalone but within repository context they
614 lazily fetch data within same repositorty's commit.
613 lazily fetch data within same repositorty's commit.
615 """
614 """
616
615
617 def __init__(self, path, nodes=(), commit=None):
616 def __init__(self, path, nodes=(), commit=None):
618 """
617 """
619 Only one of ``nodes`` and ``commit`` may be given. Passing both
618 Only one of ``nodes`` and ``commit`` may be given. Passing both
620 would raise ``NodeError`` exception.
619 would raise ``NodeError`` exception.
621
620
622 :param path: relative path to the node
621 :param path: relative path to the node
623 :param nodes: content may be passed to constructor
622 :param nodes: content may be passed to constructor
624 :param commit: if given, will use it to lazily fetch content
623 :param commit: if given, will use it to lazily fetch content
625 """
624 """
626 if nodes and commit:
625 if nodes and commit:
627 raise NodeError("Cannot use both nodes and commit")
626 raise NodeError("Cannot use both nodes and commit")
628 super(DirNode, self).__init__(path, NodeKind.DIR)
627 super(DirNode, self).__init__(path, NodeKind.DIR)
629 self.commit = commit
628 self.commit = commit
630 self._nodes = nodes
629 self._nodes = nodes
631
630
632 @LazyProperty
631 @LazyProperty
633 def content(self):
632 def content(self):
634 raise NodeError(
633 raise NodeError(
635 "%s represents a dir and has no `content` attribute" % self)
634 "%s represents a dir and has no `content` attribute" % self)
636
635
637 @LazyProperty
636 @LazyProperty
638 def nodes(self):
637 def nodes(self):
639 if self.commit:
638 if self.commit:
640 nodes = self.commit.get_nodes(self.path)
639 nodes = self.commit.get_nodes(self.path)
641 else:
640 else:
642 nodes = self._nodes
641 nodes = self._nodes
643 self._nodes_dict = dict((node.path, node) for node in nodes)
642 self._nodes_dict = dict((node.path, node) for node in nodes)
644 return sorted(nodes)
643 return sorted(nodes)
645
644
646 @LazyProperty
645 @LazyProperty
647 def files(self):
646 def files(self):
648 return sorted((node for node in self.nodes if node.is_file()))
647 return sorted((node for node in self.nodes if node.is_file()))
649
648
650 @LazyProperty
649 @LazyProperty
651 def dirs(self):
650 def dirs(self):
652 return sorted((node for node in self.nodes if node.is_dir()))
651 return sorted((node for node in self.nodes if node.is_dir()))
653
652
654 def __iter__(self):
653 def __iter__(self):
655 for node in self.nodes:
654 for node in self.nodes:
656 yield node
655 yield node
657
656
658 def get_node(self, path):
657 def get_node(self, path):
659 """
658 """
660 Returns node from within this particular ``DirNode``, so it is now
659 Returns node from within this particular ``DirNode``, so it is now
661 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
660 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
662 'docs'. In order to access deeper nodes one must fetch nodes between
661 'docs'. In order to access deeper nodes one must fetch nodes between
663 them first - this would work::
662 them first - this would work::
664
663
665 docs = root.get_node('docs')
664 docs = root.get_node('docs')
666 docs.get_node('api').get_node('index.rst')
665 docs.get_node('api').get_node('index.rst')
667
666
668 :param: path - relative to the current node
667 :param: path - relative to the current node
669
668
670 .. note::
669 .. note::
671 To access lazily (as in example above) node have to be initialized
670 To access lazily (as in example above) node have to be initialized
672 with related commit object - without it node is out of
671 with related commit object - without it node is out of
673 context and may know nothing about anything else than nearest
672 context and may know nothing about anything else than nearest
674 (located at same level) nodes.
673 (located at same level) nodes.
675 """
674 """
676 try:
675 try:
677 path = path.rstrip('/')
676 path = path.rstrip('/')
678 if path == '':
677 if path == '':
679 raise NodeError("Cannot retrieve node without path")
678 raise NodeError("Cannot retrieve node without path")
680 self.nodes # access nodes first in order to set _nodes_dict
679 self.nodes # access nodes first in order to set _nodes_dict
681 paths = path.split('/')
680 paths = path.split('/')
682 if len(paths) == 1:
681 if len(paths) == 1:
683 if not self.is_root():
682 if not self.is_root():
684 path = '/'.join((self.path, paths[0]))
683 path = '/'.join((self.path, paths[0]))
685 else:
684 else:
686 path = paths[0]
685 path = paths[0]
687 return self._nodes_dict[path]
686 return self._nodes_dict[path]
688 elif len(paths) > 1:
687 elif len(paths) > 1:
689 if self.commit is None:
688 if self.commit is None:
690 raise NodeError(
689 raise NodeError(
691 "Cannot access deeper nodes without commit")
690 "Cannot access deeper nodes without commit")
692 else:
691 else:
693 path1, path2 = paths[0], '/'.join(paths[1:])
692 path1, path2 = paths[0], '/'.join(paths[1:])
694 return self.get_node(path1).get_node(path2)
693 return self.get_node(path1).get_node(path2)
695 else:
694 else:
696 raise KeyError
695 raise KeyError
697 except KeyError:
696 except KeyError:
698 raise NodeError("Node does not exist at %s" % path)
697 raise NodeError("Node does not exist at %s" % path)
699
698
700 @LazyProperty
699 @LazyProperty
701 def state(self):
700 def state(self):
702 raise NodeError("Cannot access state of DirNode")
701 raise NodeError("Cannot access state of DirNode")
703
702
704 @LazyProperty
703 @LazyProperty
705 def size(self):
704 def size(self):
706 size = 0
705 size = 0
707 for root, dirs, files in self.commit.walk(self.path):
706 for root, dirs, files in self.commit.walk(self.path):
708 for f in files:
707 for f in files:
709 size += f.size
708 size += f.size
710
709
711 return size
710 return size
712
711
713 def __repr__(self):
712 def __repr__(self):
714 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
713 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
715 getattr(self.commit, 'short_id', ''))
714 getattr(self.commit, 'short_id', ''))
716
715
717
716
718 class RootNode(DirNode):
717 class RootNode(DirNode):
719 """
718 """
720 DirNode being the root node of the repository.
719 DirNode being the root node of the repository.
721 """
720 """
722
721
723 def __init__(self, nodes=(), commit=None):
722 def __init__(self, nodes=(), commit=None):
724 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
723 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
725
724
726 def __repr__(self):
725 def __repr__(self):
727 return '<%s>' % self.__class__.__name__
726 return '<%s>' % self.__class__.__name__
728
727
729
728
730 class SubModuleNode(Node):
729 class SubModuleNode(Node):
731 """
730 """
732 represents a SubModule of Git or SubRepo of Mercurial
731 represents a SubModule of Git or SubRepo of Mercurial
733 """
732 """
734 is_binary = False
733 is_binary = False
735 size = 0
734 size = 0
736
735
737 def __init__(self, name, url=None, commit=None, alias=None):
736 def __init__(self, name, url=None, commit=None, alias=None):
738 self.path = name
737 self.path = name
739 self.kind = NodeKind.SUBMODULE
738 self.kind = NodeKind.SUBMODULE
740 self.alias = alias
739 self.alias = alias
741
740
742 # we have to use EmptyCommit here since this can point to svn/git/hg
741 # we have to use EmptyCommit here since this can point to svn/git/hg
743 # submodules we cannot get from repository
742 # submodules we cannot get from repository
744 self.commit = EmptyCommit(str(commit), alias=alias)
743 self.commit = EmptyCommit(str(commit), alias=alias)
745 self.url = url or self._extract_submodule_url()
744 self.url = url or self._extract_submodule_url()
746
745
747 def __repr__(self):
746 def __repr__(self):
748 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
747 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
749 getattr(self.commit, 'short_id', ''))
748 getattr(self.commit, 'short_id', ''))
750
749
751 def _extract_submodule_url(self):
750 def _extract_submodule_url(self):
752 # TODO: find a way to parse gits submodule file and extract the
751 # TODO: find a way to parse gits submodule file and extract the
753 # linking URL
752 # linking URL
754 return self.path
753 return self.path
755
754
756 @LazyProperty
755 @LazyProperty
757 def name(self):
756 def name(self):
758 """
757 """
759 Returns name of the node so if its path
758 Returns name of the node so if its path
760 then only last part is returned.
759 then only last part is returned.
761 """
760 """
762 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
761 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
763 return u'%s @ %s' % (org, self.commit.short_id)
762 return u'%s @ %s' % (org, self.commit.short_id)
764
763
765
764
766 class LargeFileNode(FileNode):
765 class LargeFileNode(FileNode):
767
766
767 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
768 self.path = path
769 self.org_path = org_path
770 self.kind = NodeKind.LARGEFILE
771 self.alias = alias
772
768 def _validate_path(self, path):
773 def _validate_path(self, path):
769 """
774 """
770 we override check since the LargeFileNode path is system absolute
775 we override check since the LargeFileNode path is system absolute
771 """
776 """
777 pass
772
778
779 def __repr__(self):
780 return '<%s %r>' % (self.__class__.__name__, self.path)
781
782 @LazyProperty
783 def size(self):
784 return os.stat(self.path).st_size
785
786 @LazyProperty
773 def raw_bytes(self):
787 def raw_bytes(self):
774 if self.commit:
788 if self.commit:
775 with open(self.path, 'rb') as f:
789 with open(self.path, 'rb') as f:
776 content = f.read()
790 content = f.read()
777 else:
791 else:
778 content = self._content
792 content = self._content
779 return content No newline at end of file
793 return content
794
795 @LazyProperty
796 def name(self):
797 """
798 Overwrites name to be the org lf path
799 """
800 return self.org_path
@@ -1,78 +1,87 b''
1 <%namespace name="sourceblock" file="/codeblocks/source.mako"/>
1 <%namespace name="sourceblock" file="/codeblocks/source.mako"/>
2
2
3 <div id="codeblock" class="codeblock">
3 <div id="codeblock" class="codeblock">
4 <div class="codeblock-header">
4 <div class="codeblock-header">
5 <div class="stats">
5 <div class="stats">
6 <span> <strong>${c.file}</strong></span>
6 <span> <strong>${c.file}</strong></span>
7 % if c.lf_node:
8 <span title="${_('This file is a pointer to large binary file')}"> | ${_('LargeFile')} ${h.format_byte_size_binary(c.lf_node.size)} </span>
9 % endif
7 <span> | ${c.file.lines()[0]} ${ungettext('line', 'lines', c.file.lines()[0])}</span>
10 <span> | ${c.file.lines()[0]} ${ungettext('line', 'lines', c.file.lines()[0])}</span>
8 <span> | ${h.format_byte_size_binary(c.file.size)}</span>
11 <span> | ${h.format_byte_size_binary(c.file.size)}</span>
9 <span> | ${c.file.mimetype} </span>
12 <span> | ${c.file.mimetype} </span>
10 <span class="item last"> | ${h.get_lexer_for_filenode(c.file).__class__.__name__}</span>
13 <span class="item last"> | ${h.get_lexer_for_filenode(c.file).__class__.__name__}</span>
11 </div>
14 </div>
12 <div class="buttons">
15 <div class="buttons">
13 <a id="file_history_overview" href="#">
16 <a id="file_history_overview" href="#">
14 ${_('History')}
17 ${_('History')}
15 </a>
18 </a>
16 <a id="file_history_overview_full" style="display: none" href="${h.url('changelog_file_home',repo_name=c.repo_name, revision=c.commit.raw_id, f_path=c.f_path)}">
19 <a id="file_history_overview_full" style="display: none" href="${h.url('changelog_file_home',repo_name=c.repo_name, revision=c.commit.raw_id, f_path=c.f_path)}">
17 ${_('Show Full History')}
20 ${_('Show Full History')}
18 </a> |
21 </a> |
19 %if c.annotate:
22 %if c.annotate:
20 ${h.link_to(_('Source'), h.url('files_home', repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
23 ${h.link_to(_('Source'), h.url('files_home', repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
21 %else:
24 %else:
22 ${h.link_to(_('Annotation'), h.url('files_annotate_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
25 ${h.link_to(_('Annotation'), h.url('files_annotate_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
23 %endif
26 %endif
24 | ${h.link_to(_('Raw'), h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
27 | ${h.link_to(_('Raw'), h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
25 | <a href="${h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path)}">
28 |
29 % if c.lf_node:
30 <a href="${h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path, lf=1)}">
31 ${_('Download largefile')}
32 </a>
33 % else:
34 <a href="${h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path)}">
26 ${_('Download')}
35 ${_('Download')}
27 </a>
36 </a>
37 % endif
28
38
29 %if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name):
39 %if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name):
30 |
40 |
31 %if c.on_branch_head and c.branch_or_raw_id and not c.file.is_binary:
41 %if c.on_branch_head and c.branch_or_raw_id and not c.file.is_binary:
32 <a href="${h.url('files_edit_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit')}">
42 <a href="${h.url('files_edit_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit')}">
33 ${_('Edit on Branch:%s') % c.branch_or_raw_id}
43 ${_('Edit on Branch:%s') % c.branch_or_raw_id}
34 </a>
44 </a>
35 | <a class="btn-danger btn-link" href="${h.url('files_delete_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit')}">${_('Delete')}
45 | <a class="btn-danger btn-link" href="${h.url('files_delete_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit')}">${_('Delete')}
36 </a>
46 </a>
37 %elif c.on_branch_head and c.branch_or_raw_id and c.file.is_binary:
47 %elif c.on_branch_head and c.branch_or_raw_id and c.file.is_binary:
38 ${h.link_to(_('Edit'), '#', class_="btn btn-link disabled tooltip", title=_('Editing binary files not allowed'))}
48 ${h.link_to(_('Edit'), '#', class_="btn btn-link disabled tooltip", title=_('Editing binary files not allowed'))}
39 | ${h.link_to(_('Delete'), h.url('files_delete_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit'),class_="btn-danger btn-link")}
49 | ${h.link_to(_('Delete'), h.url('files_delete_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit'),class_="btn-danger btn-link")}
40 %else:
50 %else:
41 ${h.link_to(_('Edit'), '#', class_="btn btn-link disabled tooltip", title=_('Editing files allowed only when on branch head commit'))}
51 ${h.link_to(_('Edit'), '#', class_="btn btn-link disabled tooltip", title=_('Editing files allowed only when on branch head commit'))}
42 | ${h.link_to(_('Delete'), '#', class_="btn btn-danger btn-link disabled tooltip", title=_('Deleting files allowed only when on branch head commit'))}
52 | ${h.link_to(_('Delete'), '#', class_="btn btn-danger btn-link disabled tooltip", title=_('Deleting files allowed only when on branch head commit'))}
43 %endif
53 %endif
44 %endif
54 %endif
45 </div>
55 </div>
46 </div>
56 </div>
47 <div id="file_history_container"></div>
57 <div id="file_history_container"></div>
48 <div class="code-body">
58 <div class="code-body">
49 %if c.file.is_binary:
59 %if c.file.is_binary:
50 <div>
60 <div>
51 ${_('Binary file (%s)') % c.file.mimetype}
61 ${_('Binary file (%s)') % c.file.mimetype}
52 </div>
62 </div>
53 %else:
63 %else:
54 % if c.file.size < c.cut_off_limit:
64 % if c.file.size < c.cut_off_limit:
55 %if c.renderer and not c.annotate:
65 %if c.renderer and not c.annotate:
56 ${h.render(c.file.content, renderer=c.renderer, relative_url=h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
66 ${h.render(c.file.content, renderer=c.renderer, relative_url=h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
57 %else:
67 %else:
58 <table class="cb codehilite">
68 <table class="cb codehilite">
59 %if c.annotate:
69 %if c.annotate:
60 <% color_hasher = h.color_hasher() %>
70 <% color_hasher = h.color_hasher() %>
61 %for annotation, lines in c.annotated_lines:
71 %for annotation, lines in c.annotated_lines:
62 ${sourceblock.render_annotation_lines(annotation, lines, color_hasher)}
72 ${sourceblock.render_annotation_lines(annotation, lines, color_hasher)}
63 %endfor
73 %endfor
64 %else:
74 %else:
65 %for line_num, tokens in enumerate(c.lines, 1):
75 %for line_num, tokens in enumerate(c.lines, 1):
66 ${sourceblock.render_line(line_num, tokens)}
76 ${sourceblock.render_line(line_num, tokens)}
67 %endfor
77 %endfor
68 %endif
78 %endif
69 </table>
79 </table>
70 </div>
71 %endif
80 %endif
72 %else:
81 %else:
73 ${_('File is too big to display')} ${h.link_to(_('Show as raw'),
82 ${_('File is too big to display')} ${h.link_to(_('Show as raw'),
74 h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
83 h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
75 %endif
84 %endif
76 %endif
85 %endif
77 </div>
86 </div>
78 </div> No newline at end of file
87 </div>
@@ -1,1809 +1,1814 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39
39
40 import rhodecode
40 import rhodecode
41 from rhodecode.lib.utils2 import AttributeDict
41 from rhodecode.lib.utils2 import AttributeDict
42 from rhodecode.model.changeset_status import ChangesetStatusModel
42 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.comment import CommentsModel
43 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.pull_request import PullRequestModel
48 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.repo import RepoModel
49 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.user import UserModel
51 from rhodecode.model.user import UserModel
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.user_group import UserGroupModel
53 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.integration import IntegrationModel
54 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.integrations import integration_type_registry
55 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations.types.base import IntegrationTypeBase
56 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.lib.utils import repo2db_mapper
57 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.vcs import create_vcsserver_proxy
58 from rhodecode.lib.vcs import create_vcsserver_proxy
59 from rhodecode.lib.vcs.backends import get_backend
59 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.nodes import FileNode
60 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.tests import (
61 from rhodecode.tests import (
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_REGULAR_PASS)
64 TEST_USER_REGULAR_PASS)
65 from rhodecode.tests.utils import CustomTestApp
65 from rhodecode.tests.utils import CustomTestApp
66 from rhodecode.tests.fixture import Fixture
66 from rhodecode.tests.fixture import Fixture
67
67
68
68
69 def _split_comma(value):
69 def _split_comma(value):
70 return value.split(',')
70 return value.split(',')
71
71
72
72
73 def pytest_addoption(parser):
73 def pytest_addoption(parser):
74 parser.addoption(
74 parser.addoption(
75 '--keep-tmp-path', action='store_true',
75 '--keep-tmp-path', action='store_true',
76 help="Keep the test temporary directories")
76 help="Keep the test temporary directories")
77 parser.addoption(
77 parser.addoption(
78 '--backends', action='store', type=_split_comma,
78 '--backends', action='store', type=_split_comma,
79 default=['git', 'hg', 'svn'],
79 default=['git', 'hg', 'svn'],
80 help="Select which backends to test for backend specific tests.")
80 help="Select which backends to test for backend specific tests.")
81 parser.addoption(
81 parser.addoption(
82 '--dbs', action='store', type=_split_comma,
82 '--dbs', action='store', type=_split_comma,
83 default=['sqlite'],
83 default=['sqlite'],
84 help="Select which database to test for database specific tests. "
84 help="Select which database to test for database specific tests. "
85 "Possible options are sqlite,postgres,mysql")
85 "Possible options are sqlite,postgres,mysql")
86 parser.addoption(
86 parser.addoption(
87 '--appenlight', '--ae', action='store_true',
87 '--appenlight', '--ae', action='store_true',
88 help="Track statistics in appenlight.")
88 help="Track statistics in appenlight.")
89 parser.addoption(
89 parser.addoption(
90 '--appenlight-api-key', '--ae-key',
90 '--appenlight-api-key', '--ae-key',
91 help="API key for Appenlight.")
91 help="API key for Appenlight.")
92 parser.addoption(
92 parser.addoption(
93 '--appenlight-url', '--ae-url',
93 '--appenlight-url', '--ae-url',
94 default="https://ae.rhodecode.com",
94 default="https://ae.rhodecode.com",
95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 parser.addoption(
96 parser.addoption(
97 '--sqlite-connection-string', action='store',
97 '--sqlite-connection-string', action='store',
98 default='', help="Connection string for the dbs tests with SQLite")
98 default='', help="Connection string for the dbs tests with SQLite")
99 parser.addoption(
99 parser.addoption(
100 '--postgres-connection-string', action='store',
100 '--postgres-connection-string', action='store',
101 default='', help="Connection string for the dbs tests with Postgres")
101 default='', help="Connection string for the dbs tests with Postgres")
102 parser.addoption(
102 parser.addoption(
103 '--mysql-connection-string', action='store',
103 '--mysql-connection-string', action='store',
104 default='', help="Connection string for the dbs tests with MySQL")
104 default='', help="Connection string for the dbs tests with MySQL")
105 parser.addoption(
105 parser.addoption(
106 '--repeat', type=int, default=100,
106 '--repeat', type=int, default=100,
107 help="Number of repetitions in performance tests.")
107 help="Number of repetitions in performance tests.")
108
108
109
109
110 def pytest_configure(config):
110 def pytest_configure(config):
111 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
111 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
112 from rhodecode.config import patches
112 from rhodecode.config import patches
113 patches.kombu_1_5_1_python_2_7_11()
113 patches.kombu_1_5_1_python_2_7_11()
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # nottest marked, compare nose, used for transition from nose to pytest
117 # nottest marked, compare nose, used for transition from nose to pytest
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.function.backends.args
142 backends = metafunc.function.backends.args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.tests.other import example_rcextensions
157 from rhodecode.tests.other import example_rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = example_rcextensions
160 rhodecode.EXTENSIONS = example_rcextensions
161
161
162 @request.addfinalizer
162 @request.addfinalizer
163 def cleanup():
163 def cleanup():
164 rhodecode.EXTENSIONS = old_extensions
164 rhodecode.EXTENSIONS = old_extensions
165
165
166
166
167 @pytest.fixture
167 @pytest.fixture
168 def capture_rcextensions():
168 def capture_rcextensions():
169 """
169 """
170 Returns the recorded calls to entry points in rcextensions.
170 Returns the recorded calls to entry points in rcextensions.
171 """
171 """
172 calls = rhodecode.EXTENSIONS.calls
172 calls = rhodecode.EXTENSIONS.calls
173 calls.clear()
173 calls.clear()
174 # Note: At this moment, it is still the empty dict, but that will
174 # Note: At this moment, it is still the empty dict, but that will
175 # be filled during the test run and since it is a reference this
175 # be filled during the test run and since it is a reference this
176 # is enough to make it work.
176 # is enough to make it work.
177 return calls
177 return calls
178
178
179
179
180 @pytest.fixture(scope='session')
180 @pytest.fixture(scope='session')
181 def http_environ_session():
181 def http_environ_session():
182 """
182 """
183 Allow to use "http_environ" in session scope.
183 Allow to use "http_environ" in session scope.
184 """
184 """
185 return http_environ(
185 return http_environ(
186 http_host_stub=http_host_stub())
186 http_host_stub=http_host_stub())
187
187
188
188
189 @pytest.fixture
189 @pytest.fixture
190 def http_host_stub():
190 def http_host_stub():
191 """
191 """
192 Value of HTTP_HOST in the test run.
192 Value of HTTP_HOST in the test run.
193 """
193 """
194 return 'test.example.com:80'
194 return 'test.example.com:80'
195
195
196
196
197 @pytest.fixture
197 @pytest.fixture
198 def http_environ(http_host_stub):
198 def http_environ(http_host_stub):
199 """
199 """
200 HTTP extra environ keys.
200 HTTP extra environ keys.
201
201
202 User by the test application and as well for setting up the pylons
202 User by the test application and as well for setting up the pylons
203 environment. In the case of the fixture "app" it should be possible
203 environment. In the case of the fixture "app" it should be possible
204 to override this for a specific test case.
204 to override this for a specific test case.
205 """
205 """
206 return {
206 return {
207 'SERVER_NAME': http_host_stub.split(':')[0],
207 'SERVER_NAME': http_host_stub.split(':')[0],
208 'SERVER_PORT': http_host_stub.split(':')[1],
208 'SERVER_PORT': http_host_stub.split(':')[1],
209 'HTTP_HOST': http_host_stub,
209 'HTTP_HOST': http_host_stub,
210 'HTTP_USER_AGENT': 'rc-test-agent',
210 'HTTP_USER_AGENT': 'rc-test-agent',
211 'REQUEST_METHOD': 'GET'
211 'REQUEST_METHOD': 'GET'
212 }
212 }
213
213
214
214
215 @pytest.fixture(scope='function')
215 @pytest.fixture(scope='function')
216 def app(request, pylonsapp, http_environ):
216 def app(request, pylonsapp, http_environ):
217 app = CustomTestApp(
217 app = CustomTestApp(
218 pylonsapp,
218 pylonsapp,
219 extra_environ=http_environ)
219 extra_environ=http_environ)
220 if request.cls:
220 if request.cls:
221 request.cls.app = app
221 request.cls.app = app
222 return app
222 return app
223
223
224
224
225 @pytest.fixture(scope='session')
225 @pytest.fixture(scope='session')
226 def app_settings(pylonsapp, pylons_config):
226 def app_settings(pylonsapp, pylons_config):
227 """
227 """
228 Settings dictionary used to create the app.
228 Settings dictionary used to create the app.
229
229
230 Parses the ini file and passes the result through the sanitize and apply
230 Parses the ini file and passes the result through the sanitize and apply
231 defaults mechanism in `rhodecode.config.middleware`.
231 defaults mechanism in `rhodecode.config.middleware`.
232 """
232 """
233 from paste.deploy.loadwsgi import loadcontext, APP
233 from paste.deploy.loadwsgi import loadcontext, APP
234 from rhodecode.config.middleware import (
234 from rhodecode.config.middleware import (
235 sanitize_settings_and_apply_defaults)
235 sanitize_settings_and_apply_defaults)
236 context = loadcontext(APP, 'config:' + pylons_config)
236 context = loadcontext(APP, 'config:' + pylons_config)
237 settings = sanitize_settings_and_apply_defaults(context.config())
237 settings = sanitize_settings_and_apply_defaults(context.config())
238 return settings
238 return settings
239
239
240
240
241 @pytest.fixture(scope='session')
241 @pytest.fixture(scope='session')
242 def db(app_settings):
242 def db(app_settings):
243 """
243 """
244 Initializes the database connection.
244 Initializes the database connection.
245
245
246 It uses the same settings which are used to create the ``pylonsapp`` or
246 It uses the same settings which are used to create the ``pylonsapp`` or
247 ``app`` fixtures.
247 ``app`` fixtures.
248 """
248 """
249 from rhodecode.config.utils import initialize_database
249 from rhodecode.config.utils import initialize_database
250 initialize_database(app_settings)
250 initialize_database(app_settings)
251
251
252
252
253 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
253 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
254
254
255
255
256 def _autologin_user(app, *args):
256 def _autologin_user(app, *args):
257 session = login_user_session(app, *args)
257 session = login_user_session(app, *args)
258 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
258 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
259 return LoginData(csrf_token, session['rhodecode_user'])
259 return LoginData(csrf_token, session['rhodecode_user'])
260
260
261
261
262 @pytest.fixture
262 @pytest.fixture
263 def autologin_user(app):
263 def autologin_user(app):
264 """
264 """
265 Utility fixture which makes sure that the admin user is logged in
265 Utility fixture which makes sure that the admin user is logged in
266 """
266 """
267 return _autologin_user(app)
267 return _autologin_user(app)
268
268
269
269
270 @pytest.fixture
270 @pytest.fixture
271 def autologin_regular_user(app):
271 def autologin_regular_user(app):
272 """
272 """
273 Utility fixture which makes sure that the regular user is logged in
273 Utility fixture which makes sure that the regular user is logged in
274 """
274 """
275 return _autologin_user(
275 return _autologin_user(
276 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
276 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
277
277
278
278
279 @pytest.fixture(scope='function')
279 @pytest.fixture(scope='function')
280 def csrf_token(request, autologin_user):
280 def csrf_token(request, autologin_user):
281 return autologin_user.csrf_token
281 return autologin_user.csrf_token
282
282
283
283
284 @pytest.fixture(scope='function')
284 @pytest.fixture(scope='function')
285 def xhr_header(request):
285 def xhr_header(request):
286 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
286 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
287
287
288
288
289 @pytest.fixture
289 @pytest.fixture
290 def real_crypto_backend(monkeypatch):
290 def real_crypto_backend(monkeypatch):
291 """
291 """
292 Switch the production crypto backend on for this test.
292 Switch the production crypto backend on for this test.
293
293
294 During the test run the crypto backend is replaced with a faster
294 During the test run the crypto backend is replaced with a faster
295 implementation based on the MD5 algorithm.
295 implementation based on the MD5 algorithm.
296 """
296 """
297 monkeypatch.setattr(rhodecode, 'is_test', False)
297 monkeypatch.setattr(rhodecode, 'is_test', False)
298
298
299
299
300 @pytest.fixture(scope='class')
300 @pytest.fixture(scope='class')
301 def index_location(request, pylonsapp):
301 def index_location(request, pylonsapp):
302 index_location = pylonsapp.config['app_conf']['search.location']
302 index_location = pylonsapp.config['app_conf']['search.location']
303 if request.cls:
303 if request.cls:
304 request.cls.index_location = index_location
304 request.cls.index_location = index_location
305 return index_location
305 return index_location
306
306
307
307
308 @pytest.fixture(scope='session', autouse=True)
308 @pytest.fixture(scope='session', autouse=True)
309 def tests_tmp_path(request):
309 def tests_tmp_path(request):
310 """
310 """
311 Create temporary directory to be used during the test session.
311 Create temporary directory to be used during the test session.
312 """
312 """
313 if not os.path.exists(TESTS_TMP_PATH):
313 if not os.path.exists(TESTS_TMP_PATH):
314 os.makedirs(TESTS_TMP_PATH)
314 os.makedirs(TESTS_TMP_PATH)
315
315
316 if not request.config.getoption('--keep-tmp-path'):
316 if not request.config.getoption('--keep-tmp-path'):
317 @request.addfinalizer
317 @request.addfinalizer
318 def remove_tmp_path():
318 def remove_tmp_path():
319 shutil.rmtree(TESTS_TMP_PATH)
319 shutil.rmtree(TESTS_TMP_PATH)
320
320
321 return TESTS_TMP_PATH
321 return TESTS_TMP_PATH
322
322
323
323
324 @pytest.fixture
324 @pytest.fixture
325 def test_repo_group(request):
325 def test_repo_group(request):
326 """
326 """
327 Create a temporary repository group, and destroy it after
327 Create a temporary repository group, and destroy it after
328 usage automatically
328 usage automatically
329 """
329 """
330 fixture = Fixture()
330 fixture = Fixture()
331 repogroupid = 'test_repo_group_%s' % int(time.time())
331 repogroupid = 'test_repo_group_%s' % int(time.time())
332 repo_group = fixture.create_repo_group(repogroupid)
332 repo_group = fixture.create_repo_group(repogroupid)
333
333
334 def _cleanup():
334 def _cleanup():
335 fixture.destroy_repo_group(repogroupid)
335 fixture.destroy_repo_group(repogroupid)
336
336
337 request.addfinalizer(_cleanup)
337 request.addfinalizer(_cleanup)
338 return repo_group
338 return repo_group
339
339
340
340
341 @pytest.fixture
341 @pytest.fixture
342 def test_user_group(request):
342 def test_user_group(request):
343 """
343 """
344 Create a temporary user group, and destroy it after
344 Create a temporary user group, and destroy it after
345 usage automatically
345 usage automatically
346 """
346 """
347 fixture = Fixture()
347 fixture = Fixture()
348 usergroupid = 'test_user_group_%s' % int(time.time())
348 usergroupid = 'test_user_group_%s' % int(time.time())
349 user_group = fixture.create_user_group(usergroupid)
349 user_group = fixture.create_user_group(usergroupid)
350
350
351 def _cleanup():
351 def _cleanup():
352 fixture.destroy_user_group(user_group)
352 fixture.destroy_user_group(user_group)
353
353
354 request.addfinalizer(_cleanup)
354 request.addfinalizer(_cleanup)
355 return user_group
355 return user_group
356
356
357
357
358 @pytest.fixture(scope='session')
358 @pytest.fixture(scope='session')
359 def test_repo(request):
359 def test_repo(request):
360 container = TestRepoContainer()
360 container = TestRepoContainer()
361 request.addfinalizer(container._cleanup)
361 request.addfinalizer(container._cleanup)
362 return container
362 return container
363
363
364
364
365 class TestRepoContainer(object):
365 class TestRepoContainer(object):
366 """
366 """
367 Container for test repositories which are used read only.
367 Container for test repositories which are used read only.
368
368
369 Repositories will be created on demand and re-used during the lifetime
369 Repositories will be created on demand and re-used during the lifetime
370 of this object.
370 of this object.
371
371
372 Usage to get the svn test repository "minimal"::
372 Usage to get the svn test repository "minimal"::
373
373
374 test_repo = TestContainer()
374 test_repo = TestContainer()
375 repo = test_repo('minimal', 'svn')
375 repo = test_repo('minimal', 'svn')
376
376
377 """
377 """
378
378
379 dump_extractors = {
379 dump_extractors = {
380 'git': utils.extract_git_repo_from_dump,
380 'git': utils.extract_git_repo_from_dump,
381 'hg': utils.extract_hg_repo_from_dump,
381 'hg': utils.extract_hg_repo_from_dump,
382 'svn': utils.extract_svn_repo_from_dump,
382 'svn': utils.extract_svn_repo_from_dump,
383 }
383 }
384
384
385 def __init__(self):
385 def __init__(self):
386 self._cleanup_repos = []
386 self._cleanup_repos = []
387 self._fixture = Fixture()
387 self._fixture = Fixture()
388 self._repos = {}
388 self._repos = {}
389
389
390 def __call__(self, dump_name, backend_alias):
390 def __call__(self, dump_name, backend_alias, config=None):
391 key = (dump_name, backend_alias)
391 key = (dump_name, backend_alias)
392 if key not in self._repos:
392 if key not in self._repos:
393 repo = self._create_repo(dump_name, backend_alias)
393 repo = self._create_repo(dump_name, backend_alias, config)
394 self._repos[key] = repo.repo_id
394 self._repos[key] = repo.repo_id
395 return Repository.get(self._repos[key])
395 return Repository.get(self._repos[key])
396
396
397 def _create_repo(self, dump_name, backend_alias):
397 def _create_repo(self, dump_name, backend_alias, config):
398 repo_name = '%s-%s' % (backend_alias, dump_name)
398 repo_name = '%s-%s' % (backend_alias, dump_name)
399 backend_class = get_backend(backend_alias)
399 backend_class = get_backend(backend_alias)
400 dump_extractor = self.dump_extractors[backend_alias]
400 dump_extractor = self.dump_extractors[backend_alias]
401 repo_path = dump_extractor(dump_name, repo_name)
401 repo_path = dump_extractor(dump_name, repo_name)
402 vcs_repo = backend_class(repo_path)
402
403 vcs_repo = backend_class(repo_path, config=config)
403 repo2db_mapper({repo_name: vcs_repo})
404 repo2db_mapper({repo_name: vcs_repo})
405
404 repo = RepoModel().get_by_repo_name(repo_name)
406 repo = RepoModel().get_by_repo_name(repo_name)
405 self._cleanup_repos.append(repo_name)
407 self._cleanup_repos.append(repo_name)
406 return repo
408 return repo
407
409
408 def _cleanup(self):
410 def _cleanup(self):
409 for repo_name in reversed(self._cleanup_repos):
411 for repo_name in reversed(self._cleanup_repos):
410 self._fixture.destroy_repo(repo_name)
412 self._fixture.destroy_repo(repo_name)
411
413
412
414
413 @pytest.fixture
415 @pytest.fixture
414 def backend(request, backend_alias, pylonsapp, test_repo):
416 def backend(request, backend_alias, pylonsapp, test_repo):
415 """
417 """
416 Parametrized fixture which represents a single backend implementation.
418 Parametrized fixture which represents a single backend implementation.
417
419
418 It respects the option `--backends` to focus the test run on specific
420 It respects the option `--backends` to focus the test run on specific
419 backend implementations.
421 backend implementations.
420
422
421 It also supports `pytest.mark.xfail_backends` to mark tests as failing
423 It also supports `pytest.mark.xfail_backends` to mark tests as failing
422 for specific backends. This is intended as a utility for incremental
424 for specific backends. This is intended as a utility for incremental
423 development of a new backend implementation.
425 development of a new backend implementation.
424 """
426 """
425 if backend_alias not in request.config.getoption('--backends'):
427 if backend_alias not in request.config.getoption('--backends'):
426 pytest.skip("Backend %s not selected." % (backend_alias, ))
428 pytest.skip("Backend %s not selected." % (backend_alias, ))
427
429
428 utils.check_xfail_backends(request.node, backend_alias)
430 utils.check_xfail_backends(request.node, backend_alias)
429 utils.check_skip_backends(request.node, backend_alias)
431 utils.check_skip_backends(request.node, backend_alias)
430
432
431 repo_name = 'vcs_test_%s' % (backend_alias, )
433 repo_name = 'vcs_test_%s' % (backend_alias, )
432 backend = Backend(
434 backend = Backend(
433 alias=backend_alias,
435 alias=backend_alias,
434 repo_name=repo_name,
436 repo_name=repo_name,
435 test_name=request.node.name,
437 test_name=request.node.name,
436 test_repo_container=test_repo)
438 test_repo_container=test_repo)
437 request.addfinalizer(backend.cleanup)
439 request.addfinalizer(backend.cleanup)
438 return backend
440 return backend
439
441
440
442
441 @pytest.fixture
443 @pytest.fixture
442 def backend_git(request, pylonsapp, test_repo):
444 def backend_git(request, pylonsapp, test_repo):
443 return backend(request, 'git', pylonsapp, test_repo)
445 return backend(request, 'git', pylonsapp, test_repo)
444
446
445
447
446 @pytest.fixture
448 @pytest.fixture
447 def backend_hg(request, pylonsapp, test_repo):
449 def backend_hg(request, pylonsapp, test_repo):
448 return backend(request, 'hg', pylonsapp, test_repo)
450 return backend(request, 'hg', pylonsapp, test_repo)
449
451
450
452
451 @pytest.fixture
453 @pytest.fixture
452 def backend_svn(request, pylonsapp, test_repo):
454 def backend_svn(request, pylonsapp, test_repo):
453 return backend(request, 'svn', pylonsapp, test_repo)
455 return backend(request, 'svn', pylonsapp, test_repo)
454
456
455
457
456 @pytest.fixture
458 @pytest.fixture
457 def backend_random(backend_git):
459 def backend_random(backend_git):
458 """
460 """
459 Use this to express that your tests need "a backend.
461 Use this to express that your tests need "a backend.
460
462
461 A few of our tests need a backend, so that we can run the code. This
463 A few of our tests need a backend, so that we can run the code. This
462 fixture is intended to be used for such cases. It will pick one of the
464 fixture is intended to be used for such cases. It will pick one of the
463 backends and run the tests.
465 backends and run the tests.
464
466
465 The fixture `backend` would run the test multiple times for each
467 The fixture `backend` would run the test multiple times for each
466 available backend which is a pure waste of time if the test is
468 available backend which is a pure waste of time if the test is
467 independent of the backend type.
469 independent of the backend type.
468 """
470 """
469 # TODO: johbo: Change this to pick a random backend
471 # TODO: johbo: Change this to pick a random backend
470 return backend_git
472 return backend_git
471
473
472
474
473 @pytest.fixture
475 @pytest.fixture
474 def backend_stub(backend_git):
476 def backend_stub(backend_git):
475 """
477 """
476 Use this to express that your tests need a backend stub
478 Use this to express that your tests need a backend stub
477
479
478 TODO: mikhail: Implement a real stub logic instead of returning
480 TODO: mikhail: Implement a real stub logic instead of returning
479 a git backend
481 a git backend
480 """
482 """
481 return backend_git
483 return backend_git
482
484
483
485
484 @pytest.fixture
486 @pytest.fixture
485 def repo_stub(backend_stub):
487 def repo_stub(backend_stub):
486 """
488 """
487 Use this to express that your tests need a repository stub
489 Use this to express that your tests need a repository stub
488 """
490 """
489 return backend_stub.create_repo()
491 return backend_stub.create_repo()
490
492
491
493
492 class Backend(object):
494 class Backend(object):
493 """
495 """
494 Represents the test configuration for one supported backend
496 Represents the test configuration for one supported backend
495
497
496 Provides easy access to different test repositories based on
498 Provides easy access to different test repositories based on
497 `__getitem__`. Such repositories will only be created once per test
499 `__getitem__`. Such repositories will only be created once per test
498 session.
500 session.
499 """
501 """
500
502
501 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
503 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
502 _master_repo = None
504 _master_repo = None
503 _commit_ids = {}
505 _commit_ids = {}
504
506
505 def __init__(self, alias, repo_name, test_name, test_repo_container):
507 def __init__(self, alias, repo_name, test_name, test_repo_container):
506 self.alias = alias
508 self.alias = alias
507 self.repo_name = repo_name
509 self.repo_name = repo_name
508 self._cleanup_repos = []
510 self._cleanup_repos = []
509 self._test_name = test_name
511 self._test_name = test_name
510 self._test_repo_container = test_repo_container
512 self._test_repo_container = test_repo_container
511 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
513 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
512 # Fixture will survive in the end.
514 # Fixture will survive in the end.
513 self._fixture = Fixture()
515 self._fixture = Fixture()
514
516
515 def __getitem__(self, key):
517 def __getitem__(self, key):
516 return self._test_repo_container(key, self.alias)
518 return self._test_repo_container(key, self.alias)
517
519
520 def create_test_repo(self, key, config=None):
521 return self._test_repo_container(key, self.alias, config)
522
518 @property
523 @property
519 def repo(self):
524 def repo(self):
520 """
525 """
521 Returns the "current" repository. This is the vcs_test repo or the
526 Returns the "current" repository. This is the vcs_test repo or the
522 last repo which has been created with `create_repo`.
527 last repo which has been created with `create_repo`.
523 """
528 """
524 from rhodecode.model.db import Repository
529 from rhodecode.model.db import Repository
525 return Repository.get_by_repo_name(self.repo_name)
530 return Repository.get_by_repo_name(self.repo_name)
526
531
527 @property
532 @property
528 def default_branch_name(self):
533 def default_branch_name(self):
529 VcsRepository = get_backend(self.alias)
534 VcsRepository = get_backend(self.alias)
530 return VcsRepository.DEFAULT_BRANCH_NAME
535 return VcsRepository.DEFAULT_BRANCH_NAME
531
536
532 @property
537 @property
533 def default_head_id(self):
538 def default_head_id(self):
534 """
539 """
535 Returns the default head id of the underlying backend.
540 Returns the default head id of the underlying backend.
536
541
537 This will be the default branch name in case the backend does have a
542 This will be the default branch name in case the backend does have a
538 default branch. In the other cases it will point to a valid head
543 default branch. In the other cases it will point to a valid head
539 which can serve as the base to create a new commit on top of it.
544 which can serve as the base to create a new commit on top of it.
540 """
545 """
541 vcsrepo = self.repo.scm_instance()
546 vcsrepo = self.repo.scm_instance()
542 head_id = (
547 head_id = (
543 vcsrepo.DEFAULT_BRANCH_NAME or
548 vcsrepo.DEFAULT_BRANCH_NAME or
544 vcsrepo.commit_ids[-1])
549 vcsrepo.commit_ids[-1])
545 return head_id
550 return head_id
546
551
547 @property
552 @property
548 def commit_ids(self):
553 def commit_ids(self):
549 """
554 """
550 Returns the list of commits for the last created repository
555 Returns the list of commits for the last created repository
551 """
556 """
552 return self._commit_ids
557 return self._commit_ids
553
558
554 def create_master_repo(self, commits):
559 def create_master_repo(self, commits):
555 """
560 """
556 Create a repository and remember it as a template.
561 Create a repository and remember it as a template.
557
562
558 This allows to easily create derived repositories to construct
563 This allows to easily create derived repositories to construct
559 more complex scenarios for diff, compare and pull requests.
564 more complex scenarios for diff, compare and pull requests.
560
565
561 Returns a commit map which maps from commit message to raw_id.
566 Returns a commit map which maps from commit message to raw_id.
562 """
567 """
563 self._master_repo = self.create_repo(commits=commits)
568 self._master_repo = self.create_repo(commits=commits)
564 return self._commit_ids
569 return self._commit_ids
565
570
566 def create_repo(
571 def create_repo(
567 self, commits=None, number_of_commits=0, heads=None,
572 self, commits=None, number_of_commits=0, heads=None,
568 name_suffix=u'', **kwargs):
573 name_suffix=u'', **kwargs):
569 """
574 """
570 Create a repository and record it for later cleanup.
575 Create a repository and record it for later cleanup.
571
576
572 :param commits: Optional. A sequence of dict instances.
577 :param commits: Optional. A sequence of dict instances.
573 Will add a commit per entry to the new repository.
578 Will add a commit per entry to the new repository.
574 :param number_of_commits: Optional. If set to a number, this number of
579 :param number_of_commits: Optional. If set to a number, this number of
575 commits will be added to the new repository.
580 commits will be added to the new repository.
576 :param heads: Optional. Can be set to a sequence of of commit
581 :param heads: Optional. Can be set to a sequence of of commit
577 names which shall be pulled in from the master repository.
582 names which shall be pulled in from the master repository.
578
583
579 """
584 """
580 self.repo_name = self._next_repo_name() + name_suffix
585 self.repo_name = self._next_repo_name() + name_suffix
581 repo = self._fixture.create_repo(
586 repo = self._fixture.create_repo(
582 self.repo_name, repo_type=self.alias, **kwargs)
587 self.repo_name, repo_type=self.alias, **kwargs)
583 self._cleanup_repos.append(repo.repo_name)
588 self._cleanup_repos.append(repo.repo_name)
584
589
585 commits = commits or [
590 commits = commits or [
586 {'message': 'Commit %s of %s' % (x, self.repo_name)}
591 {'message': 'Commit %s of %s' % (x, self.repo_name)}
587 for x in xrange(number_of_commits)]
592 for x in xrange(number_of_commits)]
588 self._add_commits_to_repo(repo.scm_instance(), commits)
593 self._add_commits_to_repo(repo.scm_instance(), commits)
589 if heads:
594 if heads:
590 self.pull_heads(repo, heads)
595 self.pull_heads(repo, heads)
591
596
592 return repo
597 return repo
593
598
594 def pull_heads(self, repo, heads):
599 def pull_heads(self, repo, heads):
595 """
600 """
596 Make sure that repo contains all commits mentioned in `heads`
601 Make sure that repo contains all commits mentioned in `heads`
597 """
602 """
598 vcsmaster = self._master_repo.scm_instance()
603 vcsmaster = self._master_repo.scm_instance()
599 vcsrepo = repo.scm_instance()
604 vcsrepo = repo.scm_instance()
600 vcsrepo.config.clear_section('hooks')
605 vcsrepo.config.clear_section('hooks')
601 commit_ids = [self._commit_ids[h] for h in heads]
606 commit_ids = [self._commit_ids[h] for h in heads]
602 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
607 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
603
608
604 def create_fork(self):
609 def create_fork(self):
605 repo_to_fork = self.repo_name
610 repo_to_fork = self.repo_name
606 self.repo_name = self._next_repo_name()
611 self.repo_name = self._next_repo_name()
607 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
612 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
608 self._cleanup_repos.append(self.repo_name)
613 self._cleanup_repos.append(self.repo_name)
609 return repo
614 return repo
610
615
611 def new_repo_name(self, suffix=u''):
616 def new_repo_name(self, suffix=u''):
612 self.repo_name = self._next_repo_name() + suffix
617 self.repo_name = self._next_repo_name() + suffix
613 self._cleanup_repos.append(self.repo_name)
618 self._cleanup_repos.append(self.repo_name)
614 return self.repo_name
619 return self.repo_name
615
620
616 def _next_repo_name(self):
621 def _next_repo_name(self):
617 return u"%s_%s" % (
622 return u"%s_%s" % (
618 self.invalid_repo_name.sub(u'_', self._test_name),
623 self.invalid_repo_name.sub(u'_', self._test_name),
619 len(self._cleanup_repos))
624 len(self._cleanup_repos))
620
625
621 def ensure_file(self, filename, content='Test content\n'):
626 def ensure_file(self, filename, content='Test content\n'):
622 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
627 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
623 commits = [
628 commits = [
624 {'added': [
629 {'added': [
625 FileNode(filename, content=content),
630 FileNode(filename, content=content),
626 ]},
631 ]},
627 ]
632 ]
628 self._add_commits_to_repo(self.repo.scm_instance(), commits)
633 self._add_commits_to_repo(self.repo.scm_instance(), commits)
629
634
630 def enable_downloads(self):
635 def enable_downloads(self):
631 repo = self.repo
636 repo = self.repo
632 repo.enable_downloads = True
637 repo.enable_downloads = True
633 Session().add(repo)
638 Session().add(repo)
634 Session().commit()
639 Session().commit()
635
640
636 def cleanup(self):
641 def cleanup(self):
637 for repo_name in reversed(self._cleanup_repos):
642 for repo_name in reversed(self._cleanup_repos):
638 self._fixture.destroy_repo(repo_name)
643 self._fixture.destroy_repo(repo_name)
639
644
640 def _add_commits_to_repo(self, repo, commits):
645 def _add_commits_to_repo(self, repo, commits):
641 commit_ids = _add_commits_to_repo(repo, commits)
646 commit_ids = _add_commits_to_repo(repo, commits)
642 if not commit_ids:
647 if not commit_ids:
643 return
648 return
644 self._commit_ids = commit_ids
649 self._commit_ids = commit_ids
645
650
646 # Creating refs for Git to allow fetching them from remote repository
651 # Creating refs for Git to allow fetching them from remote repository
647 if self.alias == 'git':
652 if self.alias == 'git':
648 refs = {}
653 refs = {}
649 for message in self._commit_ids:
654 for message in self._commit_ids:
650 # TODO: mikhail: do more special chars replacements
655 # TODO: mikhail: do more special chars replacements
651 ref_name = 'refs/test-refs/{}'.format(
656 ref_name = 'refs/test-refs/{}'.format(
652 message.replace(' ', ''))
657 message.replace(' ', ''))
653 refs[ref_name] = self._commit_ids[message]
658 refs[ref_name] = self._commit_ids[message]
654 self._create_refs(repo, refs)
659 self._create_refs(repo, refs)
655
660
656 def _create_refs(self, repo, refs):
661 def _create_refs(self, repo, refs):
657 for ref_name in refs:
662 for ref_name in refs:
658 repo.set_refs(ref_name, refs[ref_name])
663 repo.set_refs(ref_name, refs[ref_name])
659
664
660
665
661 @pytest.fixture
666 @pytest.fixture
662 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
667 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
663 """
668 """
664 Parametrized fixture which represents a single vcs backend implementation.
669 Parametrized fixture which represents a single vcs backend implementation.
665
670
666 See the fixture `backend` for more details. This one implements the same
671 See the fixture `backend` for more details. This one implements the same
667 concept, but on vcs level. So it does not provide model instances etc.
672 concept, but on vcs level. So it does not provide model instances etc.
668
673
669 Parameters are generated dynamically, see :func:`pytest_generate_tests`
674 Parameters are generated dynamically, see :func:`pytest_generate_tests`
670 for how this works.
675 for how this works.
671 """
676 """
672 if backend_alias not in request.config.getoption('--backends'):
677 if backend_alias not in request.config.getoption('--backends'):
673 pytest.skip("Backend %s not selected." % (backend_alias, ))
678 pytest.skip("Backend %s not selected." % (backend_alias, ))
674
679
675 utils.check_xfail_backends(request.node, backend_alias)
680 utils.check_xfail_backends(request.node, backend_alias)
676 utils.check_skip_backends(request.node, backend_alias)
681 utils.check_skip_backends(request.node, backend_alias)
677
682
678 repo_name = 'vcs_test_%s' % (backend_alias, )
683 repo_name = 'vcs_test_%s' % (backend_alias, )
679 repo_path = os.path.join(tests_tmp_path, repo_name)
684 repo_path = os.path.join(tests_tmp_path, repo_name)
680 backend = VcsBackend(
685 backend = VcsBackend(
681 alias=backend_alias,
686 alias=backend_alias,
682 repo_path=repo_path,
687 repo_path=repo_path,
683 test_name=request.node.name,
688 test_name=request.node.name,
684 test_repo_container=test_repo)
689 test_repo_container=test_repo)
685 request.addfinalizer(backend.cleanup)
690 request.addfinalizer(backend.cleanup)
686 return backend
691 return backend
687
692
688
693
689 @pytest.fixture
694 @pytest.fixture
690 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
695 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
691 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
696 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
692
697
693
698
694 @pytest.fixture
699 @pytest.fixture
695 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
700 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
696 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
701 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
697
702
698
703
699 @pytest.fixture
704 @pytest.fixture
700 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
705 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
701 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
706 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
702
707
703
708
704 @pytest.fixture
709 @pytest.fixture
705 def vcsbackend_random(vcsbackend_git):
710 def vcsbackend_random(vcsbackend_git):
706 """
711 """
707 Use this to express that your tests need "a vcsbackend".
712 Use this to express that your tests need "a vcsbackend".
708
713
709 The fixture `vcsbackend` would run the test multiple times for each
714 The fixture `vcsbackend` would run the test multiple times for each
710 available vcs backend which is a pure waste of time if the test is
715 available vcs backend which is a pure waste of time if the test is
711 independent of the vcs backend type.
716 independent of the vcs backend type.
712 """
717 """
713 # TODO: johbo: Change this to pick a random backend
718 # TODO: johbo: Change this to pick a random backend
714 return vcsbackend_git
719 return vcsbackend_git
715
720
716
721
717 @pytest.fixture
722 @pytest.fixture
718 def vcsbackend_stub(vcsbackend_git):
723 def vcsbackend_stub(vcsbackend_git):
719 """
724 """
720 Use this to express that your test just needs a stub of a vcsbackend.
725 Use this to express that your test just needs a stub of a vcsbackend.
721
726
722 Plan is to eventually implement an in-memory stub to speed tests up.
727 Plan is to eventually implement an in-memory stub to speed tests up.
723 """
728 """
724 return vcsbackend_git
729 return vcsbackend_git
725
730
726
731
727 class VcsBackend(object):
732 class VcsBackend(object):
728 """
733 """
729 Represents the test configuration for one supported vcs backend.
734 Represents the test configuration for one supported vcs backend.
730 """
735 """
731
736
732 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
737 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
733
738
734 def __init__(self, alias, repo_path, test_name, test_repo_container):
739 def __init__(self, alias, repo_path, test_name, test_repo_container):
735 self.alias = alias
740 self.alias = alias
736 self._repo_path = repo_path
741 self._repo_path = repo_path
737 self._cleanup_repos = []
742 self._cleanup_repos = []
738 self._test_name = test_name
743 self._test_name = test_name
739 self._test_repo_container = test_repo_container
744 self._test_repo_container = test_repo_container
740
745
741 def __getitem__(self, key):
746 def __getitem__(self, key):
742 return self._test_repo_container(key, self.alias).scm_instance()
747 return self._test_repo_container(key, self.alias).scm_instance()
743
748
744 @property
749 @property
745 def repo(self):
750 def repo(self):
746 """
751 """
747 Returns the "current" repository. This is the vcs_test repo of the last
752 Returns the "current" repository. This is the vcs_test repo of the last
748 repo which has been created.
753 repo which has been created.
749 """
754 """
750 Repository = get_backend(self.alias)
755 Repository = get_backend(self.alias)
751 return Repository(self._repo_path)
756 return Repository(self._repo_path)
752
757
753 @property
758 @property
754 def backend(self):
759 def backend(self):
755 """
760 """
756 Returns the backend implementation class.
761 Returns the backend implementation class.
757 """
762 """
758 return get_backend(self.alias)
763 return get_backend(self.alias)
759
764
760 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
765 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
761 repo_name = self._next_repo_name()
766 repo_name = self._next_repo_name()
762 self._repo_path = get_new_dir(repo_name)
767 self._repo_path = get_new_dir(repo_name)
763 repo_class = get_backend(self.alias)
768 repo_class = get_backend(self.alias)
764 src_url = None
769 src_url = None
765 if _clone_repo:
770 if _clone_repo:
766 src_url = _clone_repo.path
771 src_url = _clone_repo.path
767 repo = repo_class(self._repo_path, create=True, src_url=src_url)
772 repo = repo_class(self._repo_path, create=True, src_url=src_url)
768 self._cleanup_repos.append(repo)
773 self._cleanup_repos.append(repo)
769
774
770 commits = commits or [
775 commits = commits or [
771 {'message': 'Commit %s of %s' % (x, repo_name)}
776 {'message': 'Commit %s of %s' % (x, repo_name)}
772 for x in xrange(number_of_commits)]
777 for x in xrange(number_of_commits)]
773 _add_commits_to_repo(repo, commits)
778 _add_commits_to_repo(repo, commits)
774 return repo
779 return repo
775
780
776 def clone_repo(self, repo):
781 def clone_repo(self, repo):
777 return self.create_repo(_clone_repo=repo)
782 return self.create_repo(_clone_repo=repo)
778
783
779 def cleanup(self):
784 def cleanup(self):
780 for repo in self._cleanup_repos:
785 for repo in self._cleanup_repos:
781 shutil.rmtree(repo.path)
786 shutil.rmtree(repo.path)
782
787
783 def new_repo_path(self):
788 def new_repo_path(self):
784 repo_name = self._next_repo_name()
789 repo_name = self._next_repo_name()
785 self._repo_path = get_new_dir(repo_name)
790 self._repo_path = get_new_dir(repo_name)
786 return self._repo_path
791 return self._repo_path
787
792
788 def _next_repo_name(self):
793 def _next_repo_name(self):
789 return "%s_%s" % (
794 return "%s_%s" % (
790 self.invalid_repo_name.sub('_', self._test_name),
795 self.invalid_repo_name.sub('_', self._test_name),
791 len(self._cleanup_repos))
796 len(self._cleanup_repos))
792
797
793 def add_file(self, repo, filename, content='Test content\n'):
798 def add_file(self, repo, filename, content='Test content\n'):
794 imc = repo.in_memory_commit
799 imc = repo.in_memory_commit
795 imc.add(FileNode(filename, content=content))
800 imc.add(FileNode(filename, content=content))
796 imc.commit(
801 imc.commit(
797 message=u'Automatic commit from vcsbackend fixture',
802 message=u'Automatic commit from vcsbackend fixture',
798 author=u'Automatic')
803 author=u'Automatic')
799
804
800 def ensure_file(self, filename, content='Test content\n'):
805 def ensure_file(self, filename, content='Test content\n'):
801 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
806 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
802 self.add_file(self.repo, filename, content)
807 self.add_file(self.repo, filename, content)
803
808
804
809
805 def _add_commits_to_repo(vcs_repo, commits):
810 def _add_commits_to_repo(vcs_repo, commits):
806 commit_ids = {}
811 commit_ids = {}
807 if not commits:
812 if not commits:
808 return commit_ids
813 return commit_ids
809
814
810 imc = vcs_repo.in_memory_commit
815 imc = vcs_repo.in_memory_commit
811 commit = None
816 commit = None
812
817
813 for idx, commit in enumerate(commits):
818 for idx, commit in enumerate(commits):
814 message = unicode(commit.get('message', 'Commit %s' % idx))
819 message = unicode(commit.get('message', 'Commit %s' % idx))
815
820
816 for node in commit.get('added', []):
821 for node in commit.get('added', []):
817 imc.add(FileNode(node.path, content=node.content))
822 imc.add(FileNode(node.path, content=node.content))
818 for node in commit.get('changed', []):
823 for node in commit.get('changed', []):
819 imc.change(FileNode(node.path, content=node.content))
824 imc.change(FileNode(node.path, content=node.content))
820 for node in commit.get('removed', []):
825 for node in commit.get('removed', []):
821 imc.remove(FileNode(node.path))
826 imc.remove(FileNode(node.path))
822
827
823 parents = [
828 parents = [
824 vcs_repo.get_commit(commit_id=commit_ids[p])
829 vcs_repo.get_commit(commit_id=commit_ids[p])
825 for p in commit.get('parents', [])]
830 for p in commit.get('parents', [])]
826
831
827 operations = ('added', 'changed', 'removed')
832 operations = ('added', 'changed', 'removed')
828 if not any((commit.get(o) for o in operations)):
833 if not any((commit.get(o) for o in operations)):
829 imc.add(FileNode('file_%s' % idx, content=message))
834 imc.add(FileNode('file_%s' % idx, content=message))
830
835
831 commit = imc.commit(
836 commit = imc.commit(
832 message=message,
837 message=message,
833 author=unicode(commit.get('author', 'Automatic')),
838 author=unicode(commit.get('author', 'Automatic')),
834 date=commit.get('date'),
839 date=commit.get('date'),
835 branch=commit.get('branch'),
840 branch=commit.get('branch'),
836 parents=parents)
841 parents=parents)
837
842
838 commit_ids[commit.message] = commit.raw_id
843 commit_ids[commit.message] = commit.raw_id
839
844
840 return commit_ids
845 return commit_ids
841
846
842
847
843 @pytest.fixture
848 @pytest.fixture
844 def reposerver(request):
849 def reposerver(request):
845 """
850 """
846 Allows to serve a backend repository
851 Allows to serve a backend repository
847 """
852 """
848
853
849 repo_server = RepoServer()
854 repo_server = RepoServer()
850 request.addfinalizer(repo_server.cleanup)
855 request.addfinalizer(repo_server.cleanup)
851 return repo_server
856 return repo_server
852
857
853
858
854 class RepoServer(object):
859 class RepoServer(object):
855 """
860 """
856 Utility to serve a local repository for the duration of a test case.
861 Utility to serve a local repository for the duration of a test case.
857
862
858 Supports only Subversion so far.
863 Supports only Subversion so far.
859 """
864 """
860
865
861 url = None
866 url = None
862
867
863 def __init__(self):
868 def __init__(self):
864 self._cleanup_servers = []
869 self._cleanup_servers = []
865
870
866 def serve(self, vcsrepo):
871 def serve(self, vcsrepo):
867 if vcsrepo.alias != 'svn':
872 if vcsrepo.alias != 'svn':
868 raise TypeError("Backend %s not supported" % vcsrepo.alias)
873 raise TypeError("Backend %s not supported" % vcsrepo.alias)
869
874
870 proc = subprocess32.Popen(
875 proc = subprocess32.Popen(
871 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
876 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
872 '--root', vcsrepo.path])
877 '--root', vcsrepo.path])
873 self._cleanup_servers.append(proc)
878 self._cleanup_servers.append(proc)
874 self.url = 'svn://localhost'
879 self.url = 'svn://localhost'
875
880
876 def cleanup(self):
881 def cleanup(self):
877 for proc in self._cleanup_servers:
882 for proc in self._cleanup_servers:
878 proc.terminate()
883 proc.terminate()
879
884
880
885
881 @pytest.fixture
886 @pytest.fixture
882 def pr_util(backend, request):
887 def pr_util(backend, request):
883 """
888 """
884 Utility for tests of models and for functional tests around pull requests.
889 Utility for tests of models and for functional tests around pull requests.
885
890
886 It gives an instance of :class:`PRTestUtility` which provides various
891 It gives an instance of :class:`PRTestUtility` which provides various
887 utility methods around one pull request.
892 utility methods around one pull request.
888
893
889 This fixture uses `backend` and inherits its parameterization.
894 This fixture uses `backend` and inherits its parameterization.
890 """
895 """
891
896
892 util = PRTestUtility(backend)
897 util = PRTestUtility(backend)
893
898
894 @request.addfinalizer
899 @request.addfinalizer
895 def cleanup():
900 def cleanup():
896 util.cleanup()
901 util.cleanup()
897
902
898 return util
903 return util
899
904
900
905
901 class PRTestUtility(object):
906 class PRTestUtility(object):
902
907
903 pull_request = None
908 pull_request = None
904 pull_request_id = None
909 pull_request_id = None
905 mergeable_patcher = None
910 mergeable_patcher = None
906 mergeable_mock = None
911 mergeable_mock = None
907 notification_patcher = None
912 notification_patcher = None
908
913
909 def __init__(self, backend):
914 def __init__(self, backend):
910 self.backend = backend
915 self.backend = backend
911
916
912 def create_pull_request(
917 def create_pull_request(
913 self, commits=None, target_head=None, source_head=None,
918 self, commits=None, target_head=None, source_head=None,
914 revisions=None, approved=False, author=None, mergeable=False,
919 revisions=None, approved=False, author=None, mergeable=False,
915 enable_notifications=True, name_suffix=u'', reviewers=None,
920 enable_notifications=True, name_suffix=u'', reviewers=None,
916 title=u"Test", description=u"Description"):
921 title=u"Test", description=u"Description"):
917 self.set_mergeable(mergeable)
922 self.set_mergeable(mergeable)
918 if not enable_notifications:
923 if not enable_notifications:
919 # mock notification side effect
924 # mock notification side effect
920 self.notification_patcher = mock.patch(
925 self.notification_patcher = mock.patch(
921 'rhodecode.model.notification.NotificationModel.create')
926 'rhodecode.model.notification.NotificationModel.create')
922 self.notification_patcher.start()
927 self.notification_patcher.start()
923
928
924 if not self.pull_request:
929 if not self.pull_request:
925 if not commits:
930 if not commits:
926 commits = [
931 commits = [
927 {'message': 'c1'},
932 {'message': 'c1'},
928 {'message': 'c2'},
933 {'message': 'c2'},
929 {'message': 'c3'},
934 {'message': 'c3'},
930 ]
935 ]
931 target_head = 'c1'
936 target_head = 'c1'
932 source_head = 'c2'
937 source_head = 'c2'
933 revisions = ['c2']
938 revisions = ['c2']
934
939
935 self.commit_ids = self.backend.create_master_repo(commits)
940 self.commit_ids = self.backend.create_master_repo(commits)
936 self.target_repository = self.backend.create_repo(
941 self.target_repository = self.backend.create_repo(
937 heads=[target_head], name_suffix=name_suffix)
942 heads=[target_head], name_suffix=name_suffix)
938 self.source_repository = self.backend.create_repo(
943 self.source_repository = self.backend.create_repo(
939 heads=[source_head], name_suffix=name_suffix)
944 heads=[source_head], name_suffix=name_suffix)
940 self.author = author or UserModel().get_by_username(
945 self.author = author or UserModel().get_by_username(
941 TEST_USER_ADMIN_LOGIN)
946 TEST_USER_ADMIN_LOGIN)
942
947
943 model = PullRequestModel()
948 model = PullRequestModel()
944 self.create_parameters = {
949 self.create_parameters = {
945 'created_by': self.author,
950 'created_by': self.author,
946 'source_repo': self.source_repository.repo_name,
951 'source_repo': self.source_repository.repo_name,
947 'source_ref': self._default_branch_reference(source_head),
952 'source_ref': self._default_branch_reference(source_head),
948 'target_repo': self.target_repository.repo_name,
953 'target_repo': self.target_repository.repo_name,
949 'target_ref': self._default_branch_reference(target_head),
954 'target_ref': self._default_branch_reference(target_head),
950 'revisions': [self.commit_ids[r] for r in revisions],
955 'revisions': [self.commit_ids[r] for r in revisions],
951 'reviewers': reviewers or self._get_reviewers(),
956 'reviewers': reviewers or self._get_reviewers(),
952 'title': title,
957 'title': title,
953 'description': description,
958 'description': description,
954 }
959 }
955 self.pull_request = model.create(**self.create_parameters)
960 self.pull_request = model.create(**self.create_parameters)
956 assert model.get_versions(self.pull_request) == []
961 assert model.get_versions(self.pull_request) == []
957
962
958 self.pull_request_id = self.pull_request.pull_request_id
963 self.pull_request_id = self.pull_request.pull_request_id
959
964
960 if approved:
965 if approved:
961 self.approve()
966 self.approve()
962
967
963 Session().add(self.pull_request)
968 Session().add(self.pull_request)
964 Session().commit()
969 Session().commit()
965
970
966 return self.pull_request
971 return self.pull_request
967
972
968 def approve(self):
973 def approve(self):
969 self.create_status_votes(
974 self.create_status_votes(
970 ChangesetStatus.STATUS_APPROVED,
975 ChangesetStatus.STATUS_APPROVED,
971 *self.pull_request.reviewers)
976 *self.pull_request.reviewers)
972
977
973 def close(self):
978 def close(self):
974 PullRequestModel().close_pull_request(self.pull_request, self.author)
979 PullRequestModel().close_pull_request(self.pull_request, self.author)
975
980
976 def _default_branch_reference(self, commit_message):
981 def _default_branch_reference(self, commit_message):
977 reference = '%s:%s:%s' % (
982 reference = '%s:%s:%s' % (
978 'branch',
983 'branch',
979 self.backend.default_branch_name,
984 self.backend.default_branch_name,
980 self.commit_ids[commit_message])
985 self.commit_ids[commit_message])
981 return reference
986 return reference
982
987
983 def _get_reviewers(self):
988 def _get_reviewers(self):
984 model = UserModel()
989 model = UserModel()
985 return [
990 return [
986 model.get_by_username(TEST_USER_REGULAR_LOGIN),
991 model.get_by_username(TEST_USER_REGULAR_LOGIN),
987 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
992 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
988 ]
993 ]
989
994
990 def update_source_repository(self, head=None):
995 def update_source_repository(self, head=None):
991 heads = [head or 'c3']
996 heads = [head or 'c3']
992 self.backend.pull_heads(self.source_repository, heads=heads)
997 self.backend.pull_heads(self.source_repository, heads=heads)
993
998
994 def add_one_commit(self, head=None):
999 def add_one_commit(self, head=None):
995 self.update_source_repository(head=head)
1000 self.update_source_repository(head=head)
996 old_commit_ids = set(self.pull_request.revisions)
1001 old_commit_ids = set(self.pull_request.revisions)
997 PullRequestModel().update_commits(self.pull_request)
1002 PullRequestModel().update_commits(self.pull_request)
998 commit_ids = set(self.pull_request.revisions)
1003 commit_ids = set(self.pull_request.revisions)
999 new_commit_ids = commit_ids - old_commit_ids
1004 new_commit_ids = commit_ids - old_commit_ids
1000 assert len(new_commit_ids) == 1
1005 assert len(new_commit_ids) == 1
1001 return new_commit_ids.pop()
1006 return new_commit_ids.pop()
1002
1007
def remove_one_commit(self):
    """
    Strip the latest commit from the source repository and refresh the
    pull request, returning the id of the removed commit.
    """
    assert len(self.pull_request.revisions) == 2
    source_vcs = self.source_repository.scm_instance()
    stripped_commit_id = source_vcs.commit_ids[-1]

    # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
    # remove the if once that's sorted out.
    if self.backend.alias == "git":
        strip_kwargs = {'branch_name': self.backend.default_branch_name}
    else:
        strip_kwargs = {}
    source_vcs.strip(stripped_commit_id, **strip_kwargs)

    PullRequestModel().update_commits(self.pull_request)
    # After stripping only the first of the two commits remains.
    assert len(self.pull_request.revisions) == 1
    return stripped_commit_id
1019
1024
def create_comment(self, linked_to=None):
    """
    Create a general (non-inline) comment on the pull request.

    If `linked_to` is given (a pull request version), existing comments
    are linked to that version afterwards.
    """
    comment = CommentsModel().create(
        text=u"Test comment",
        repo=self.target_repository.repo_name,
        user=self.author,
        pull_request=self.pull_request)
    # A freshly created comment is not attached to any version yet.
    assert comment.pull_request_version_id is None

    if linked_to:
        PullRequestModel()._link_comments_to_version(linked_to)

    return comment
1032
1037
def create_inline_comment(
        self, linked_to=None, line_no=u'n1', file_path='file_1'):
    """
    Create an inline comment on `file_path` at `line_no` of the pull
    request.

    If `linked_to` is given (a pull request version), existing comments
    are linked to that version afterwards.
    """
    comment = CommentsModel().create(
        text=u"Test comment",
        repo=self.target_repository.repo_name,
        user=self.author,
        line_no=line_no,
        f_path=file_path,
        pull_request=self.pull_request)
    # A freshly created comment is not attached to any version yet.
    assert comment.pull_request_version_id is None

    if linked_to:
        PullRequestModel()._link_comments_to_version(linked_to)

    return comment
1048
1053
def create_version_of_pull_request(self):
    """
    Create a pull request and snapshot it into a new version, returning
    the version object.
    """
    pull_request = self.create_pull_request()
    return PullRequestModel()._create_version_from_snapshot(pull_request)
1054
1059
def create_status_votes(self, status, *reviewers):
    """
    Record the given changeset `status` on the pull request on behalf of
    every reviewer in `reviewers`.
    """
    for voter in reviewers:
        ChangesetStatusModel().set_status(
            repo=self.pull_request.target_repo,
            status=status,
            user=voter.user_id,
            pull_request=self.pull_request)
1062
1067
def set_mergeable(self, value):
    """
    Patch the general VCS settings so that pull request merging is
    reported as enabled or disabled according to `value`.
    """
    if not self.mergeable_patcher:
        # Start the patcher lazily on first use; `cleanup` stops it.
        self.mergeable_patcher = mock.patch.object(
            VcsSettingsModel, 'get_general_settings')
        self.mergeable_mock = self.mergeable_patcher.start()
    self.mergeable_mock.return_value = {'rhodecode_pr_merge_enabled': value}
1070
1075
def cleanup(self):
    """
    Delete the pull request (if it still exists) and stop any mock
    patchers which were started by this helper.
    """
    # In case the source repository is already cleaned up, the pull
    # request will already be deleted.
    pull_request = PullRequest().get(self.pull_request_id)
    if pull_request:
        PullRequestModel().delete(pull_request)
        Session().commit()

    for patcher in (self.notification_patcher, self.mergeable_patcher):
        if patcher:
            patcher.stop()
1084
1089
1085
1090
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1093
1098
1094
1099
@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1102
1107
1103
1108
@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1112
1117
1113
1118
# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Test helper which creates users, user groups, repositories and
    repository groups, and remembers everything it created so that
    :meth:`cleanup` can destroy it again in a safe order.
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Ids of created entities, tracked for cleanup.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # Granted permissions, tracked as (target_id, subject_id) pairs.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        # Global permissions as (user_name, permission_name) pairs.
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Square brackets (e.g. from parametrized test node names) are
        # replaced since they are not safe inside entity names.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repository group with a unique, test-derived name."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg'):
        """Create a repository with a unique, test-derived name."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a unique, test-derived name."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a user group, optionally adding the given members."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """Grant a global permission to a user (tracked for cleanup)."""
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission from a user."""
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Destroy everything this utility created, in dependency order."""
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _group_depth(repo_group_id):
            # Number of path segments in the group name; groups with the
            # most complex paths have to be destroyed first.
            group = RepoGroup.get(repo_group_id)
            return len(group.group_name.split('/')) if group else 0

        # NOTE: replaces the former Python-2-only `sorted(..., cmp=...)`
        # usage with an equivalent stable key-based descending sort.
        for repo_group_id in sorted(
                self.repo_group_ids, key=_group_depth, reverse=True):
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        for repo_id in sorted(self.repos_ids):
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _group_depth(user_group_id):
            # Number of path segments in the group name; groups with the
            # most complex paths have to be destroyed first.
            group = UserGroup.get(user_group_id)
            return (
                len(group.users_group_name.split('/')) if group else 0)

        # NOTE: replaces the former Python-2-only `sorted(..., cmp=...)`
        # usage with an equivalent stable key-based descending sort.
        for user_group_id in sorted(
                self.user_group_ids, key=_group_depth, reverse=True):
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1327
1332
1328
1333
# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute `_vcs_server_traceback`
    to the exception instance.
    """
    outcome = yield
    test_report = outcome.get_result()
    # Only failed phases carry exception info.
    if call.excinfo:
        _add_vcsserver_remote_traceback(test_report, call.excinfo.value)
1343
1348
1344
1349
1345 def _add_vcsserver_remote_traceback(report, exc):
1350 def _add_vcsserver_remote_traceback(report, exc):
1346 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1351 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1347
1352
1348 if vcsserver_traceback:
1353 if vcsserver_traceback:
1349 section = 'VCSServer remote traceback ' + report.when
1354 section = 'VCSServer remote traceback ' + report.when
1350 report.sections.append((section, vcsserver_traceback))
1355 report.sections.append((section, vcsserver_traceback))
1351
1356
1352
1357
@pytest.fixture(scope='session')
def testrun():
    """
    Session-wide metadata identifying this test run: a unique id plus
    the start time as ISO string and as unix timestamp.
    """
    run_info = {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
    return run_info
1360
1365
1361
1366
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumtion of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The option
    ``--ae`` has te be used to enable this fixture and the API key for your
    application has to be provided in ``--ae-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Sample the VCSServer process memory before the test runs.
    server_and_port = pylonsapp.config['vcs.server']
    protocol = pylonsapp.config['vcs.server.protocol']
    vcsserver = create_vcsserver_proxy(server_and_port, protocol)
    with vcsserver:
        vcsserver_pid = vcsserver.get_pid()
        vcsserver.run_gc()
        vcsserver_process = psutil.Process(vcsserver_pid)
        usage = vcsserver_process.memory_info()
        client.tag_before('vcsserver.rss', usage.rss)
        client.tag_before('vcsserver.vms', usage.vms)

    # Sample the test process memory before the test runs.
    test_process = psutil.Process()
    usage = test_process.memory_info()
    client.tag_before('test.rss', usage.rss)
    client.tag_before('test.vms', usage.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Sample again after the test and push everything to Appenlight.
        client.tag_after('time', time.time())
        with vcsserver:
            collector_stats = vcsserver.run_gc()
            for tag, value in collector_stats.items():
                client.tag_after(tag, value)
            usage = vcsserver_process.memory_info()
            client.tag_after('vcsserver.rss', usage.rss)
            client.tag_after('vcsserver.vms', usage.vms)

        usage = test_process.memory_info()
        client.tag_after('test.rss', usage.rss)
        client.tag_after('test.vms', usage.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1435
1440
1436
1441
class AppenlightClient():
    """
    Small client which queues log entries and before/after tag values and
    pushes them to an Appenlight server via its HTTP API.
    """

    # Query string pins the Appenlight wire protocol version.
    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        """
        :param url: base URL of the Appenlight logs endpoint.
        :param api_key: API key sent in the `X-appenlight-api-key` header.
        :param add_server: add this host's FQDN to collected entries.
        :param add_timestamp: add the current UTC time to collected entries.
        :param namespace: optional namespace added to collected entries.
        :param request: optional request id added to collected entries.
        :param testrun: optional dict with testrun metadata.
        """
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # Value of `tag` sampled before the test ran.
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # Value of `tag` sampled after the test ran.
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in the configured defaults."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """
        Build the tag summary (before/after/delta values), queue it and
        POST all queued entries to the Appenlight server.

        :raises Exception: if the server does not answer with status 200.
        """
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # Delta only makes sense for numeric before/after pairs;
                # anything else is silently skipped.
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            pprint.pprint(self.stats)
            # Fixed: function-style print works in both Python 2 and 3;
            # the former `print response.headers` statement form is a
            # SyntaxError under Python 3.
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1506
1511
1507
1512
1508 @pytest.fixture
1513 @pytest.fixture
1509 def gist_util(request, pylonsapp):
1514 def gist_util(request, pylonsapp):
1510 """
1515 """
1511 Provides a wired instance of `GistUtility` with integrated cleanup.
1516 Provides a wired instance of `GistUtility` with integrated cleanup.
1512 """
1517 """
1513 utility = GistUtility()
1518 utility = GistUtility()
1514 request.addfinalizer(utility.cleanup)
1519 request.addfinalizer(utility.cleanup)
1515 return utility
1520 return utility
1516
1521
1517
1522
1518 class GistUtility(object):
1523 class GistUtility(object):
1519 def __init__(self):
1524 def __init__(self):
1520 self.fixture = Fixture()
1525 self.fixture = Fixture()
1521 self.gist_ids = []
1526 self.gist_ids = []
1522
1527
1523 def create_gist(self, **kwargs):
1528 def create_gist(self, **kwargs):
1524 gist = self.fixture.create_gist(**kwargs)
1529 gist = self.fixture.create_gist(**kwargs)
1525 self.gist_ids.append(gist.gist_id)
1530 self.gist_ids.append(gist.gist_id)
1526 return gist
1531 return gist
1527
1532
1528 def cleanup(self):
1533 def cleanup(self):
1529 for id_ in self.gist_ids:
1534 for id_ in self.gist_ids:
1530 self.fixture.destroy_gists(str(id_))
1535 self.fixture.destroy_gists(str(id_))
1531
1536
1532
1537
1533 @pytest.fixture
1538 @pytest.fixture
1534 def enabled_backends(request):
1539 def enabled_backends(request):
1535 backends = request.config.option.backends
1540 backends = request.config.option.backends
1536 return backends[:]
1541 return backends[:]
1537
1542
1538
1543
1539 @pytest.fixture
1544 @pytest.fixture
1540 def settings_util(request):
1545 def settings_util(request):
1541 """
1546 """
1542 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1547 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1543 """
1548 """
1544 utility = SettingsUtility()
1549 utility = SettingsUtility()
1545 request.addfinalizer(utility.cleanup)
1550 request.addfinalizer(utility.cleanup)
1546 return utility
1551 return utility
1547
1552
1548
1553
1549 class SettingsUtility(object):
1554 class SettingsUtility(object):
1550 def __init__(self):
1555 def __init__(self):
1551 self.rhodecode_ui_ids = []
1556 self.rhodecode_ui_ids = []
1552 self.rhodecode_setting_ids = []
1557 self.rhodecode_setting_ids = []
1553 self.repo_rhodecode_ui_ids = []
1558 self.repo_rhodecode_ui_ids = []
1554 self.repo_rhodecode_setting_ids = []
1559 self.repo_rhodecode_setting_ids = []
1555
1560
1556 def create_repo_rhodecode_ui(
1561 def create_repo_rhodecode_ui(
1557 self, repo, section, value, key=None, active=True, cleanup=True):
1562 self, repo, section, value, key=None, active=True, cleanup=True):
1558 key = key or hashlib.sha1(
1563 key = key or hashlib.sha1(
1559 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1564 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1560
1565
1561 setting = RepoRhodeCodeUi()
1566 setting = RepoRhodeCodeUi()
1562 setting.repository_id = repo.repo_id
1567 setting.repository_id = repo.repo_id
1563 setting.ui_section = section
1568 setting.ui_section = section
1564 setting.ui_value = value
1569 setting.ui_value = value
1565 setting.ui_key = key
1570 setting.ui_key = key
1566 setting.ui_active = active
1571 setting.ui_active = active
1567 Session().add(setting)
1572 Session().add(setting)
1568 Session().commit()
1573 Session().commit()
1569
1574
1570 if cleanup:
1575 if cleanup:
1571 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1576 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1572 return setting
1577 return setting
1573
1578
1574 def create_rhodecode_ui(
1579 def create_rhodecode_ui(
1575 self, section, value, key=None, active=True, cleanup=True):
1580 self, section, value, key=None, active=True, cleanup=True):
1576 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1581 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1577
1582
1578 setting = RhodeCodeUi()
1583 setting = RhodeCodeUi()
1579 setting.ui_section = section
1584 setting.ui_section = section
1580 setting.ui_value = value
1585 setting.ui_value = value
1581 setting.ui_key = key
1586 setting.ui_key = key
1582 setting.ui_active = active
1587 setting.ui_active = active
1583 Session().add(setting)
1588 Session().add(setting)
1584 Session().commit()
1589 Session().commit()
1585
1590
1586 if cleanup:
1591 if cleanup:
1587 self.rhodecode_ui_ids.append(setting.ui_id)
1592 self.rhodecode_ui_ids.append(setting.ui_id)
1588 return setting
1593 return setting
1589
1594
1590 def create_repo_rhodecode_setting(
1595 def create_repo_rhodecode_setting(
1591 self, repo, name, value, type_, cleanup=True):
1596 self, repo, name, value, type_, cleanup=True):
1592 setting = RepoRhodeCodeSetting(
1597 setting = RepoRhodeCodeSetting(
1593 repo.repo_id, key=name, val=value, type=type_)
1598 repo.repo_id, key=name, val=value, type=type_)
1594 Session().add(setting)
1599 Session().add(setting)
1595 Session().commit()
1600 Session().commit()
1596
1601
1597 if cleanup:
1602 if cleanup:
1598 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1603 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1599 return setting
1604 return setting
1600
1605
1601 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1606 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1602 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1607 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1603 Session().add(setting)
1608 Session().add(setting)
1604 Session().commit()
1609 Session().commit()
1605
1610
1606 if cleanup:
1611 if cleanup:
1607 self.rhodecode_setting_ids.append(setting.app_settings_id)
1612 self.rhodecode_setting_ids.append(setting.app_settings_id)
1608
1613
1609 return setting
1614 return setting
1610
1615
1611 def cleanup(self):
1616 def cleanup(self):
1612 for id_ in self.rhodecode_ui_ids:
1617 for id_ in self.rhodecode_ui_ids:
1613 setting = RhodeCodeUi.get(id_)
1618 setting = RhodeCodeUi.get(id_)
1614 Session().delete(setting)
1619 Session().delete(setting)
1615
1620
1616 for id_ in self.rhodecode_setting_ids:
1621 for id_ in self.rhodecode_setting_ids:
1617 setting = RhodeCodeSetting.get(id_)
1622 setting = RhodeCodeSetting.get(id_)
1618 Session().delete(setting)
1623 Session().delete(setting)
1619
1624
1620 for id_ in self.repo_rhodecode_ui_ids:
1625 for id_ in self.repo_rhodecode_ui_ids:
1621 setting = RepoRhodeCodeUi.get(id_)
1626 setting = RepoRhodeCodeUi.get(id_)
1622 Session().delete(setting)
1627 Session().delete(setting)
1623
1628
1624 for id_ in self.repo_rhodecode_setting_ids:
1629 for id_ in self.repo_rhodecode_setting_ids:
1625 setting = RepoRhodeCodeSetting.get(id_)
1630 setting = RepoRhodeCodeSetting.get(id_)
1626 Session().delete(setting)
1631 Session().delete(setting)
1627
1632
1628 Session().commit()
1633 Session().commit()
1629
1634
1630
1635
1631 @pytest.fixture
1636 @pytest.fixture
1632 def no_notifications(request):
1637 def no_notifications(request):
1633 notification_patcher = mock.patch(
1638 notification_patcher = mock.patch(
1634 'rhodecode.model.notification.NotificationModel.create')
1639 'rhodecode.model.notification.NotificationModel.create')
1635 notification_patcher.start()
1640 notification_patcher.start()
1636 request.addfinalizer(notification_patcher.stop)
1641 request.addfinalizer(notification_patcher.stop)
1637
1642
1638
1643
1639 @pytest.fixture
1644 @pytest.fixture
1640 def silence_action_logger(request):
1645 def silence_action_logger(request):
1641 notification_patcher = mock.patch(
1646 notification_patcher = mock.patch(
1642 'rhodecode.lib.utils.action_logger')
1647 'rhodecode.lib.utils.action_logger')
1643 notification_patcher.start()
1648 notification_patcher.start()
1644 request.addfinalizer(notification_patcher.stop)
1649 request.addfinalizer(notification_patcher.stop)
1645
1650
1646
1651
1647 @pytest.fixture(scope='session')
1652 @pytest.fixture(scope='session')
1648 def repeat(request):
1653 def repeat(request):
1649 """
1654 """
1650 The number of repetitions is based on this fixture.
1655 The number of repetitions is based on this fixture.
1651
1656
1652 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1657 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1653 tests are not too slow in our default test suite.
1658 tests are not too slow in our default test suite.
1654 """
1659 """
1655 return request.config.getoption('--repeat')
1660 return request.config.getoption('--repeat')
1656
1661
1657
1662
1658 @pytest.fixture
1663 @pytest.fixture
1659 def rhodecode_fixtures():
1664 def rhodecode_fixtures():
1660 return Fixture()
1665 return Fixture()
1661
1666
1662
1667
1663 @pytest.fixture
1668 @pytest.fixture
1664 def request_stub():
1669 def request_stub():
1665 """
1670 """
1666 Stub request object.
1671 Stub request object.
1667 """
1672 """
1668 request = pyramid.testing.DummyRequest()
1673 request = pyramid.testing.DummyRequest()
1669 request.scheme = 'https'
1674 request.scheme = 'https'
1670 return request
1675 return request
1671
1676
1672
1677
1673 @pytest.fixture
1678 @pytest.fixture
1674 def config_stub(request, request_stub):
1679 def config_stub(request, request_stub):
1675 """
1680 """
1676 Set up pyramid.testing and return the Configurator.
1681 Set up pyramid.testing and return the Configurator.
1677 """
1682 """
1678 config = pyramid.testing.setUp(request=request_stub)
1683 config = pyramid.testing.setUp(request=request_stub)
1679
1684
1680 @request.addfinalizer
1685 @request.addfinalizer
1681 def cleanup():
1686 def cleanup():
1682 pyramid.testing.tearDown()
1687 pyramid.testing.tearDown()
1683
1688
1684 return config
1689 return config
1685
1690
1686
1691
1687 @pytest.fixture
1692 @pytest.fixture
1688 def StubIntegrationType():
1693 def StubIntegrationType():
1689 class _StubIntegrationType(IntegrationTypeBase):
1694 class _StubIntegrationType(IntegrationTypeBase):
1690 """ Test integration type class """
1695 """ Test integration type class """
1691
1696
1692 key = 'test'
1697 key = 'test'
1693 display_name = 'Test integration type'
1698 display_name = 'Test integration type'
1694 description = 'A test integration type for testing'
1699 description = 'A test integration type for testing'
1695 icon = 'test_icon_html_image'
1700 icon = 'test_icon_html_image'
1696
1701
1697 def __init__(self, settings):
1702 def __init__(self, settings):
1698 super(_StubIntegrationType, self).__init__(settings)
1703 super(_StubIntegrationType, self).__init__(settings)
1699 self.sent_events = [] # for testing
1704 self.sent_events = [] # for testing
1700
1705
1701 def send_event(self, event):
1706 def send_event(self, event):
1702 self.sent_events.append(event)
1707 self.sent_events.append(event)
1703
1708
1704 def settings_schema(self):
1709 def settings_schema(self):
1705 class SettingsSchema(colander.Schema):
1710 class SettingsSchema(colander.Schema):
1706 test_string_field = colander.SchemaNode(
1711 test_string_field = colander.SchemaNode(
1707 colander.String(),
1712 colander.String(),
1708 missing=colander.required,
1713 missing=colander.required,
1709 title='test string field',
1714 title='test string field',
1710 )
1715 )
1711 test_int_field = colander.SchemaNode(
1716 test_int_field = colander.SchemaNode(
1712 colander.Int(),
1717 colander.Int(),
1713 title='some integer setting',
1718 title='some integer setting',
1714 )
1719 )
1715 return SettingsSchema()
1720 return SettingsSchema()
1716
1721
1717
1722
1718 integration_type_registry.register_integration_type(_StubIntegrationType)
1723 integration_type_registry.register_integration_type(_StubIntegrationType)
1719 return _StubIntegrationType
1724 return _StubIntegrationType
1720
1725
1721 @pytest.fixture
1726 @pytest.fixture
1722 def stub_integration_settings():
1727 def stub_integration_settings():
1723 return {
1728 return {
1724 'test_string_field': 'some data',
1729 'test_string_field': 'some data',
1725 'test_int_field': 100,
1730 'test_int_field': 100,
1726 }
1731 }
1727
1732
1728
1733
1729 @pytest.fixture
1734 @pytest.fixture
1730 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1735 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1731 stub_integration_settings):
1736 stub_integration_settings):
1732 integration = IntegrationModel().create(
1737 integration = IntegrationModel().create(
1733 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1738 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1734 name='test repo integration',
1739 name='test repo integration',
1735 repo=repo_stub, repo_group=None, child_repos_only=None)
1740 repo=repo_stub, repo_group=None, child_repos_only=None)
1736
1741
1737 @request.addfinalizer
1742 @request.addfinalizer
1738 def cleanup():
1743 def cleanup():
1739 IntegrationModel().delete(integration)
1744 IntegrationModel().delete(integration)
1740
1745
1741 return integration
1746 return integration
1742
1747
1743
1748
1744 @pytest.fixture
1749 @pytest.fixture
1745 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1750 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1746 stub_integration_settings):
1751 stub_integration_settings):
1747 integration = IntegrationModel().create(
1752 integration = IntegrationModel().create(
1748 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1753 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1749 name='test repogroup integration',
1754 name='test repogroup integration',
1750 repo=None, repo_group=test_repo_group, child_repos_only=True)
1755 repo=None, repo_group=test_repo_group, child_repos_only=True)
1751
1756
1752 @request.addfinalizer
1757 @request.addfinalizer
1753 def cleanup():
1758 def cleanup():
1754 IntegrationModel().delete(integration)
1759 IntegrationModel().delete(integration)
1755
1760
1756 return integration
1761 return integration
1757
1762
1758
1763
1759 @pytest.fixture
1764 @pytest.fixture
1760 def repogroup_recursive_integration_stub(request, test_repo_group,
1765 def repogroup_recursive_integration_stub(request, test_repo_group,
1761 StubIntegrationType, stub_integration_settings):
1766 StubIntegrationType, stub_integration_settings):
1762 integration = IntegrationModel().create(
1767 integration = IntegrationModel().create(
1763 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1768 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1764 name='test recursive repogroup integration',
1769 name='test recursive repogroup integration',
1765 repo=None, repo_group=test_repo_group, child_repos_only=False)
1770 repo=None, repo_group=test_repo_group, child_repos_only=False)
1766
1771
1767 @request.addfinalizer
1772 @request.addfinalizer
1768 def cleanup():
1773 def cleanup():
1769 IntegrationModel().delete(integration)
1774 IntegrationModel().delete(integration)
1770
1775
1771 return integration
1776 return integration
1772
1777
1773
1778
1774 @pytest.fixture
1779 @pytest.fixture
1775 def global_integration_stub(request, StubIntegrationType,
1780 def global_integration_stub(request, StubIntegrationType,
1776 stub_integration_settings):
1781 stub_integration_settings):
1777 integration = IntegrationModel().create(
1782 integration = IntegrationModel().create(
1778 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1783 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1779 name='test global integration',
1784 name='test global integration',
1780 repo=None, repo_group=None, child_repos_only=None)
1785 repo=None, repo_group=None, child_repos_only=None)
1781
1786
1782 @request.addfinalizer
1787 @request.addfinalizer
1783 def cleanup():
1788 def cleanup():
1784 IntegrationModel().delete(integration)
1789 IntegrationModel().delete(integration)
1785
1790
1786 return integration
1791 return integration
1787
1792
1788
1793
1789 @pytest.fixture
1794 @pytest.fixture
1790 def root_repos_integration_stub(request, StubIntegrationType,
1795 def root_repos_integration_stub(request, StubIntegrationType,
1791 stub_integration_settings):
1796 stub_integration_settings):
1792 integration = IntegrationModel().create(
1797 integration = IntegrationModel().create(
1793 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1798 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1794 name='test global integration',
1799 name='test global integration',
1795 repo=None, repo_group=None, child_repos_only=True)
1800 repo=None, repo_group=None, child_repos_only=True)
1796
1801
1797 @request.addfinalizer
1802 @request.addfinalizer
1798 def cleanup():
1803 def cleanup():
1799 IntegrationModel().delete(integration)
1804 IntegrationModel().delete(integration)
1800
1805
1801 return integration
1806 return integration
1802
1807
1803
1808
1804 @pytest.fixture
1809 @pytest.fixture
1805 def local_dt_to_utc():
1810 def local_dt_to_utc():
1806 def _factory(dt):
1811 def _factory(dt):
1807 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1812 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1808 dateutil.tz.tzutc()).replace(tzinfo=None)
1813 dateutil.tz.tzutc()).replace(tzinfo=None)
1809 return _factory
1814 return _factory
@@ -1,1242 +1,1269 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25
26
26 import pytest
27 import pytest
27
28
29 from rhodecode.lib.utils import make_db_config
28 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
29 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
30 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
31 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError
34 RepositoryError, VCSError, NodeDoesNotExistError)
33 )
34 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
35 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests.vcs.base import BackendTestMixin
38 from rhodecode.tests.vcs.base import BackendTestMixin
38
39
39
40
40 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
41
42
42
43
43 def repo_path_generator():
44 def repo_path_generator():
44 """
45 """
45 Return a different path to be used for cloning repos.
46 Return a different path to be used for cloning repos.
46 """
47 """
47 i = 0
48 i = 0
48 while True:
49 while True:
49 i += 1
50 i += 1
50 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51
52
52
53
53 REPO_PATH_GENERATOR = repo_path_generator()
54 REPO_PATH_GENERATOR = repo_path_generator()
54
55
55
56
56 class TestGitRepository:
57 class TestGitRepository:
57
58
58 # pylint: disable=protected-access
59 # pylint: disable=protected-access
59
60
60 def __check_for_existing_repo(self):
61 def __check_for_existing_repo(self):
61 if os.path.exists(TEST_GIT_REPO_CLONE):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 self.fail('Cannot test git clone repo as location %s already '
63 self.fail('Cannot test git clone repo as location %s already '
63 'exists. You should manually remove it first.'
64 'exists. You should manually remove it first.'
64 % TEST_GIT_REPO_CLONE)
65 % TEST_GIT_REPO_CLONE)
65
66
66 @pytest.fixture(autouse=True)
67 @pytest.fixture(autouse=True)
67 def prepare(self, request, pylonsapp):
68 def prepare(self, request, pylonsapp):
68 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69
70
70 def get_clone_repo(self):
71 def get_clone_repo(self):
71 """
72 """
72 Return a non bare clone of the base repo.
73 Return a non bare clone of the base repo.
73 """
74 """
74 clone_path = next(REPO_PATH_GENERATOR)
75 clone_path = next(REPO_PATH_GENERATOR)
75 repo_clone = GitRepository(
76 repo_clone = GitRepository(
76 clone_path, create=True, src_url=self.repo.path, bare=False)
77 clone_path, create=True, src_url=self.repo.path, bare=False)
77
78
78 return repo_clone
79 return repo_clone
79
80
80 def get_empty_repo(self, bare=False):
81 def get_empty_repo(self, bare=False):
81 """
82 """
82 Return a non bare empty repo.
83 Return a non bare empty repo.
83 """
84 """
84 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85
86
86 def test_wrong_repo_path(self):
87 def test_wrong_repo_path(self):
87 wrong_repo_path = '/tmp/errorrepo'
88 wrong_repo_path = '/tmp/errorrepo'
88 with pytest.raises(RepositoryError):
89 with pytest.raises(RepositoryError):
89 GitRepository(wrong_repo_path)
90 GitRepository(wrong_repo_path)
90
91
91 def test_repo_clone(self):
92 def test_repo_clone(self):
92 self.__check_for_existing_repo()
93 self.__check_for_existing_repo()
93 repo = GitRepository(TEST_GIT_REPO)
94 repo = GitRepository(TEST_GIT_REPO)
94 repo_clone = GitRepository(
95 repo_clone = GitRepository(
95 TEST_GIT_REPO_CLONE,
96 TEST_GIT_REPO_CLONE,
96 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
97 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
97 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 # Checking hashes of commits should be enough
99 # Checking hashes of commits should be enough
99 for commit in repo.get_commits():
100 for commit in repo.get_commits():
100 raw_id = commit.raw_id
101 raw_id = commit.raw_id
101 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102
103
103 def test_repo_clone_without_create(self):
104 def test_repo_clone_without_create(self):
104 with pytest.raises(RepositoryError):
105 with pytest.raises(RepositoryError):
105 GitRepository(
106 GitRepository(
106 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107
108
108 def test_repo_clone_with_update(self):
109 def test_repo_clone_with_update(self):
109 repo = GitRepository(TEST_GIT_REPO)
110 repo = GitRepository(TEST_GIT_REPO)
110 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 repo_clone = GitRepository(
112 repo_clone = GitRepository(
112 clone_path,
113 clone_path,
113 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
114 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
114 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115
116
116 # check if current workdir was updated
117 # check if current workdir was updated
117 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 assert os.path.isfile(fpath)
119 assert os.path.isfile(fpath)
119
120
120 def test_repo_clone_without_update(self):
121 def test_repo_clone_without_update(self):
121 repo = GitRepository(TEST_GIT_REPO)
122 repo = GitRepository(TEST_GIT_REPO)
122 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 repo_clone = GitRepository(
124 repo_clone = GitRepository(
124 clone_path,
125 clone_path,
125 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
126 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 # check if current workdir was *NOT* updated
128 # check if current workdir was *NOT* updated
128 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 # Make sure it's not bare repo
130 # Make sure it's not bare repo
130 assert not repo_clone.bare
131 assert not repo_clone.bare
131 assert not os.path.isfile(fpath)
132 assert not os.path.isfile(fpath)
132
133
133 def test_repo_clone_into_bare_repo(self):
134 def test_repo_clone_into_bare_repo(self):
134 repo = GitRepository(TEST_GIT_REPO)
135 repo = GitRepository(TEST_GIT_REPO)
135 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 repo_clone = GitRepository(
137 repo_clone = GitRepository(
137 clone_path, create=True, src_url=repo.path, bare=True)
138 clone_path, create=True, src_url=repo.path, bare=True)
138 assert repo_clone.bare
139 assert repo_clone.bare
139
140
140 def test_create_repo_is_not_bare_by_default(self):
141 def test_create_repo_is_not_bare_by_default(self):
141 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 assert not repo.bare
143 assert not repo.bare
143
144
144 def test_create_bare_repo(self):
145 def test_create_bare_repo(self):
145 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 assert repo.bare
147 assert repo.bare
147
148
148 def test_update_server_info(self):
149 def test_update_server_info(self):
149 self.repo._update_server_info()
150 self.repo._update_server_info()
150
151
151 def test_fetch(self, vcsbackend_git):
152 def test_fetch(self, vcsbackend_git):
152 # Note: This is a git specific part of the API, it's only implemented
153 # Note: This is a git specific part of the API, it's only implemented
153 # by the git backend.
154 # by the git backend.
154 source_repo = vcsbackend_git.repo
155 source_repo = vcsbackend_git.repo
155 target_repo = vcsbackend_git.create_repo()
156 target_repo = vcsbackend_git.create_repo()
156 target_repo.fetch(source_repo.path)
157 target_repo.fetch(source_repo.path)
157 # Note: Get a fresh instance, avoids caching trouble
158 # Note: Get a fresh instance, avoids caching trouble
158 target_repo = vcsbackend_git.backend(target_repo.path)
159 target_repo = vcsbackend_git.backend(target_repo.path)
159 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160
161
161 def test_commit_ids(self):
162 def test_commit_ids(self):
162 # there are 112 commits (by now)
163 # there are 112 commits (by now)
163 # so we can assume they would be available from now on
164 # so we can assume they would be available from now on
164 subset = set([
165 subset = set([
165 'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 '102607b09cdd60e2793929c4f90478be29f85a17',
169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 '2d1028c054665b962fa3d307adfc923ddd528038',
171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 '8430a588b43b5d6da365400117c89400326e7992',
175 '8430a588b43b5d6da365400117c89400326e7992',
175 'd955cd312c17b02143c04fa1099a352b04368118',
176 'd955cd312c17b02143c04fa1099a352b04368118',
176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
190 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
190 assert subset.issubset(set(self.repo.commit_ids))
191 assert subset.issubset(set(self.repo.commit_ids))
191
192
192 def test_slicing(self):
193 def test_slicing(self):
193 # 4 1 5 10 95
194 # 4 1 5 10 95
194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 (10, 20, 10), (5, 100, 95)]:
196 (10, 20, 10), (5, 100, 95)]:
196 commit_ids = list(self.repo[sfrom:sto])
197 commit_ids = list(self.repo[sfrom:sto])
197 assert len(commit_ids) == size
198 assert len(commit_ids) == size
198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200
201
201 def test_branches(self):
202 def test_branches(self):
202 # TODO: Need more tests here
203 # TODO: Need more tests here
203 # Removed (those are 'remotes' branches for cloned repo)
204 # Removed (those are 'remotes' branches for cloned repo)
204 # assert 'master' in self.repo.branches
205 # assert 'master' in self.repo.branches
205 # assert 'gittree' in self.repo.branches
206 # assert 'gittree' in self.repo.branches
206 # assert 'web-branch' in self.repo.branches
207 # assert 'web-branch' in self.repo.branches
207 for __, commit_id in self.repo.branches.items():
208 for __, commit_id in self.repo.branches.items():
208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209
210
210 def test_tags(self):
211 def test_tags(self):
211 # TODO: Need more tests here
212 # TODO: Need more tests here
212 assert 'v0.1.1' in self.repo.tags
213 assert 'v0.1.1' in self.repo.tags
213 assert 'v0.1.2' in self.repo.tags
214 assert 'v0.1.2' in self.repo.tags
214 for __, commit_id in self.repo.tags.items():
215 for __, commit_id in self.repo.tags.items():
215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216
217
217 def _test_single_commit_cache(self, commit_id):
218 def _test_single_commit_cache(self, commit_id):
218 commit = self.repo.get_commit(commit_id)
219 commit = self.repo.get_commit(commit_id)
219 assert commit_id in self.repo.commits
220 assert commit_id in self.repo.commits
220 assert commit is self.repo.commits[commit_id]
221 assert commit is self.repo.commits[commit_id]
221
222
222 def test_initial_commit(self):
223 def test_initial_commit(self):
223 commit_id = self.repo.commit_ids[0]
224 commit_id = self.repo.commit_ids[0]
224 init_commit = self.repo.get_commit(commit_id)
225 init_commit = self.repo.get_commit(commit_id)
225 init_author = init_commit.author
226 init_author = init_commit.author
226
227
227 assert init_commit.message == 'initial import\n'
228 assert init_commit.message == 'initial import\n'
228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 assert init_author == init_commit.committer
230 assert init_author == init_commit.committer
230 for path in ('vcs/__init__.py',
231 for path in ('vcs/__init__.py',
231 'vcs/backends/BaseRepository.py',
232 'vcs/backends/BaseRepository.py',
232 'vcs/backends/__init__.py'):
233 'vcs/backends/__init__.py'):
233 assert isinstance(init_commit.get_node(path), FileNode)
234 assert isinstance(init_commit.get_node(path), FileNode)
234 for path in ('', 'vcs', 'vcs/backends'):
235 for path in ('', 'vcs', 'vcs/backends'):
235 assert isinstance(init_commit.get_node(path), DirNode)
236 assert isinstance(init_commit.get_node(path), DirNode)
236
237
237 with pytest.raises(NodeDoesNotExistError):
238 with pytest.raises(NodeDoesNotExistError):
238 init_commit.get_node(path='foobar')
239 init_commit.get_node(path='foobar')
239
240
240 node = init_commit.get_node('vcs/')
241 node = init_commit.get_node('vcs/')
241 assert hasattr(node, 'kind')
242 assert hasattr(node, 'kind')
242 assert node.kind == NodeKind.DIR
243 assert node.kind == NodeKind.DIR
243
244
244 node = init_commit.get_node('vcs')
245 node = init_commit.get_node('vcs')
245 assert hasattr(node, 'kind')
246 assert hasattr(node, 'kind')
246 assert node.kind == NodeKind.DIR
247 assert node.kind == NodeKind.DIR
247
248
248 node = init_commit.get_node('vcs/__init__.py')
249 node = init_commit.get_node('vcs/__init__.py')
249 assert hasattr(node, 'kind')
250 assert hasattr(node, 'kind')
250 assert node.kind == NodeKind.FILE
251 assert node.kind == NodeKind.FILE
251
252
252 def test_not_existing_commit(self):
253 def test_not_existing_commit(self):
253 with pytest.raises(RepositoryError):
254 with pytest.raises(RepositoryError):
254 self.repo.get_commit('f' * 40)
255 self.repo.get_commit('f' * 40)
255
256
256 def test_commit10(self):
257 def test_commit10(self):
257
258
258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 README = """===
260 README = """===
260 VCS
261 VCS
261 ===
262 ===
262
263
263 Various Version Control System management abstraction layer for Python.
264 Various Version Control System management abstraction layer for Python.
264
265
265 Introduction
266 Introduction
266 ------------
267 ------------
267
268
268 TODO: To be written...
269 TODO: To be written...
269
270
270 """
271 """
271 node = commit10.get_node('README.rst')
272 node = commit10.get_node('README.rst')
272 assert node.kind == NodeKind.FILE
273 assert node.kind == NodeKind.FILE
273 assert node.content == README
274 assert node.content == README
274
275
275 def test_head(self):
276 def test_head(self):
276 assert self.repo.head == self.repo.get_commit().raw_id
277 assert self.repo.head == self.repo.get_commit().raw_id
277
278
278 def test_checkout_with_create(self):
279 def test_checkout_with_create(self):
279 repo_clone = self.get_clone_repo()
280 repo_clone = self.get_clone_repo()
280
281
281 new_branch = 'new_branch'
282 new_branch = 'new_branch'
282 assert repo_clone._current_branch() == 'master'
283 assert repo_clone._current_branch() == 'master'
283 assert set(repo_clone.branches) == set(('master',))
284 assert set(repo_clone.branches) == set(('master',))
284 repo_clone._checkout(new_branch, create=True)
285 repo_clone._checkout(new_branch, create=True)
285
286
286 # Branches is a lazy property so we need to recrete the Repo object.
287 # Branches is a lazy property so we need to recrete the Repo object.
287 repo_clone = GitRepository(repo_clone.path)
288 repo_clone = GitRepository(repo_clone.path)
288 assert set(repo_clone.branches) == set(('master', new_branch))
289 assert set(repo_clone.branches) == set(('master', new_branch))
289 assert repo_clone._current_branch() == new_branch
290 assert repo_clone._current_branch() == new_branch
290
291
291 def test_checkout(self):
292 def test_checkout(self):
292 repo_clone = self.get_clone_repo()
293 repo_clone = self.get_clone_repo()
293
294
294 repo_clone._checkout('new_branch', create=True)
295 repo_clone._checkout('new_branch', create=True)
295 repo_clone._checkout('master')
296 repo_clone._checkout('master')
296
297
297 assert repo_clone._current_branch() == 'master'
298 assert repo_clone._current_branch() == 'master'
298
299
299 def test_checkout_same_branch(self):
300 def test_checkout_same_branch(self):
300 repo_clone = self.get_clone_repo()
301 repo_clone = self.get_clone_repo()
301
302
302 repo_clone._checkout('master')
303 repo_clone._checkout('master')
303 assert repo_clone._current_branch() == 'master'
304 assert repo_clone._current_branch() == 'master'
304
305
305 def test_checkout_branch_already_exists(self):
306 def test_checkout_branch_already_exists(self):
306 repo_clone = self.get_clone_repo()
307 repo_clone = self.get_clone_repo()
307
308
308 with pytest.raises(RepositoryError):
309 with pytest.raises(RepositoryError):
309 repo_clone._checkout('master', create=True)
310 repo_clone._checkout('master', create=True)
310
311
311 def test_checkout_bare_repo(self):
312 def test_checkout_bare_repo(self):
312 with pytest.raises(RepositoryError):
313 with pytest.raises(RepositoryError):
313 self.repo._checkout('master')
314 self.repo._checkout('master')
314
315
315 def test_current_branch_bare_repo(self):
316 def test_current_branch_bare_repo(self):
316 with pytest.raises(RepositoryError):
317 with pytest.raises(RepositoryError):
317 self.repo._current_branch()
318 self.repo._current_branch()
318
319
319 def test_current_branch_empty_repo(self):
320 def test_current_branch_empty_repo(self):
320 repo = self.get_empty_repo()
321 repo = self.get_empty_repo()
321 assert repo._current_branch() is None
322 assert repo._current_branch() is None
322
323
323 def test_local_clone(self):
324 def test_local_clone(self):
324 clone_path = next(REPO_PATH_GENERATOR)
325 clone_path = next(REPO_PATH_GENERATOR)
325 self.repo._local_clone(clone_path, 'master')
326 self.repo._local_clone(clone_path, 'master')
326 repo_clone = GitRepository(clone_path)
327 repo_clone = GitRepository(clone_path)
327
328
328 assert self.repo.commit_ids == repo_clone.commit_ids
329 assert self.repo.commit_ids == repo_clone.commit_ids
329
330
330 def test_local_clone_with_specific_branch(self):
331 def test_local_clone_with_specific_branch(self):
331 source_repo = self.get_clone_repo()
332 source_repo = self.get_clone_repo()
332
333
333 # Create a new branch in source repo
334 # Create a new branch in source repo
334 new_branch_commit = source_repo.commit_ids[-3]
335 new_branch_commit = source_repo.commit_ids[-3]
335 source_repo._checkout(new_branch_commit)
336 source_repo._checkout(new_branch_commit)
336 source_repo._checkout('new_branch', create=True)
337 source_repo._checkout('new_branch', create=True)
337
338
338 clone_path = next(REPO_PATH_GENERATOR)
339 clone_path = next(REPO_PATH_GENERATOR)
339 source_repo._local_clone(clone_path, 'new_branch')
340 source_repo._local_clone(clone_path, 'new_branch')
340 repo_clone = GitRepository(clone_path)
341 repo_clone = GitRepository(clone_path)
341
342
342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
343 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
343
344
344 clone_path = next(REPO_PATH_GENERATOR)
345 clone_path = next(REPO_PATH_GENERATOR)
345 source_repo._local_clone(clone_path, 'master')
346 source_repo._local_clone(clone_path, 'master')
346 repo_clone = GitRepository(clone_path)
347 repo_clone = GitRepository(clone_path)
347
348
348 assert source_repo.commit_ids == repo_clone.commit_ids
349 assert source_repo.commit_ids == repo_clone.commit_ids
349
350
350 def test_local_clone_fails_if_target_exists(self):
351 def test_local_clone_fails_if_target_exists(self):
351 with pytest.raises(RepositoryError):
352 with pytest.raises(RepositoryError):
352 self.repo._local_clone(self.repo.path, 'master')
353 self.repo._local_clone(self.repo.path, 'master')
353
354
354 def test_local_fetch(self):
355 def test_local_fetch(self):
355 target_repo = self.get_empty_repo()
356 target_repo = self.get_empty_repo()
356 source_repo = self.get_clone_repo()
357 source_repo = self.get_clone_repo()
357
358
358 # Create a new branch in source repo
359 # Create a new branch in source repo
359 master_commit = source_repo.commit_ids[-1]
360 master_commit = source_repo.commit_ids[-1]
360 new_branch_commit = source_repo.commit_ids[-3]
361 new_branch_commit = source_repo.commit_ids[-3]
361 source_repo._checkout(new_branch_commit)
362 source_repo._checkout(new_branch_commit)
362 source_repo._checkout('new_branch', create=True)
363 source_repo._checkout('new_branch', create=True)
363
364
364 target_repo._local_fetch(source_repo.path, 'new_branch')
365 target_repo._local_fetch(source_repo.path, 'new_branch')
365 assert target_repo._last_fetch_heads() == [new_branch_commit]
366 assert target_repo._last_fetch_heads() == [new_branch_commit]
366
367
367 target_repo._local_fetch(source_repo.path, 'master')
368 target_repo._local_fetch(source_repo.path, 'master')
368 assert target_repo._last_fetch_heads() == [master_commit]
369 assert target_repo._last_fetch_heads() == [master_commit]
369
370
370 def test_local_fetch_from_bare_repo(self):
371 def test_local_fetch_from_bare_repo(self):
371 target_repo = self.get_empty_repo()
372 target_repo = self.get_empty_repo()
372 target_repo._local_fetch(self.repo.path, 'master')
373 target_repo._local_fetch(self.repo.path, 'master')
373
374
374 master_commit = self.repo.commit_ids[-1]
375 master_commit = self.repo.commit_ids[-1]
375 assert target_repo._last_fetch_heads() == [master_commit]
376 assert target_repo._last_fetch_heads() == [master_commit]
376
377
377 def test_local_fetch_from_same_repo(self):
378 def test_local_fetch_from_same_repo(self):
378 with pytest.raises(ValueError):
379 with pytest.raises(ValueError):
379 self.repo._local_fetch(self.repo.path, 'master')
380 self.repo._local_fetch(self.repo.path, 'master')
380
381
381 def test_local_fetch_branch_does_not_exist(self):
382 def test_local_fetch_branch_does_not_exist(self):
382 target_repo = self.get_empty_repo()
383 target_repo = self.get_empty_repo()
383
384
384 with pytest.raises(RepositoryError):
385 with pytest.raises(RepositoryError):
385 target_repo._local_fetch(self.repo.path, 'new_branch')
386 target_repo._local_fetch(self.repo.path, 'new_branch')
386
387
387 def test_local_pull(self):
388 def test_local_pull(self):
388 target_repo = self.get_empty_repo()
389 target_repo = self.get_empty_repo()
389 source_repo = self.get_clone_repo()
390 source_repo = self.get_clone_repo()
390
391
391 # Create a new branch in source repo
392 # Create a new branch in source repo
392 master_commit = source_repo.commit_ids[-1]
393 master_commit = source_repo.commit_ids[-1]
393 new_branch_commit = source_repo.commit_ids[-3]
394 new_branch_commit = source_repo.commit_ids[-3]
394 source_repo._checkout(new_branch_commit)
395 source_repo._checkout(new_branch_commit)
395 source_repo._checkout('new_branch', create=True)
396 source_repo._checkout('new_branch', create=True)
396
397
397 target_repo._local_pull(source_repo.path, 'new_branch')
398 target_repo._local_pull(source_repo.path, 'new_branch')
398 target_repo = GitRepository(target_repo.path)
399 target_repo = GitRepository(target_repo.path)
399 assert target_repo.head == new_branch_commit
400 assert target_repo.head == new_branch_commit
400
401
401 target_repo._local_pull(source_repo.path, 'master')
402 target_repo._local_pull(source_repo.path, 'master')
402 target_repo = GitRepository(target_repo.path)
403 target_repo = GitRepository(target_repo.path)
403 assert target_repo.head == master_commit
404 assert target_repo.head == master_commit
404
405
405 def test_local_pull_in_bare_repo(self):
406 def test_local_pull_in_bare_repo(self):
406 with pytest.raises(RepositoryError):
407 with pytest.raises(RepositoryError):
407 self.repo._local_pull(self.repo.path, 'master')
408 self.repo._local_pull(self.repo.path, 'master')
408
409
409 def test_local_merge(self):
410 def test_local_merge(self):
410 target_repo = self.get_empty_repo()
411 target_repo = self.get_empty_repo()
411 source_repo = self.get_clone_repo()
412 source_repo = self.get_clone_repo()
412
413
413 # Create a new branch in source repo
414 # Create a new branch in source repo
414 master_commit = source_repo.commit_ids[-1]
415 master_commit = source_repo.commit_ids[-1]
415 new_branch_commit = source_repo.commit_ids[-3]
416 new_branch_commit = source_repo.commit_ids[-3]
416 source_repo._checkout(new_branch_commit)
417 source_repo._checkout(new_branch_commit)
417 source_repo._checkout('new_branch', create=True)
418 source_repo._checkout('new_branch', create=True)
418
419
419 # This is required as one cannot do a -ff-only merge in an empty repo.
420 # This is required as one cannot do a -ff-only merge in an empty repo.
420 target_repo._local_pull(source_repo.path, 'new_branch')
421 target_repo._local_pull(source_repo.path, 'new_branch')
421
422
422 target_repo._local_fetch(source_repo.path, 'master')
423 target_repo._local_fetch(source_repo.path, 'master')
423 merge_message = 'Merge message\n\nDescription:...'
424 merge_message = 'Merge message\n\nDescription:...'
424 user_name = 'Albert Einstein'
425 user_name = 'Albert Einstein'
425 user_email = 'albert@einstein.com'
426 user_email = 'albert@einstein.com'
426 target_repo._local_merge(merge_message, user_name, user_email,
427 target_repo._local_merge(merge_message, user_name, user_email,
427 target_repo._last_fetch_heads())
428 target_repo._last_fetch_heads())
428
429
429 target_repo = GitRepository(target_repo.path)
430 target_repo = GitRepository(target_repo.path)
430 assert target_repo.commit_ids[-2] == master_commit
431 assert target_repo.commit_ids[-2] == master_commit
431 last_commit = target_repo.get_commit(target_repo.head)
432 last_commit = target_repo.get_commit(target_repo.head)
432 assert last_commit.message.strip() == merge_message
433 assert last_commit.message.strip() == merge_message
433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434
435
435 assert not os.path.exists(
436 assert not os.path.exists(
436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437
438
438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441
442
442 target_repo._local_fetch(self.repo.path, 'master')
443 target_repo._local_fetch(self.repo.path, 'master')
443 with pytest.raises(RepositoryError):
444 with pytest.raises(RepositoryError):
444 target_repo._local_merge(
445 target_repo._local_merge(
445 'merge_message', 'user name', 'user@name.com',
446 'merge_message', 'user name', 'user@name.com',
446 target_repo._last_fetch_heads())
447 target_repo._last_fetch_heads())
447
448
448 # Check we are not left in an intermediate merge state
449 # Check we are not left in an intermediate merge state
449 assert not os.path.exists(
450 assert not os.path.exists(
450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451
452
452 def test_local_merge_into_empty_repo(self):
453 def test_local_merge_into_empty_repo(self):
453 target_repo = self.get_empty_repo()
454 target_repo = self.get_empty_repo()
454
455
455 # This is required as one cannot do a -ff-only merge in an empty repo.
456 # This is required as one cannot do a -ff-only merge in an empty repo.
456 target_repo._local_fetch(self.repo.path, 'master')
457 target_repo._local_fetch(self.repo.path, 'master')
457 with pytest.raises(RepositoryError):
458 with pytest.raises(RepositoryError):
458 target_repo._local_merge(
459 target_repo._local_merge(
459 'merge_message', 'user name', 'user@name.com',
460 'merge_message', 'user name', 'user@name.com',
460 target_repo._last_fetch_heads())
461 target_repo._last_fetch_heads())
461
462
462 def test_local_merge_in_bare_repo(self):
463 def test_local_merge_in_bare_repo(self):
463 with pytest.raises(RepositoryError):
464 with pytest.raises(RepositoryError):
464 self.repo._local_merge(
465 self.repo._local_merge(
465 'merge_message', 'user name', 'user@name.com', None)
466 'merge_message', 'user name', 'user@name.com', None)
466
467
467 def test_local_push_non_bare(self):
468 def test_local_push_non_bare(self):
468 target_repo = self.get_empty_repo()
469 target_repo = self.get_empty_repo()
469
470
470 pushed_branch = 'pushed_branch'
471 pushed_branch = 'pushed_branch'
471 self.repo._local_push('master', target_repo.path, pushed_branch)
472 self.repo._local_push('master', target_repo.path, pushed_branch)
472 # Fix the HEAD of the target repo, or otherwise GitRepository won't
473 # Fix the HEAD of the target repo, or otherwise GitRepository won't
473 # report any branches.
474 # report any branches.
474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 f.write('ref: refs/heads/%s' % pushed_branch)
476 f.write('ref: refs/heads/%s' % pushed_branch)
476
477
477 target_repo = GitRepository(target_repo.path)
478 target_repo = GitRepository(target_repo.path)
478
479
479 assert (target_repo.branches[pushed_branch] ==
480 assert (target_repo.branches[pushed_branch] ==
480 self.repo.branches['master'])
481 self.repo.branches['master'])
481
482
482 def test_local_push_bare(self):
483 def test_local_push_bare(self):
483 target_repo = self.get_empty_repo(bare=True)
484 target_repo = self.get_empty_repo(bare=True)
484
485
485 pushed_branch = 'pushed_branch'
486 pushed_branch = 'pushed_branch'
486 self.repo._local_push('master', target_repo.path, pushed_branch)
487 self.repo._local_push('master', target_repo.path, pushed_branch)
487 # Fix the HEAD of the target repo, or otherwise GitRepository won't
488 # Fix the HEAD of the target repo, or otherwise GitRepository won't
488 # report any branches.
489 # report any branches.
489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 f.write('ref: refs/heads/%s' % pushed_branch)
491 f.write('ref: refs/heads/%s' % pushed_branch)
491
492
492 target_repo = GitRepository(target_repo.path)
493 target_repo = GitRepository(target_repo.path)
493
494
494 assert (target_repo.branches[pushed_branch] ==
495 assert (target_repo.branches[pushed_branch] ==
495 self.repo.branches['master'])
496 self.repo.branches['master'])
496
497
497 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 target_repo = self.get_clone_repo()
499 target_repo = self.get_clone_repo()
499
500
500 pushed_branch = 'pushed_branch'
501 pushed_branch = 'pushed_branch'
501 # Create a new branch in source repo
502 # Create a new branch in source repo
502 new_branch_commit = target_repo.commit_ids[-3]
503 new_branch_commit = target_repo.commit_ids[-3]
503 target_repo._checkout(new_branch_commit)
504 target_repo._checkout(new_branch_commit)
504 target_repo._checkout(pushed_branch, create=True)
505 target_repo._checkout(pushed_branch, create=True)
505
506
506 self.repo._local_push('master', target_repo.path, pushed_branch)
507 self.repo._local_push('master', target_repo.path, pushed_branch)
507
508
508 target_repo = GitRepository(target_repo.path)
509 target_repo = GitRepository(target_repo.path)
509
510
510 assert (target_repo.branches[pushed_branch] ==
511 assert (target_repo.branches[pushed_branch] ==
511 self.repo.branches['master'])
512 self.repo.branches['master'])
512
513
513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 with pytest.raises(RepositoryError):
516 with pytest.raises(RepositoryError):
516 self.repo._local_push('master', target_repo.path, 'master')
517 self.repo._local_push('master', target_repo.path, 'master')
517
518
518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 target_repo = self.get_empty_repo(bare=True)
520 target_repo = self.get_empty_repo(bare=True)
520
521
521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 self.repo._local_push(
523 self.repo._local_push(
523 'master', target_repo.path, 'master', enable_hooks=True)
524 'master', target_repo.path, 'master', enable_hooks=True)
524 env = run_mock.call_args[1]['extra_env']
525 env = run_mock.call_args[1]['extra_env']
525 assert 'RC_SKIP_HOOKS' not in env
526 assert 'RC_SKIP_HOOKS' not in env
526
527
527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 path_components = (
529 path_components = (
529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 hook_path = os.path.join(repo_path, *path_components)
531 hook_path = os.path.join(repo_path, *path_components)
531 with open(hook_path, 'w') as f:
532 with open(hook_path, 'w') as f:
532 script_lines = [
533 script_lines = [
533 '#!%s' % sys.executable,
534 '#!%s' % sys.executable,
534 'import os',
535 'import os',
535 'import sys',
536 'import sys',
536 'if os.environ.get("RC_SKIP_HOOKS"):',
537 'if os.environ.get("RC_SKIP_HOOKS"):',
537 ' sys.exit(0)',
538 ' sys.exit(0)',
538 'sys.exit(1)',
539 'sys.exit(1)',
539 ]
540 ]
540 f.write('\n'.join(script_lines))
541 f.write('\n'.join(script_lines))
541 os.chmod(hook_path, 0755)
542 os.chmod(hook_path, 0755)
542
543
543 def test_local_push_does_not_execute_hook(self):
544 def test_local_push_does_not_execute_hook(self):
544 target_repo = self.get_empty_repo()
545 target_repo = self.get_empty_repo()
545
546
546 pushed_branch = 'pushed_branch'
547 pushed_branch = 'pushed_branch'
547 self._add_failing_hook(target_repo.path, 'pre-receive')
548 self._add_failing_hook(target_repo.path, 'pre-receive')
548 self.repo._local_push('master', target_repo.path, pushed_branch)
549 self.repo._local_push('master', target_repo.path, pushed_branch)
549 # Fix the HEAD of the target repo, or otherwise GitRepository won't
550 # Fix the HEAD of the target repo, or otherwise GitRepository won't
550 # report any branches.
551 # report any branches.
551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 f.write('ref: refs/heads/%s' % pushed_branch)
553 f.write('ref: refs/heads/%s' % pushed_branch)
553
554
554 target_repo = GitRepository(target_repo.path)
555 target_repo = GitRepository(target_repo.path)
555
556
556 assert (target_repo.branches[pushed_branch] ==
557 assert (target_repo.branches[pushed_branch] ==
557 self.repo.branches['master'])
558 self.repo.branches['master'])
558
559
559 def test_local_push_executes_hook(self):
560 def test_local_push_executes_hook(self):
560 target_repo = self.get_empty_repo(bare=True)
561 target_repo = self.get_empty_repo(bare=True)
561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 with pytest.raises(RepositoryError):
563 with pytest.raises(RepositoryError):
563 self.repo._local_push(
564 self.repo._local_push(
564 'master', target_repo.path, 'master', enable_hooks=True)
565 'master', target_repo.path, 'master', enable_hooks=True)
565
566
566 def test_maybe_prepare_merge_workspace(self):
567 def test_maybe_prepare_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 workspace = self.repo._maybe_prepare_merge_workspace(
568 'pr2', Reference('branch', 'master', 'unused'))
569 'pr2', Reference('branch', 'master', 'unused'))
569
570
570 assert os.path.isdir(workspace)
571 assert os.path.isdir(workspace)
571 workspace_repo = GitRepository(workspace)
572 workspace_repo = GitRepository(workspace)
572 assert workspace_repo.branches == self.repo.branches
573 assert workspace_repo.branches == self.repo.branches
573
574
574 # Calling it a second time should also succeed
575 # Calling it a second time should also succeed
575 workspace = self.repo._maybe_prepare_merge_workspace(
576 workspace = self.repo._maybe_prepare_merge_workspace(
576 'pr2', Reference('branch', 'master', 'unused'))
577 'pr2', Reference('branch', 'master', 'unused'))
577 assert os.path.isdir(workspace)
578 assert os.path.isdir(workspace)
578
579
579 def test_cleanup_merge_workspace(self):
580 def test_cleanup_merge_workspace(self):
580 workspace = self.repo._maybe_prepare_merge_workspace(
581 workspace = self.repo._maybe_prepare_merge_workspace(
581 'pr3', Reference('branch', 'master', 'unused'))
582 'pr3', Reference('branch', 'master', 'unused'))
582 self.repo.cleanup_merge_workspace('pr3')
583 self.repo.cleanup_merge_workspace('pr3')
583
584
584 assert not os.path.exists(workspace)
585 assert not os.path.exists(workspace)
585
586
586 def test_cleanup_merge_workspace_invalid_workspace_id(self):
587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
587 # No assert: because in case of an inexistent workspace this function
588 # No assert: because in case of an inexistent workspace this function
588 # should still succeed.
589 # should still succeed.
589 self.repo.cleanup_merge_workspace('pr4')
590 self.repo.cleanup_merge_workspace('pr4')
590
591
591 def test_set_refs(self):
592 def test_set_refs(self):
592 test_ref = 'refs/test-refs/abcde'
593 test_ref = 'refs/test-refs/abcde'
593 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594
595
595 self.repo.set_refs(test_ref, test_commit_id)
596 self.repo.set_refs(test_ref, test_commit_id)
596 stdout, _ = self.repo.run_git_command(['show-ref'])
597 stdout, _ = self.repo.run_git_command(['show-ref'])
597 assert test_ref in stdout
598 assert test_ref in stdout
598 assert test_commit_id in stdout
599 assert test_commit_id in stdout
599
600
600 def test_remove_ref(self):
601 def test_remove_ref(self):
601 test_ref = 'refs/test-refs/abcde'
602 test_ref = 'refs/test-refs/abcde'
602 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
603 self.repo.set_refs(test_ref, test_commit_id)
604 self.repo.set_refs(test_ref, test_commit_id)
604 stdout, _ = self.repo.run_git_command(['show-ref'])
605 stdout, _ = self.repo.run_git_command(['show-ref'])
605 assert test_ref in stdout
606 assert test_ref in stdout
606 assert test_commit_id in stdout
607 assert test_commit_id in stdout
607
608
608 self.repo.remove_ref(test_ref)
609 self.repo.remove_ref(test_ref)
609 stdout, _ = self.repo.run_git_command(['show-ref'])
610 stdout, _ = self.repo.run_git_command(['show-ref'])
610 assert test_ref not in stdout
611 assert test_ref not in stdout
611 assert test_commit_id not in stdout
612 assert test_commit_id not in stdout
612
613
613
614
614 class TestGitCommit(object):
615 class TestGitCommit(object):
615
616
616 @pytest.fixture(autouse=True)
617 @pytest.fixture(autouse=True)
617 def prepare(self):
618 def prepare(self):
618 self.repo = GitRepository(TEST_GIT_REPO)
619 self.repo = GitRepository(TEST_GIT_REPO)
619
620
620 def test_default_commit(self):
621 def test_default_commit(self):
621 tip = self.repo.get_commit()
622 tip = self.repo.get_commit()
622 assert tip == self.repo.get_commit(None)
623 assert tip == self.repo.get_commit(None)
623 assert tip == self.repo.get_commit('tip')
624 assert tip == self.repo.get_commit('tip')
624
625
625 def test_root_node(self):
626 def test_root_node(self):
626 tip = self.repo.get_commit()
627 tip = self.repo.get_commit()
627 assert tip.root is tip.get_node('')
628 assert tip.root is tip.get_node('')
628
629
629 def test_lazy_fetch(self):
630 def test_lazy_fetch(self):
630 """
631 """
631 Test if commit's nodes expands and are cached as we walk through
632 Test if commit's nodes expands and are cached as we walk through
632 the commit. This test is somewhat hard to write as order of tests
633 the commit. This test is somewhat hard to write as order of tests
633 is a key here. Written by running command after command in a shell.
634 is a key here. Written by running command after command in a shell.
634 """
635 """
635 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
636 assert commit_id in self.repo.commit_ids
637 assert commit_id in self.repo.commit_ids
637 commit = self.repo.get_commit(commit_id)
638 commit = self.repo.get_commit(commit_id)
638 assert len(commit.nodes) == 0
639 assert len(commit.nodes) == 0
639 root = commit.root
640 root = commit.root
640 assert len(commit.nodes) == 1
641 assert len(commit.nodes) == 1
641 assert len(root.nodes) == 8
642 assert len(root.nodes) == 8
642 # accessing root.nodes updates commit.nodes
643 # accessing root.nodes updates commit.nodes
643 assert len(commit.nodes) == 9
644 assert len(commit.nodes) == 9
644
645
645 docs = root.get_node('docs')
646 docs = root.get_node('docs')
646 # we haven't yet accessed anything new as docs dir was already cached
647 # we haven't yet accessed anything new as docs dir was already cached
647 assert len(commit.nodes) == 9
648 assert len(commit.nodes) == 9
648 assert len(docs.nodes) == 8
649 assert len(docs.nodes) == 8
649 # accessing docs.nodes updates commit.nodes
650 # accessing docs.nodes updates commit.nodes
650 assert len(commit.nodes) == 17
651 assert len(commit.nodes) == 17
651
652
652 assert docs is commit.get_node('docs')
653 assert docs is commit.get_node('docs')
653 assert docs is root.nodes[0]
654 assert docs is root.nodes[0]
654 assert docs is root.dirs[0]
655 assert docs is root.dirs[0]
655 assert docs is commit.get_node('docs')
656 assert docs is commit.get_node('docs')
656
657
657 def test_nodes_with_commit(self):
658 def test_nodes_with_commit(self):
658 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
659 commit = self.repo.get_commit(commit_id)
660 commit = self.repo.get_commit(commit_id)
660 root = commit.root
661 root = commit.root
661 docs = root.get_node('docs')
662 docs = root.get_node('docs')
662 assert docs is commit.get_node('docs')
663 assert docs is commit.get_node('docs')
663 api = docs.get_node('api')
664 api = docs.get_node('api')
664 assert api is commit.get_node('docs/api')
665 assert api is commit.get_node('docs/api')
665 index = api.get_node('index.rst')
666 index = api.get_node('index.rst')
666 assert index is commit.get_node('docs/api/index.rst')
667 assert index is commit.get_node('docs/api/index.rst')
667 assert index is commit.get_node('docs')\
668 assert index is commit.get_node('docs')\
668 .get_node('api')\
669 .get_node('api')\
669 .get_node('index.rst')
670 .get_node('index.rst')
670
671
671 def test_branch_and_tags(self):
672 def test_branch_and_tags(self):
672 """
673 """
673 rev0 = self.repo.commit_ids[0]
674 rev0 = self.repo.commit_ids[0]
674 commit0 = self.repo.get_commit(rev0)
675 commit0 = self.repo.get_commit(rev0)
675 assert commit0.branch == 'master'
676 assert commit0.branch == 'master'
676 assert commit0.tags == []
677 assert commit0.tags == []
677
678
678 rev10 = self.repo.commit_ids[10]
679 rev10 = self.repo.commit_ids[10]
679 commit10 = self.repo.get_commit(rev10)
680 commit10 = self.repo.get_commit(rev10)
680 assert commit10.branch == 'master'
681 assert commit10.branch == 'master'
681 assert commit10.tags == []
682 assert commit10.tags == []
682
683
683 rev44 = self.repo.commit_ids[44]
684 rev44 = self.repo.commit_ids[44]
684 commit44 = self.repo.get_commit(rev44)
685 commit44 = self.repo.get_commit(rev44)
685 assert commit44.branch == 'web-branch'
686 assert commit44.branch == 'web-branch'
686
687
687 tip = self.repo.get_commit('tip')
688 tip = self.repo.get_commit('tip')
688 assert 'tip' in tip.tags
689 assert 'tip' in tip.tags
689 """
690 """
690 # Those tests would fail - branches are now going
691 # Those tests would fail - branches are now going
691 # to be changed at main API in order to support git backend
692 # to be changed at main API in order to support git backend
692 pass
693 pass
693
694
694 def test_file_size(self):
695 def test_file_size(self):
695 to_check = (
696 to_check = (
696 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
697 'vcs/backends/BaseRepository.py', 502),
698 'vcs/backends/BaseRepository.py', 502),
698 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
699 'vcs/backends/hg.py', 854),
700 'vcs/backends/hg.py', 854),
700 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
701 'setup.py', 1068),
702 'setup.py', 1068),
702
703
703 ('d955cd312c17b02143c04fa1099a352b04368118',
704 ('d955cd312c17b02143c04fa1099a352b04368118',
704 'vcs/backends/base.py', 2921),
705 'vcs/backends/base.py', 2921),
705 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
706 'vcs/backends/base.py', 3936),
707 'vcs/backends/base.py', 3936),
707 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
708 'vcs/backends/base.py', 6189),
709 'vcs/backends/base.py', 6189),
709 )
710 )
710 for commit_id, path, size in to_check:
711 for commit_id, path, size in to_check:
711 node = self.repo.get_commit(commit_id).get_node(path)
712 node = self.repo.get_commit(commit_id).get_node(path)
712 assert node.is_file()
713 assert node.is_file()
713 assert node.size == size
714 assert node.size == size
714
715
715 def test_file_history_from_commits(self):
716 def test_file_history_from_commits(self):
716 node = self.repo[10].get_node('setup.py')
717 node = self.repo[10].get_node('setup.py')
717 commit_ids = [commit.raw_id for commit in node.history]
718 commit_ids = [commit.raw_id for commit in node.history]
718 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
719
720
720 node = self.repo[20].get_node('setup.py')
721 node = self.repo[20].get_node('setup.py')
721 node_ids = [commit.raw_id for commit in node.history]
722 node_ids = [commit.raw_id for commit in node.history]
722 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
723 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
724
725
725 # special case we check history from commit that has this particular
726 # special case we check history from commit that has this particular
726 # file changed this means we check if it's included as well
727 # file changed this means we check if it's included as well
727 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
728 .get_node('setup.py')
729 .get_node('setup.py')
729 node_ids = [commit.raw_id for commit in node.history]
730 node_ids = [commit.raw_id for commit in node.history]
730 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
731 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
732
733
733 def test_file_history(self):
734 def test_file_history(self):
734 # we can only check if those commits are present in the history
735 # we can only check if those commits are present in the history
735 # as we cannot update this test every time file is changed
736 # as we cannot update this test every time file is changed
736 files = {
737 files = {
737 'setup.py': [
738 'setup.py': [
738 '54386793436c938cff89326944d4c2702340037d',
739 '54386793436c938cff89326944d4c2702340037d',
739 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
740 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
740 '998ed409c795fec2012b1c0ca054d99888b22090',
741 '998ed409c795fec2012b1c0ca054d99888b22090',
741 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
742 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
742 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
743 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
743 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
744 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
744 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
745 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
745 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
746 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
746 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
747 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
747 ],
748 ],
748 'vcs/nodes.py': [
749 'vcs/nodes.py': [
749 '33fa3223355104431402a888fa77a4e9956feb3e',
750 '33fa3223355104431402a888fa77a4e9956feb3e',
750 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
751 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
751 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
752 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
752 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
753 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
753 'c877b68d18e792a66b7f4c529ea02c8f80801542',
754 'c877b68d18e792a66b7f4c529ea02c8f80801542',
754 '4313566d2e417cb382948f8d9d7c765330356054',
755 '4313566d2e417cb382948f8d9d7c765330356054',
755 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
756 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
756 '54386793436c938cff89326944d4c2702340037d',
757 '54386793436c938cff89326944d4c2702340037d',
757 '54000345d2e78b03a99d561399e8e548de3f3203',
758 '54000345d2e78b03a99d561399e8e548de3f3203',
758 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
759 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
759 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
760 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
760 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
761 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
761 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
762 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
762 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
763 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
763 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
764 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
764 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
765 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
765 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
766 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
766 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
767 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
767 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
768 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
768 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
769 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
769 'f15c21f97864b4f071cddfbf2750ec2e23859414',
770 'f15c21f97864b4f071cddfbf2750ec2e23859414',
770 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
771 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
771 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
772 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
772 '84dec09632a4458f79f50ddbbd155506c460b4f9',
773 '84dec09632a4458f79f50ddbbd155506c460b4f9',
773 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
774 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
774 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
775 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
775 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
776 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
776 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
777 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
777 '6970b057cffe4aab0a792aa634c89f4bebf01441',
778 '6970b057cffe4aab0a792aa634c89f4bebf01441',
778 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
779 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
779 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
780 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
780 ],
781 ],
781 'vcs/backends/git.py': [
782 'vcs/backends/git.py': [
782 '4cf116ad5a457530381135e2f4c453e68a1b0105',
783 '4cf116ad5a457530381135e2f4c453e68a1b0105',
783 '9a751d84d8e9408e736329767387f41b36935153',
784 '9a751d84d8e9408e736329767387f41b36935153',
784 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
785 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
785 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
786 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
786 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
787 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
787 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
788 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
788 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
789 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
789 '54000345d2e78b03a99d561399e8e548de3f3203',
790 '54000345d2e78b03a99d561399e8e548de3f3203',
790 ],
791 ],
791 }
792 }
792 for path, commit_ids in files.items():
793 for path, commit_ids in files.items():
793 node = self.repo.get_commit(commit_ids[0]).get_node(path)
794 node = self.repo.get_commit(commit_ids[0]).get_node(path)
794 node_ids = [commit.raw_id for commit in node.history]
795 node_ids = [commit.raw_id for commit in node.history]
795 assert set(commit_ids).issubset(set(node_ids)), (
796 assert set(commit_ids).issubset(set(node_ids)), (
796 "We assumed that %s is subset of commit_ids for which file %s "
797 "We assumed that %s is subset of commit_ids for which file %s "
797 "has been changed, and history of that node returned: %s"
798 "has been changed, and history of that node returned: %s"
798 % (commit_ids, path, node_ids))
799 % (commit_ids, path, node_ids))
799
800
800 def test_file_annotate(self):
801 def test_file_annotate(self):
801 files = {
802 files = {
802 'vcs/backends/__init__.py': {
803 'vcs/backends/__init__.py': {
803 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
804 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
804 'lines_no': 1,
805 'lines_no': 1,
805 'commits': [
806 'commits': [
806 'c1214f7e79e02fc37156ff215cd71275450cffc3',
807 'c1214f7e79e02fc37156ff215cd71275450cffc3',
807 ],
808 ],
808 },
809 },
809 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
810 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
810 'lines_no': 21,
811 'lines_no': 21,
811 'commits': [
812 'commits': [
812 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
813 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
813 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 ],
834 ],
834 },
835 },
835 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
836 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
836 'lines_no': 32,
837 'lines_no': 32,
837 'commits': [
838 'commits': [
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
841 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '54000345d2e78b03a99d561399e8e548de3f3203',
846 '54000345d2e78b03a99d561399e8e548de3f3203',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
849 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
849 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
854 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
854 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
855 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
855 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
856 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
856 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
864 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 ],
871 ],
871 },
872 },
872 },
873 },
873 }
874 }
874
875
875 for fname, commit_dict in files.items():
876 for fname, commit_dict in files.items():
876 for commit_id, __ in commit_dict.items():
877 for commit_id, __ in commit_dict.items():
877 commit = self.repo.get_commit(commit_id)
878 commit = self.repo.get_commit(commit_id)
878
879
879 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
880 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
880 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
881 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
881 assert l1_1 == l1_2
882 assert l1_1 == l1_2
882 l1 = l1_1
883 l1 = l1_1
883 l2 = files[fname][commit_id]['commits']
884 l2 = files[fname][commit_id]['commits']
884 assert l1 == l2, (
885 assert l1 == l2, (
885 "The lists of commit_ids for %s@commit_id %s"
886 "The lists of commit_ids for %s@commit_id %s"
886 "from annotation list should match each other, "
887 "from annotation list should match each other, "
887 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
888 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
888
889
889 def test_files_state(self):
890 def test_files_state(self):
890 """
891 """
891 Tests state of FileNodes.
892 Tests state of FileNodes.
892 """
893 """
893 node = self.repo\
894 node = self.repo\
894 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
895 .get_node('vcs/utils/diffs.py')
896 .get_node('vcs/utils/diffs.py')
896 assert node.state, NodeState.ADDED
897 assert node.state, NodeState.ADDED
897 assert node.added
898 assert node.added
898 assert not node.changed
899 assert not node.changed
899 assert not node.not_changed
900 assert not node.not_changed
900 assert not node.removed
901 assert not node.removed
901
902
902 node = self.repo\
903 node = self.repo\
903 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
904 .get_node('.hgignore')
905 .get_node('.hgignore')
905 assert node.state, NodeState.CHANGED
906 assert node.state, NodeState.CHANGED
906 assert not node.added
907 assert not node.added
907 assert node.changed
908 assert node.changed
908 assert not node.not_changed
909 assert not node.not_changed
909 assert not node.removed
910 assert not node.removed
910
911
911 node = self.repo\
912 node = self.repo\
912 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
913 .get_node('setup.py')
914 .get_node('setup.py')
914 assert node.state, NodeState.NOT_CHANGED
915 assert node.state, NodeState.NOT_CHANGED
915 assert not node.added
916 assert not node.added
916 assert not node.changed
917 assert not node.changed
917 assert node.not_changed
918 assert node.not_changed
918 assert not node.removed
919 assert not node.removed
919
920
920 # If node has REMOVED state then trying to fetch it would raise
921 # If node has REMOVED state then trying to fetch it would raise
921 # CommitError exception
922 # CommitError exception
922 commit = self.repo.get_commit(
923 commit = self.repo.get_commit(
923 'fa6600f6848800641328adbf7811fd2372c02ab2')
924 'fa6600f6848800641328adbf7811fd2372c02ab2')
924 path = 'vcs/backends/BaseRepository.py'
925 path = 'vcs/backends/BaseRepository.py'
925 with pytest.raises(NodeDoesNotExistError):
926 with pytest.raises(NodeDoesNotExistError):
926 commit.get_node(path)
927 commit.get_node(path)
927 # but it would be one of ``removed`` (commit's attribute)
928 # but it would be one of ``removed`` (commit's attribute)
928 assert path in [rf.path for rf in commit.removed]
929 assert path in [rf.path for rf in commit.removed]
929
930
930 commit = self.repo.get_commit(
931 commit = self.repo.get_commit(
931 '54386793436c938cff89326944d4c2702340037d')
932 '54386793436c938cff89326944d4c2702340037d')
932 changed = [
933 changed = [
933 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
934 'vcs/nodes.py']
935 'vcs/nodes.py']
935 assert set(changed) == set([f.path for f in commit.changed])
936 assert set(changed) == set([f.path for f in commit.changed])
936
937
937 def test_unicode_branch_refs(self):
938 def test_unicode_branch_refs(self):
938 unicode_branches = {
939 unicode_branches = {
939 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
940 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
941 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
941 }
942 }
942 with mock.patch(
943 with mock.patch(
943 ("rhodecode.lib.vcs.backends.git.repository"
944 ("rhodecode.lib.vcs.backends.git.repository"
944 ".GitRepository._refs"),
945 ".GitRepository._refs"),
945 unicode_branches):
946 unicode_branches):
946 branches = self.repo.branches
947 branches = self.repo.branches
947
948
948 assert 'unicode' in branches
949 assert 'unicode' in branches
949 assert u'uniΓ§ΓΆβˆ‚e' in branches
950 assert u'uniΓ§ΓΆβˆ‚e' in branches
950
951
951 def test_unicode_tag_refs(self):
952 def test_unicode_tag_refs(self):
952 unicode_tags = {
953 unicode_tags = {
953 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
954 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 }
956 }
956 with mock.patch(
957 with mock.patch(
957 ("rhodecode.lib.vcs.backends.git.repository"
958 ("rhodecode.lib.vcs.backends.git.repository"
958 ".GitRepository._refs"),
959 ".GitRepository._refs"),
959 unicode_tags):
960 unicode_tags):
960 tags = self.repo.tags
961 tags = self.repo.tags
961
962
962 assert 'unicode' in tags
963 assert 'unicode' in tags
963 assert u'uniΓ§ΓΆβˆ‚e' in tags
964 assert u'uniΓ§ΓΆβˆ‚e' in tags
964
965
965 def test_commit_message_is_unicode(self):
966 def test_commit_message_is_unicode(self):
966 for commit in self.repo:
967 for commit in self.repo:
967 assert type(commit.message) == unicode
968 assert type(commit.message) == unicode
968
969
969 def test_commit_author_is_unicode(self):
970 def test_commit_author_is_unicode(self):
970 for commit in self.repo:
971 for commit in self.repo:
971 assert type(commit.author) == unicode
972 assert type(commit.author) == unicode
972
973
973 def test_repo_files_content_is_unicode(self):
974 def test_repo_files_content_is_unicode(self):
974 commit = self.repo.get_commit()
975 commit = self.repo.get_commit()
975 for node in commit.get_node('/'):
976 for node in commit.get_node('/'):
976 if node.is_file():
977 if node.is_file():
977 assert type(node.content) == unicode
978 assert type(node.content) == unicode
978
979
979 def test_wrong_path(self):
980 def test_wrong_path(self):
980 # There is 'setup.py' in the root dir but not there:
981 # There is 'setup.py' in the root dir but not there:
981 path = 'foo/bar/setup.py'
982 path = 'foo/bar/setup.py'
982 tip = self.repo.get_commit()
983 tip = self.repo.get_commit()
983 with pytest.raises(VCSError):
984 with pytest.raises(VCSError):
984 tip.get_node(path)
985 tip.get_node(path)
985
986
986 @pytest.mark.parametrize("author_email, commit_id", [
987 @pytest.mark.parametrize("author_email, commit_id", [
987 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
988 ('lukasz.balcerzak@python-center.pl',
989 ('lukasz.balcerzak@python-center.pl',
989 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
990 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
991 ])
992 ])
992 def test_author_email(self, author_email, commit_id):
993 def test_author_email(self, author_email, commit_id):
993 commit = self.repo.get_commit(commit_id)
994 commit = self.repo.get_commit(commit_id)
994 assert author_email == commit.author_email
995 assert author_email == commit.author_email
995
996
996 @pytest.mark.parametrize("author, commit_id", [
997 @pytest.mark.parametrize("author, commit_id", [
997 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
998 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
999 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1000 ])
1001 ])
1001 def test_author_username(self, author, commit_id):
1002 def test_author_username(self, author, commit_id):
1002 commit = self.repo.get_commit(commit_id)
1003 commit = self.repo.get_commit(commit_id)
1003 assert author == commit.author_name
1004 assert author == commit.author_name
1004
1005
1005
1006
1007 class TestLargeFileRepo(object):
1008
1009 def test_large_file(self, backend_git):
1010 conf = make_db_config()
1011 repo = backend_git.create_test_repo('largefiles', conf)
1012
1013 tip = repo.scm_instance().get_commit()
1014
1015 # extract stored LF node into the origin cache
1016 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1017
1018 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1019 oid_path = os.path.join(lfs_store, oid)
1020 oid_destination = os.path.join(
1021 conf.get('vcs_git_lfs', 'store_location'), oid)
1022 shutil.copy(oid_path, oid_destination)
1023
1024 node = tip.get_node('1MB.zip')
1025
1026 lf_node = node.get_largefile_node()
1027
1028 assert lf_node.is_largefile() is True
1029 assert lf_node.size == 1024000
1030 assert lf_node.name == '1MB.zip'
1031
1032
1006 class TestGitSpecificWithRepo(BackendTestMixin):
1033 class TestGitSpecificWithRepo(BackendTestMixin):
1007
1034
1008 @classmethod
1035 @classmethod
1009 def _get_commits(cls):
1036 def _get_commits(cls):
1010 return [
1037 return [
1011 {
1038 {
1012 'message': 'Initial',
1039 'message': 'Initial',
1013 'author': 'Joe Doe <joe.doe@example.com>',
1040 'author': 'Joe Doe <joe.doe@example.com>',
1014 'date': datetime.datetime(2010, 1, 1, 20),
1041 'date': datetime.datetime(2010, 1, 1, 20),
1015 'added': [
1042 'added': [
1016 FileNode('foobar/static/js/admin/base.js', content='base'),
1043 FileNode('foobar/static/js/admin/base.js', content='base'),
1017 FileNode(
1044 FileNode(
1018 'foobar/static/admin', content='admin',
1045 'foobar/static/admin', content='admin',
1019 mode=0120000), # this is a link
1046 mode=0120000), # this is a link
1020 FileNode('foo', content='foo'),
1047 FileNode('foo', content='foo'),
1021 ],
1048 ],
1022 },
1049 },
1023 {
1050 {
1024 'message': 'Second',
1051 'message': 'Second',
1025 'author': 'Joe Doe <joe.doe@example.com>',
1052 'author': 'Joe Doe <joe.doe@example.com>',
1026 'date': datetime.datetime(2010, 1, 1, 22),
1053 'date': datetime.datetime(2010, 1, 1, 22),
1027 'added': [
1054 'added': [
1028 FileNode('foo2', content='foo2'),
1055 FileNode('foo2', content='foo2'),
1029 ],
1056 ],
1030 },
1057 },
1031 ]
1058 ]
1032
1059
1033 def test_paths_slow_traversing(self):
1060 def test_paths_slow_traversing(self):
1034 commit = self.repo.get_commit()
1061 commit = self.repo.get_commit()
1035 assert commit.get_node('foobar').get_node('static').get_node('js')\
1062 assert commit.get_node('foobar').get_node('static').get_node('js')\
1036 .get_node('admin').get_node('base.js').content == 'base'
1063 .get_node('admin').get_node('base.js').content == 'base'
1037
1064
1038 def test_paths_fast_traversing(self):
1065 def test_paths_fast_traversing(self):
1039 commit = self.repo.get_commit()
1066 commit = self.repo.get_commit()
1040 assert (
1067 assert (
1041 commit.get_node('foobar/static/js/admin/base.js').content ==
1068 commit.get_node('foobar/static/js/admin/base.js').content ==
1042 'base')
1069 'base')
1043
1070
1044 def test_get_diff_runs_git_command_with_hashes(self):
1071 def test_get_diff_runs_git_command_with_hashes(self):
1045 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1072 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1046 self.repo.get_diff(self.repo[0], self.repo[1])
1073 self.repo.get_diff(self.repo[0], self.repo[1])
1047 self.repo.run_git_command.assert_called_once_with(
1074 self.repo.run_git_command.assert_called_once_with(
1048 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1075 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1049 '--abbrev=40', self.repo._get_commit_id(0),
1076 '--abbrev=40', self.repo._get_commit_id(0),
1050 self.repo._get_commit_id(1)])
1077 self.repo._get_commit_id(1)])
1051
1078
1052 def test_get_diff_runs_git_command_with_str_hashes(self):
1079 def test_get_diff_runs_git_command_with_str_hashes(self):
1053 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1080 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1054 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1081 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1055 self.repo.run_git_command.assert_called_once_with(
1082 self.repo.run_git_command.assert_called_once_with(
1056 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1083 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1057 '--abbrev=40', self.repo._get_commit_id(1)])
1084 '--abbrev=40', self.repo._get_commit_id(1)])
1058
1085
1059 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1086 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1060 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1087 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1061 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1088 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1062 self.repo.run_git_command.assert_called_once_with(
1089 self.repo.run_git_command.assert_called_once_with(
1063 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1090 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1064 '--abbrev=40', self.repo._get_commit_id(0),
1091 '--abbrev=40', self.repo._get_commit_id(0),
1065 self.repo._get_commit_id(1), '--', 'foo'])
1092 self.repo._get_commit_id(1), '--', 'foo'])
1066
1093
1067
1094
1068 class TestGitRegression(BackendTestMixin):
1095 class TestGitRegression(BackendTestMixin):
1069
1096
1070 @classmethod
1097 @classmethod
1071 def _get_commits(cls):
1098 def _get_commits(cls):
1072 return [
1099 return [
1073 {
1100 {
1074 'message': 'Initial',
1101 'message': 'Initial',
1075 'author': 'Joe Doe <joe.doe@example.com>',
1102 'author': 'Joe Doe <joe.doe@example.com>',
1076 'date': datetime.datetime(2010, 1, 1, 20),
1103 'date': datetime.datetime(2010, 1, 1, 20),
1077 'added': [
1104 'added': [
1078 FileNode('bot/__init__.py', content='base'),
1105 FileNode('bot/__init__.py', content='base'),
1079 FileNode('bot/templates/404.html', content='base'),
1106 FileNode('bot/templates/404.html', content='base'),
1080 FileNode('bot/templates/500.html', content='base'),
1107 FileNode('bot/templates/500.html', content='base'),
1081 ],
1108 ],
1082 },
1109 },
1083 {
1110 {
1084 'message': 'Second',
1111 'message': 'Second',
1085 'author': 'Joe Doe <joe.doe@example.com>',
1112 'author': 'Joe Doe <joe.doe@example.com>',
1086 'date': datetime.datetime(2010, 1, 1, 22),
1113 'date': datetime.datetime(2010, 1, 1, 22),
1087 'added': [
1114 'added': [
1088 FileNode('bot/build/migrations/1.py', content='foo2'),
1115 FileNode('bot/build/migrations/1.py', content='foo2'),
1089 FileNode('bot/build/migrations/2.py', content='foo2'),
1116 FileNode('bot/build/migrations/2.py', content='foo2'),
1090 FileNode(
1117 FileNode(
1091 'bot/build/static/templates/f.html', content='foo2'),
1118 'bot/build/static/templates/f.html', content='foo2'),
1092 FileNode(
1119 FileNode(
1093 'bot/build/static/templates/f1.html', content='foo2'),
1120 'bot/build/static/templates/f1.html', content='foo2'),
1094 FileNode('bot/build/templates/err.html', content='foo2'),
1121 FileNode('bot/build/templates/err.html', content='foo2'),
1095 FileNode('bot/build/templates/err2.html', content='foo2'),
1122 FileNode('bot/build/templates/err2.html', content='foo2'),
1096 ],
1123 ],
1097 },
1124 },
1098 ]
1125 ]
1099
1126
1100 @pytest.mark.parametrize("path, expected_paths", [
1127 @pytest.mark.parametrize("path, expected_paths", [
1101 ('bot', [
1128 ('bot', [
1102 'bot/build',
1129 'bot/build',
1103 'bot/templates',
1130 'bot/templates',
1104 'bot/__init__.py']),
1131 'bot/__init__.py']),
1105 ('bot/build', [
1132 ('bot/build', [
1106 'bot/build/migrations',
1133 'bot/build/migrations',
1107 'bot/build/static',
1134 'bot/build/static',
1108 'bot/build/templates']),
1135 'bot/build/templates']),
1109 ('bot/build/static', [
1136 ('bot/build/static', [
1110 'bot/build/static/templates']),
1137 'bot/build/static/templates']),
1111 ('bot/build/static/templates', [
1138 ('bot/build/static/templates', [
1112 'bot/build/static/templates/f.html',
1139 'bot/build/static/templates/f.html',
1113 'bot/build/static/templates/f1.html']),
1140 'bot/build/static/templates/f1.html']),
1114 ('bot/build/templates', [
1141 ('bot/build/templates', [
1115 'bot/build/templates/err.html',
1142 'bot/build/templates/err.html',
1116 'bot/build/templates/err2.html']),
1143 'bot/build/templates/err2.html']),
1117 ('bot/templates/', [
1144 ('bot/templates/', [
1118 'bot/templates/404.html',
1145 'bot/templates/404.html',
1119 'bot/templates/500.html']),
1146 'bot/templates/500.html']),
1120 ])
1147 ])
1121 def test_similar_paths(self, path, expected_paths):
1148 def test_similar_paths(self, path, expected_paths):
1122 commit = self.repo.get_commit()
1149 commit = self.repo.get_commit()
1123 paths = [n.path for n in commit.get_nodes(path)]
1150 paths = [n.path for n in commit.get_nodes(path)]
1124 assert paths == expected_paths
1151 assert paths == expected_paths
1125
1152
1126
1153
1127 class TestDiscoverGitVersion:
1154 class TestDiscoverGitVersion:
1128
1155
1129 def test_returns_git_version(self, pylonsapp):
1156 def test_returns_git_version(self, pylonsapp):
1130 version = discover_git_version()
1157 version = discover_git_version()
1131 assert version
1158 assert version
1132
1159
1133 def test_returns_empty_string_without_vcsserver(self):
1160 def test_returns_empty_string_without_vcsserver(self):
1134 mock_connection = mock.Mock()
1161 mock_connection = mock.Mock()
1135 mock_connection.discover_git_version = mock.Mock(
1162 mock_connection.discover_git_version = mock.Mock(
1136 side_effect=Exception)
1163 side_effect=Exception)
1137 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1164 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1138 version = discover_git_version()
1165 version = discover_git_version()
1139 assert version == ''
1166 assert version == ''
1140
1167
1141
1168
1142 class TestGetSubmoduleUrl(object):
1169 class TestGetSubmoduleUrl(object):
1143 def test_submodules_file_found(self):
1170 def test_submodules_file_found(self):
1144 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1171 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1145 node = mock.Mock()
1172 node = mock.Mock()
1146 with mock.patch.object(
1173 with mock.patch.object(
1147 commit, 'get_node', return_value=node) as get_node_mock:
1174 commit, 'get_node', return_value=node) as get_node_mock:
1148 node.content = (
1175 node.content = (
1149 '[submodule "subrepo1"]\n'
1176 '[submodule "subrepo1"]\n'
1150 '\tpath = subrepo1\n'
1177 '\tpath = subrepo1\n'
1151 '\turl = https://code.rhodecode.com/dulwich\n'
1178 '\turl = https://code.rhodecode.com/dulwich\n'
1152 )
1179 )
1153 result = commit._get_submodule_url('subrepo1')
1180 result = commit._get_submodule_url('subrepo1')
1154 get_node_mock.assert_called_once_with('.gitmodules')
1181 get_node_mock.assert_called_once_with('.gitmodules')
1155 assert result == 'https://code.rhodecode.com/dulwich'
1182 assert result == 'https://code.rhodecode.com/dulwich'
1156
1183
1157 def test_complex_submodule_path(self):
1184 def test_complex_submodule_path(self):
1158 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1185 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1159 node = mock.Mock()
1186 node = mock.Mock()
1160 with mock.patch.object(
1187 with mock.patch.object(
1161 commit, 'get_node', return_value=node) as get_node_mock:
1188 commit, 'get_node', return_value=node) as get_node_mock:
1162 node.content = (
1189 node.content = (
1163 '[submodule "complex/subrepo/path"]\n'
1190 '[submodule "complex/subrepo/path"]\n'
1164 '\tpath = complex/subrepo/path\n'
1191 '\tpath = complex/subrepo/path\n'
1165 '\turl = https://code.rhodecode.com/dulwich\n'
1192 '\turl = https://code.rhodecode.com/dulwich\n'
1166 )
1193 )
1167 result = commit._get_submodule_url('complex/subrepo/path')
1194 result = commit._get_submodule_url('complex/subrepo/path')
1168 get_node_mock.assert_called_once_with('.gitmodules')
1195 get_node_mock.assert_called_once_with('.gitmodules')
1169 assert result == 'https://code.rhodecode.com/dulwich'
1196 assert result == 'https://code.rhodecode.com/dulwich'
1170
1197
1171 def test_submodules_file_not_found(self):
1198 def test_submodules_file_not_found(self):
1172 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1199 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1173 with mock.patch.object(
1200 with mock.patch.object(
1174 commit, 'get_node', side_effect=NodeDoesNotExistError):
1201 commit, 'get_node', side_effect=NodeDoesNotExistError):
1175 result = commit._get_submodule_url('complex/subrepo/path')
1202 result = commit._get_submodule_url('complex/subrepo/path')
1176 assert result is None
1203 assert result is None
1177
1204
1178 def test_path_not_found(self):
1205 def test_path_not_found(self):
1179 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1206 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1180 node = mock.Mock()
1207 node = mock.Mock()
1181 with mock.patch.object(
1208 with mock.patch.object(
1182 commit, 'get_node', return_value=node) as get_node_mock:
1209 commit, 'get_node', return_value=node) as get_node_mock:
1183 node.content = (
1210 node.content = (
1184 '[submodule "subrepo1"]\n'
1211 '[submodule "subrepo1"]\n'
1185 '\tpath = subrepo1\n'
1212 '\tpath = subrepo1\n'
1186 '\turl = https://code.rhodecode.com/dulwich\n'
1213 '\turl = https://code.rhodecode.com/dulwich\n'
1187 )
1214 )
1188 result = commit._get_submodule_url('subrepo2')
1215 result = commit._get_submodule_url('subrepo2')
1189 get_node_mock.assert_called_once_with('.gitmodules')
1216 get_node_mock.assert_called_once_with('.gitmodules')
1190 assert result is None
1217 assert result is None
1191
1218
1192 def test_returns_cached_values(self):
1219 def test_returns_cached_values(self):
1193 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1220 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1194 node = mock.Mock()
1221 node = mock.Mock()
1195 with mock.patch.object(
1222 with mock.patch.object(
1196 commit, 'get_node', return_value=node) as get_node_mock:
1223 commit, 'get_node', return_value=node) as get_node_mock:
1197 node.content = (
1224 node.content = (
1198 '[submodule "subrepo1"]\n'
1225 '[submodule "subrepo1"]\n'
1199 '\tpath = subrepo1\n'
1226 '\tpath = subrepo1\n'
1200 '\turl = https://code.rhodecode.com/dulwich\n'
1227 '\turl = https://code.rhodecode.com/dulwich\n'
1201 )
1228 )
1202 for _ in range(3):
1229 for _ in range(3):
1203 commit._get_submodule_url('subrepo1')
1230 commit._get_submodule_url('subrepo1')
1204 get_node_mock.assert_called_once_with('.gitmodules')
1231 get_node_mock.assert_called_once_with('.gitmodules')
1205
1232
1206 def test_get_node_returns_a_link(self):
1233 def test_get_node_returns_a_link(self):
1207 repository = mock.Mock()
1234 repository = mock.Mock()
1208 repository.alias = 'git'
1235 repository.alias = 'git'
1209 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1236 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1210 submodule_url = 'https://code.rhodecode.com/dulwich'
1237 submodule_url = 'https://code.rhodecode.com/dulwich'
1211 get_id_patch = mock.patch.object(
1238 get_id_patch = mock.patch.object(
1212 commit, '_get_id_for_path', return_value=(1, 'link'))
1239 commit, '_get_id_for_path', return_value=(1, 'link'))
1213 get_submodule_patch = mock.patch.object(
1240 get_submodule_patch = mock.patch.object(
1214 commit, '_get_submodule_url', return_value=submodule_url)
1241 commit, '_get_submodule_url', return_value=submodule_url)
1215
1242
1216 with get_id_patch, get_submodule_patch as submodule_mock:
1243 with get_id_patch, get_submodule_patch as submodule_mock:
1217 node = commit.get_node('/abcde')
1244 node = commit.get_node('/abcde')
1218
1245
1219 submodule_mock.assert_called_once_with('/abcde')
1246 submodule_mock.assert_called_once_with('/abcde')
1220 assert type(node) == SubModuleNode
1247 assert type(node) == SubModuleNode
1221 assert node.url == submodule_url
1248 assert node.url == submodule_url
1222
1249
1223 def test_get_nodes_returns_links(self):
1250 def test_get_nodes_returns_links(self):
1224 repository = mock.MagicMock()
1251 repository = mock.MagicMock()
1225 repository.alias = 'git'
1252 repository.alias = 'git'
1226 repository._remote.tree_items.return_value = [
1253 repository._remote.tree_items.return_value = [
1227 ('subrepo', 'stat', 1, 'link')
1254 ('subrepo', 'stat', 1, 'link')
1228 ]
1255 ]
1229 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1230 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 submodule_url = 'https://code.rhodecode.com/dulwich'
1231 get_id_patch = mock.patch.object(
1258 get_id_patch = mock.patch.object(
1232 commit, '_get_id_for_path', return_value=(1, 'tree'))
1259 commit, '_get_id_for_path', return_value=(1, 'tree'))
1233 get_submodule_patch = mock.patch.object(
1260 get_submodule_patch = mock.patch.object(
1234 commit, '_get_submodule_url', return_value=submodule_url)
1261 commit, '_get_submodule_url', return_value=submodule_url)
1235
1262
1236 with get_id_patch, get_submodule_patch as submodule_mock:
1263 with get_id_patch, get_submodule_patch as submodule_mock:
1237 nodes = commit.get_nodes('/abcde')
1264 nodes = commit.get_nodes('/abcde')
1238
1265
1239 submodule_mock.assert_called_once_with('/abcde/subrepo')
1266 submodule_mock.assert_called_once_with('/abcde/subrepo')
1240 assert len(nodes) == 1
1267 assert len(nodes) == 1
1241 assert type(nodes[0]) == SubModuleNode
1268 assert type(nodes[0]) == SubModuleNode
1242 assert nodes[0].url == submodule_url
1269 assert nodes[0].url == submodule_url
@@ -1,1172 +1,1180 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 import rhodecode.lib.vcs.conf.settings
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 CommitError, RepositoryError, VCSError, NodeDoesNotExistError,
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 CommitDoesNotExistError)
34 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
35 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE
36
35
37
36
38 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
39
38
40
39
41 def repo_path_generator():
40 def repo_path_generator():
42 """
41 """
43 Return a different path to be used for cloning repos.
42 Return a different path to be used for cloning repos.
44 """
43 """
45 i = 0
44 i = 0
46 while True:
45 while True:
47 i += 1
46 i += 1
48 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
49
48
50
49
51 REPO_PATH_GENERATOR = repo_path_generator()
50 REPO_PATH_GENERATOR = repo_path_generator()
52
51
53
52
54 @pytest.fixture(scope='class', autouse=True)
53 @pytest.fixture(scope='class', autouse=True)
55 def repo(request, pylonsapp):
54 def repo(request, pylonsapp):
56 repo = MercurialRepository(TEST_HG_REPO)
55 repo = MercurialRepository(TEST_HG_REPO)
57 if request.cls:
56 if request.cls:
58 request.cls.repo = repo
57 request.cls.repo = repo
59 return repo
58 return repo
60
59
61
60
62 class TestMercurialRepository:
61 class TestMercurialRepository:
63
62
64 # pylint: disable=protected-access
63 # pylint: disable=protected-access
65
64
66 def get_clone_repo(self):
65 def get_clone_repo(self):
67 """
66 """
68 Return a clone of the base repo.
67 Return a clone of the base repo.
69 """
68 """
70 clone_path = next(REPO_PATH_GENERATOR)
69 clone_path = next(REPO_PATH_GENERATOR)
71 repo_clone = MercurialRepository(
70 repo_clone = MercurialRepository(
72 clone_path, create=True, src_url=self.repo.path)
71 clone_path, create=True, src_url=self.repo.path)
73
72
74 return repo_clone
73 return repo_clone
75
74
76 def get_empty_repo(self):
75 def get_empty_repo(self):
77 """
76 """
78 Return an empty repo.
77 Return an empty repo.
79 """
78 """
80 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
81
80
82 def test_wrong_repo_path(self):
81 def test_wrong_repo_path(self):
83 wrong_repo_path = '/tmp/errorrepo'
82 wrong_repo_path = '/tmp/errorrepo'
84 with pytest.raises(RepositoryError):
83 with pytest.raises(RepositoryError):
85 MercurialRepository(wrong_repo_path)
84 MercurialRepository(wrong_repo_path)
86
85
87 def test_unicode_path_repo(self):
86 def test_unicode_path_repo(self):
88 with pytest.raises(VCSError):
87 with pytest.raises(VCSError):
89 MercurialRepository(u'iShouldFail')
88 MercurialRepository(u'iShouldFail')
90
89
91 def test_unicode_commit_id(self):
90 def test_unicode_commit_id(self):
92 with pytest.raises(CommitDoesNotExistError):
91 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit(u'unicode-commit-id')
92 self.repo.get_commit(u'unicode-commit-id')
94 with pytest.raises(CommitDoesNotExistError):
93 with pytest.raises(CommitDoesNotExistError):
95 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
94 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
96
95
97 def test_unicode_bookmark(self):
96 def test_unicode_bookmark(self):
98 self.repo.bookmark(u'unicode-bookmark')
97 self.repo.bookmark(u'unicode-bookmark')
99 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
98 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
100
99
101 def test_unicode_branch(self):
100 def test_unicode_branch(self):
102 with pytest.raises(KeyError):
101 with pytest.raises(KeyError):
103 self.repo.branches[u'unicode-branch']
102 self.repo.branches[u'unicode-branch']
104 with pytest.raises(KeyError):
103 with pytest.raises(KeyError):
105 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
104 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
106
105
107 def test_repo_clone(self):
106 def test_repo_clone(self):
108 if os.path.exists(TEST_HG_REPO_CLONE):
107 if os.path.exists(TEST_HG_REPO_CLONE):
109 self.fail(
108 self.fail(
110 'Cannot test mercurial clone repo as location %s already '
109 'Cannot test mercurial clone repo as location %s already '
111 'exists. You should manually remove it first.'
110 'exists. You should manually remove it first.'
112 % TEST_HG_REPO_CLONE)
111 % TEST_HG_REPO_CLONE)
113
112
114 repo = MercurialRepository(TEST_HG_REPO)
113 repo = MercurialRepository(TEST_HG_REPO)
115 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
116 src_url=TEST_HG_REPO)
115 src_url=TEST_HG_REPO)
117 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
118 # Checking hashes of commits should be enough
117 # Checking hashes of commits should be enough
119 for commit in repo.get_commits():
118 for commit in repo.get_commits():
120 raw_id = commit.raw_id
119 raw_id = commit.raw_id
121 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120 assert raw_id == repo_clone.get_commit(raw_id).raw_id
122
121
123 def test_repo_clone_with_update(self):
122 def test_repo_clone_with_update(self):
124 repo = MercurialRepository(TEST_HG_REPO)
123 repo = MercurialRepository(TEST_HG_REPO)
125 repo_clone = MercurialRepository(
124 repo_clone = MercurialRepository(
126 TEST_HG_REPO_CLONE + '_w_update',
125 TEST_HG_REPO_CLONE + '_w_update',
127 src_url=TEST_HG_REPO, update_after_clone=True)
126 src_url=TEST_HG_REPO, update_after_clone=True)
128 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
129
128
130 # check if current workdir was updated
129 # check if current workdir was updated
131 assert os.path.isfile(
130 assert os.path.isfile(
132 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
133
132
134 def test_repo_clone_without_update(self):
133 def test_repo_clone_without_update(self):
135 repo = MercurialRepository(TEST_HG_REPO)
134 repo = MercurialRepository(TEST_HG_REPO)
136 repo_clone = MercurialRepository(
135 repo_clone = MercurialRepository(
137 TEST_HG_REPO_CLONE + '_wo_update',
136 TEST_HG_REPO_CLONE + '_wo_update',
138 src_url=TEST_HG_REPO, update_after_clone=False)
137 src_url=TEST_HG_REPO, update_after_clone=False)
139 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
140 assert not os.path.isfile(
139 assert not os.path.isfile(
141 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
142
141
143 def test_commit_ids(self):
142 def test_commit_ids(self):
144 # there are 21 commits at bitbucket now
143 # there are 21 commits at bitbucket now
145 # so we can assume they would be available from now on
144 # so we can assume they would be available from now on
146 subset = set([
145 subset = set([
147 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
148 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
149 '6cba7170863a2411822803fa77a0a264f1310b35',
148 '6cba7170863a2411822803fa77a0a264f1310b35',
150 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 '56349e29c2af3ac913b28bde9a2c6154436e615b',
151 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 '2dda4e345facb0ccff1a191052dd1606dba6781d',
152 '6fff84722075f1607a30f436523403845f84cd9e',
151 '6fff84722075f1607a30f436523403845f84cd9e',
153 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
154 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
155 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
156 'be90031137367893f1c406e0a8683010fd115b79',
155 'be90031137367893f1c406e0a8683010fd115b79',
157 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 'db8e58be770518cbb2b1cdfa69146e47cd481481',
158 '84478366594b424af694a6c784cb991a16b87c21',
157 '84478366594b424af694a6c784cb991a16b87c21',
159 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 '17f8e105dddb9f339600389c6dc7175d395a535c',
160 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
161 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
162 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 '786facd2c61deb9cf91e9534735124fb8fc11842',
163 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
164 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 'aa6a0de05b7612707db567078e130a6cd114a9a7',
165 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 'eada5a770da98ab0dd7325e29d00e0714f228d09'
166 ])
165 ])
167 assert subset.issubset(set(self.repo.commit_ids))
166 assert subset.issubset(set(self.repo.commit_ids))
168
167
169 # check if we have the proper order of commits
168 # check if we have the proper order of commits
170 org = [
169 org = [
171 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
172 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
173 '6cba7170863a2411822803fa77a0a264f1310b35',
172 '6cba7170863a2411822803fa77a0a264f1310b35',
174 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 '56349e29c2af3ac913b28bde9a2c6154436e615b',
175 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 '2dda4e345facb0ccff1a191052dd1606dba6781d',
176 '6fff84722075f1607a30f436523403845f84cd9e',
175 '6fff84722075f1607a30f436523403845f84cd9e',
177 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
178 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
179 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
180 'be90031137367893f1c406e0a8683010fd115b79',
179 'be90031137367893f1c406e0a8683010fd115b79',
181 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 'db8e58be770518cbb2b1cdfa69146e47cd481481',
182 '84478366594b424af694a6c784cb991a16b87c21',
181 '84478366594b424af694a6c784cb991a16b87c21',
183 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 '17f8e105dddb9f339600389c6dc7175d395a535c',
184 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
185 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
186 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 '786facd2c61deb9cf91e9534735124fb8fc11842',
187 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
188 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 'aa6a0de05b7612707db567078e130a6cd114a9a7',
189 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 'eada5a770da98ab0dd7325e29d00e0714f228d09',
190 '2c1885c735575ca478bf9e17b0029dca68824458',
189 '2c1885c735575ca478bf9e17b0029dca68824458',
191 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
192 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
193 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 '4fb8326d78e5120da2c7468dcf7098997be385da',
194 '62b4a097164940bd66030c4db51687f3ec035eed',
193 '62b4a097164940bd66030c4db51687f3ec035eed',
195 '536c1a19428381cfea92ac44985304f6a8049569',
194 '536c1a19428381cfea92ac44985304f6a8049569',
196 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
197 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
198 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 'f8940bcb890a98c4702319fbe36db75ea309b475',
199 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
200 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
201 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 'ee87846a61c12153b51543bf860e1026c6d3dcba',
202 ]
201 ]
203 assert org == self.repo.commit_ids[:31]
202 assert org == self.repo.commit_ids[:31]
204
203
205 def test_iter_slice(self):
204 def test_iter_slice(self):
206 sliced = list(self.repo[:10])
205 sliced = list(self.repo[:10])
207 itered = list(self.repo)[:10]
206 itered = list(self.repo)[:10]
208 assert sliced == itered
207 assert sliced == itered
209
208
210 def test_slicing(self):
209 def test_slicing(self):
211 # 4 1 5 10 95
210 # 4 1 5 10 95
212 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
213 (10, 20, 10), (5, 100, 95)]:
212 (10, 20, 10), (5, 100, 95)]:
214 indexes = list(self.repo[sfrom:sto])
213 indexes = list(self.repo[sfrom:sto])
215 assert len(indexes) == size
214 assert len(indexes) == size
216 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
217 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
218
217
219 def test_branches(self):
218 def test_branches(self):
220 # TODO: Need more tests here
219 # TODO: Need more tests here
221
220
222 # active branches
221 # active branches
223 assert 'default' in self.repo.branches
222 assert 'default' in self.repo.branches
224 assert 'stable' in self.repo.branches
223 assert 'stable' in self.repo.branches
225
224
226 # closed
225 # closed
227 assert 'git' in self.repo._get_branches(closed=True)
226 assert 'git' in self.repo._get_branches(closed=True)
228 assert 'web' in self.repo._get_branches(closed=True)
227 assert 'web' in self.repo._get_branches(closed=True)
229
228
230 for name, id in self.repo.branches.items():
229 for name, id in self.repo.branches.items():
231 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230 assert isinstance(self.repo.get_commit(id), MercurialCommit)
232
231
233 def test_tip_in_tags(self):
232 def test_tip_in_tags(self):
234 # tip is always a tag
233 # tip is always a tag
235 assert 'tip' in self.repo.tags
234 assert 'tip' in self.repo.tags
236
235
237 def test_tip_commit_in_tags(self):
236 def test_tip_commit_in_tags(self):
238 tip = self.repo.get_commit()
237 tip = self.repo.get_commit()
239 assert self.repo.tags['tip'] == tip.raw_id
238 assert self.repo.tags['tip'] == tip.raw_id
240
239
241 def test_initial_commit(self):
240 def test_initial_commit(self):
242 init_commit = self.repo.get_commit(commit_idx=0)
241 init_commit = self.repo.get_commit(commit_idx=0)
243 init_author = init_commit.author
242 init_author = init_commit.author
244
243
245 assert init_commit.message == 'initial import'
244 assert init_commit.message == 'initial import'
246 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
247 assert init_author == init_commit.committer
246 assert init_author == init_commit.committer
248 assert sorted(init_commit._file_paths) == sorted([
247 assert sorted(init_commit._file_paths) == sorted([
249 'vcs/__init__.py',
248 'vcs/__init__.py',
250 'vcs/backends/BaseRepository.py',
249 'vcs/backends/BaseRepository.py',
251 'vcs/backends/__init__.py',
250 'vcs/backends/__init__.py',
252 ])
251 ])
253 assert sorted(init_commit._dir_paths) == sorted(
252 assert sorted(init_commit._dir_paths) == sorted(
254 ['', 'vcs', 'vcs/backends'])
253 ['', 'vcs', 'vcs/backends'])
255
254
256 assert init_commit._dir_paths + init_commit._file_paths == \
255 assert init_commit._dir_paths + init_commit._file_paths == \
257 init_commit._paths
256 init_commit._paths
258
257
259 with pytest.raises(NodeDoesNotExistError):
258 with pytest.raises(NodeDoesNotExistError):
260 init_commit.get_node(path='foobar')
259 init_commit.get_node(path='foobar')
261
260
262 node = init_commit.get_node('vcs/')
261 node = init_commit.get_node('vcs/')
263 assert hasattr(node, 'kind')
262 assert hasattr(node, 'kind')
264 assert node.kind == NodeKind.DIR
263 assert node.kind == NodeKind.DIR
265
264
266 node = init_commit.get_node('vcs')
265 node = init_commit.get_node('vcs')
267 assert hasattr(node, 'kind')
266 assert hasattr(node, 'kind')
268 assert node.kind == NodeKind.DIR
267 assert node.kind == NodeKind.DIR
269
268
270 node = init_commit.get_node('vcs/__init__.py')
269 node = init_commit.get_node('vcs/__init__.py')
271 assert hasattr(node, 'kind')
270 assert hasattr(node, 'kind')
272 assert node.kind == NodeKind.FILE
271 assert node.kind == NodeKind.FILE
273
272
274 def test_not_existing_commit(self):
273 def test_not_existing_commit(self):
275 # rawid
274 # rawid
276 with pytest.raises(RepositoryError):
275 with pytest.raises(RepositoryError):
277 self.repo.get_commit('abcd' * 10)
276 self.repo.get_commit('abcd' * 10)
278 # shortid
277 # shortid
279 with pytest.raises(RepositoryError):
278 with pytest.raises(RepositoryError):
280 self.repo.get_commit('erro' * 4)
279 self.repo.get_commit('erro' * 4)
281 # numeric
280 # numeric
282 with pytest.raises(RepositoryError):
281 with pytest.raises(RepositoryError):
283 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282 self.repo.get_commit(commit_idx=self.repo.count() + 1)
284
283
285 # Small chance we ever get to this one
284 # Small chance we ever get to this one
286 idx = pow(2, 30)
285 idx = pow(2, 30)
287 with pytest.raises(RepositoryError):
286 with pytest.raises(RepositoryError):
288 self.repo.get_commit(commit_idx=idx)
287 self.repo.get_commit(commit_idx=idx)
289
288
290 def test_commit10(self):
289 def test_commit10(self):
291 commit10 = self.repo.get_commit(commit_idx=10)
290 commit10 = self.repo.get_commit(commit_idx=10)
292 README = """===
291 README = """===
293 VCS
292 VCS
294 ===
293 ===
295
294
296 Various Version Control System management abstraction layer for Python.
295 Various Version Control System management abstraction layer for Python.
297
296
298 Introduction
297 Introduction
299 ------------
298 ------------
300
299
301 TODO: To be written...
300 TODO: To be written...
302
301
303 """
302 """
304 node = commit10.get_node('README.rst')
303 node = commit10.get_node('README.rst')
305 assert node.kind == NodeKind.FILE
304 assert node.kind == NodeKind.FILE
306 assert node.content == README
305 assert node.content == README
307
306
308 def test_local_clone(self):
307 def test_local_clone(self):
309 clone_path = next(REPO_PATH_GENERATOR)
308 clone_path = next(REPO_PATH_GENERATOR)
310 self.repo._local_clone(clone_path)
309 self.repo._local_clone(clone_path)
311 repo_clone = MercurialRepository(clone_path)
310 repo_clone = MercurialRepository(clone_path)
312
311
313 assert self.repo.commit_ids == repo_clone.commit_ids
312 assert self.repo.commit_ids == repo_clone.commit_ids
314
313
315 def test_local_clone_fails_if_target_exists(self):
314 def test_local_clone_fails_if_target_exists(self):
316 with pytest.raises(RepositoryError):
315 with pytest.raises(RepositoryError):
317 self.repo._local_clone(self.repo.path)
316 self.repo._local_clone(self.repo.path)
318
317
319 def test_update(self):
318 def test_update(self):
320 repo_clone = self.get_clone_repo()
319 repo_clone = self.get_clone_repo()
321 branches = repo_clone.branches
320 branches = repo_clone.branches
322
321
323 repo_clone._update('default')
322 repo_clone._update('default')
324 assert branches['default'] == repo_clone._identify()
323 assert branches['default'] == repo_clone._identify()
325 repo_clone._update('stable')
324 repo_clone._update('stable')
326 assert branches['stable'] == repo_clone._identify()
325 assert branches['stable'] == repo_clone._identify()
327
326
328 def test_local_pull_branch(self):
327 def test_local_pull_branch(self):
329 target_repo = self.get_empty_repo()
328 target_repo = self.get_empty_repo()
330 source_repo = self.get_clone_repo()
329 source_repo = self.get_clone_repo()
331
330
332 default = Reference(
331 default = Reference(
333 'branch', 'default', source_repo.branches['default'])
332 'branch', 'default', source_repo.branches['default'])
334 target_repo._local_pull(source_repo.path, default)
333 target_repo._local_pull(source_repo.path, default)
335 target_repo = MercurialRepository(target_repo.path)
334 target_repo = MercurialRepository(target_repo.path)
336 assert (target_repo.branches['default'] ==
335 assert (target_repo.branches['default'] ==
337 source_repo.branches['default'])
336 source_repo.branches['default'])
338
337
339 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 stable = Reference('branch', 'stable', source_repo.branches['stable'])
340 target_repo._local_pull(source_repo.path, stable)
339 target_repo._local_pull(source_repo.path, stable)
341 target_repo = MercurialRepository(target_repo.path)
340 target_repo = MercurialRepository(target_repo.path)
342 assert target_repo.branches['stable'] == source_repo.branches['stable']
341 assert target_repo.branches['stable'] == source_repo.branches['stable']
343
342
344 def test_local_pull_bookmark(self):
343 def test_local_pull_bookmark(self):
345 target_repo = self.get_empty_repo()
344 target_repo = self.get_empty_repo()
346 source_repo = self.get_clone_repo()
345 source_repo = self.get_clone_repo()
347
346
348 commits = list(source_repo.get_commits(branch_name='default'))
347 commits = list(source_repo.get_commits(branch_name='default'))
349 foo1_id = commits[-5].raw_id
348 foo1_id = commits[-5].raw_id
350 foo1 = Reference('book', 'foo1', foo1_id)
349 foo1 = Reference('book', 'foo1', foo1_id)
351 source_repo._update(foo1_id)
350 source_repo._update(foo1_id)
352 source_repo.bookmark('foo1')
351 source_repo.bookmark('foo1')
353
352
354 foo2_id = commits[-3].raw_id
353 foo2_id = commits[-3].raw_id
355 foo2 = Reference('book', 'foo2', foo2_id)
354 foo2 = Reference('book', 'foo2', foo2_id)
356 source_repo._update(foo2_id)
355 source_repo._update(foo2_id)
357 source_repo.bookmark('foo2')
356 source_repo.bookmark('foo2')
358
357
359 target_repo._local_pull(source_repo.path, foo1)
358 target_repo._local_pull(source_repo.path, foo1)
360 target_repo = MercurialRepository(target_repo.path)
359 target_repo = MercurialRepository(target_repo.path)
361 assert target_repo.branches['default'] == commits[-5].raw_id
360 assert target_repo.branches['default'] == commits[-5].raw_id
362
361
363 target_repo._local_pull(source_repo.path, foo2)
362 target_repo._local_pull(source_repo.path, foo2)
364 target_repo = MercurialRepository(target_repo.path)
363 target_repo = MercurialRepository(target_repo.path)
365 assert target_repo.branches['default'] == commits[-3].raw_id
364 assert target_repo.branches['default'] == commits[-3].raw_id
366
365
367 def test_local_pull_commit(self):
366 def test_local_pull_commit(self):
368 target_repo = self.get_empty_repo()
367 target_repo = self.get_empty_repo()
369 source_repo = self.get_clone_repo()
368 source_repo = self.get_clone_repo()
370
369
371 commits = list(source_repo.get_commits(branch_name='default'))
370 commits = list(source_repo.get_commits(branch_name='default'))
372 commit_id = commits[-5].raw_id
371 commit_id = commits[-5].raw_id
373 commit = Reference('rev', commit_id, commit_id)
372 commit = Reference('rev', commit_id, commit_id)
374 target_repo._local_pull(source_repo.path, commit)
373 target_repo._local_pull(source_repo.path, commit)
375 target_repo = MercurialRepository(target_repo.path)
374 target_repo = MercurialRepository(target_repo.path)
376 assert target_repo.branches['default'] == commit_id
375 assert target_repo.branches['default'] == commit_id
377
376
378 commit_id = commits[-3].raw_id
377 commit_id = commits[-3].raw_id
379 commit = Reference('rev', commit_id, commit_id)
378 commit = Reference('rev', commit_id, commit_id)
380 target_repo._local_pull(source_repo.path, commit)
379 target_repo._local_pull(source_repo.path, commit)
381 target_repo = MercurialRepository(target_repo.path)
380 target_repo = MercurialRepository(target_repo.path)
382 assert target_repo.branches['default'] == commit_id
381 assert target_repo.branches['default'] == commit_id
383
382
384 def test_local_pull_from_same_repo(self):
383 def test_local_pull_from_same_repo(self):
385 reference = Reference('branch', 'default', None)
384 reference = Reference('branch', 'default', None)
386 with pytest.raises(ValueError):
385 with pytest.raises(ValueError):
387 self.repo._local_pull(self.repo.path, reference)
386 self.repo._local_pull(self.repo.path, reference)
388
387
389 def test_validate_pull_reference_raises_on_missing_reference(
388 def test_validate_pull_reference_raises_on_missing_reference(
390 self, vcsbackend_hg):
389 self, vcsbackend_hg):
391 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
392 reference = Reference(
391 reference = Reference(
393 'book', 'invalid_reference', 'a' * 40)
392 'book', 'invalid_reference', 'a' * 40)
394
393
395 with pytest.raises(CommitDoesNotExistError):
394 with pytest.raises(CommitDoesNotExistError):
396 target_repo._validate_pull_reference(reference)
395 target_repo._validate_pull_reference(reference)
397
396
398 def test_heads(self):
397 def test_heads(self):
399 assert set(self.repo._heads()) == set(self.repo.branches.values())
398 assert set(self.repo._heads()) == set(self.repo.branches.values())
400
399
401 def test_ancestor(self):
400 def test_ancestor(self):
402 commits = [
401 commits = [
403 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 c.raw_id for c in self.repo.get_commits(branch_name='default')]
404 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
405 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
406
405
407 def test_local_push(self):
406 def test_local_push(self):
408 target_repo = self.get_empty_repo()
407 target_repo = self.get_empty_repo()
409
408
410 revisions = list(self.repo.get_commits(branch_name='default'))
409 revisions = list(self.repo.get_commits(branch_name='default'))
411 revision = revisions[-5].raw_id
410 revision = revisions[-5].raw_id
412 self.repo._local_push(revision, target_repo.path)
411 self.repo._local_push(revision, target_repo.path)
413
412
414 target_repo = MercurialRepository(target_repo.path)
413 target_repo = MercurialRepository(target_repo.path)
415
414
416 assert target_repo.branches['default'] == revision
415 assert target_repo.branches['default'] == revision
417
416
418 def test_hooks_can_be_enabled_for_local_push(self):
417 def test_hooks_can_be_enabled_for_local_push(self):
419 revision = 'deadbeef'
418 revision = 'deadbeef'
420 repo_path = 'test_group/test_repo'
419 repo_path = 'test_group/test_repo'
421 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 with mock.patch.object(self.repo, '_remote') as remote_mock:
422 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 self.repo._local_push(revision, repo_path, enable_hooks=True)
423 remote_mock.push.assert_called_once_with(
422 remote_mock.push.assert_called_once_with(
424 [revision], repo_path, hooks=True, push_branches=False)
423 [revision], repo_path, hooks=True, push_branches=False)
425
424
426 def test_local_merge(self, vcsbackend_hg):
425 def test_local_merge(self, vcsbackend_hg):
427 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
428 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 source_repo = vcsbackend_hg.clone_repo(target_repo)
429 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
430 target_repo = MercurialRepository(target_repo.path)
429 target_repo = MercurialRepository(target_repo.path)
431 target_rev = target_repo.branches['default']
430 target_rev = target_repo.branches['default']
432 target_ref = Reference(
431 target_ref = Reference(
433 type='branch', name='default', commit_id=target_rev)
432 type='branch', name='default', commit_id=target_rev)
434 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
435 source_repo = MercurialRepository(source_repo.path)
434 source_repo = MercurialRepository(source_repo.path)
436 source_rev = source_repo.branches['default']
435 source_rev = source_repo.branches['default']
437 source_ref = Reference(
436 source_ref = Reference(
438 type='branch', name='default', commit_id=source_rev)
437 type='branch', name='default', commit_id=source_rev)
439
438
440 target_repo._local_pull(source_repo.path, source_ref)
439 target_repo._local_pull(source_repo.path, source_ref)
441
440
442 merge_message = 'Merge message\n\nDescription:...'
441 merge_message = 'Merge message\n\nDescription:...'
443 user_name = 'Albert Einstein'
442 user_name = 'Albert Einstein'
444 user_email = 'albert@einstein.com'
443 user_email = 'albert@einstein.com'
445 merge_commit_id, needs_push = target_repo._local_merge(
444 merge_commit_id, needs_push = target_repo._local_merge(
446 target_ref, merge_message, user_name, user_email, source_ref)
445 target_ref, merge_message, user_name, user_email, source_ref)
447 assert needs_push
446 assert needs_push
448
447
449 target_repo = MercurialRepository(target_repo.path)
448 target_repo = MercurialRepository(target_repo.path)
450 assert target_repo.commit_ids[-3] == target_rev
449 assert target_repo.commit_ids[-3] == target_rev
451 assert target_repo.commit_ids[-2] == source_rev
450 assert target_repo.commit_ids[-2] == source_rev
452 last_commit = target_repo.get_commit(merge_commit_id)
451 last_commit = target_repo.get_commit(merge_commit_id)
453 assert last_commit.message.strip() == merge_message
452 assert last_commit.message.strip() == merge_message
454 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453 assert last_commit.author == '%s <%s>' % (user_name, user_email)
455
454
456 assert not os.path.exists(
455 assert not os.path.exists(
457 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
458
457
459 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
460 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
461 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 source_repo = vcsbackend_hg.clone_repo(target_repo)
462 target_rev = target_repo.branches['default']
461 target_rev = target_repo.branches['default']
463 target_ref = Reference(
462 target_ref = Reference(
464 type='branch', name='default', commit_id=target_rev)
463 type='branch', name='default', commit_id=target_rev)
465 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
466 source_repo = MercurialRepository(source_repo.path)
465 source_repo = MercurialRepository(source_repo.path)
467 source_rev = source_repo.branches['default']
466 source_rev = source_repo.branches['default']
468 source_ref = Reference(
467 source_ref = Reference(
469 type='branch', name='default', commit_id=source_rev)
468 type='branch', name='default', commit_id=source_rev)
470
469
471 target_repo._local_pull(source_repo.path, source_ref)
470 target_repo._local_pull(source_repo.path, source_ref)
472
471
473 merge_message = 'Merge message\n\nDescription:...'
472 merge_message = 'Merge message\n\nDescription:...'
474 user_name = 'Albert Einstein'
473 user_name = 'Albert Einstein'
475 user_email = 'albert@einstein.com'
474 user_email = 'albert@einstein.com'
476 merge_commit_id, needs_push = target_repo._local_merge(
475 merge_commit_id, needs_push = target_repo._local_merge(
477 target_ref, merge_message, user_name, user_email, source_ref)
476 target_ref, merge_message, user_name, user_email, source_ref)
478 assert merge_commit_id == source_rev
477 assert merge_commit_id == source_rev
479 assert needs_push
478 assert needs_push
480
479
481 target_repo = MercurialRepository(target_repo.path)
480 target_repo = MercurialRepository(target_repo.path)
482 assert target_repo.commit_ids[-2] == target_rev
481 assert target_repo.commit_ids[-2] == target_rev
483 assert target_repo.commit_ids[-1] == source_rev
482 assert target_repo.commit_ids[-1] == source_rev
484
483
485 assert not os.path.exists(
484 assert not os.path.exists(
486 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
487
486
488 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
489 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
490 target_rev = target_repo.branches['default']
489 target_rev = target_repo.branches['default']
491 target_ref = Reference(
490 target_ref = Reference(
492 type='branch', name='default', commit_id=target_rev)
491 type='branch', name='default', commit_id=target_rev)
493
492
494 merge_message = 'Merge message\n\nDescription:...'
493 merge_message = 'Merge message\n\nDescription:...'
495 user_name = 'Albert Einstein'
494 user_name = 'Albert Einstein'
496 user_email = 'albert@einstein.com'
495 user_email = 'albert@einstein.com'
497 merge_commit_id, needs_push = target_repo._local_merge(
496 merge_commit_id, needs_push = target_repo._local_merge(
498 target_ref, merge_message, user_name, user_email, target_ref)
497 target_ref, merge_message, user_name, user_email, target_ref)
499 assert merge_commit_id == target_rev
498 assert merge_commit_id == target_rev
500 assert not needs_push
499 assert not needs_push
501
500
502 target_repo = MercurialRepository(target_repo.path)
501 target_repo = MercurialRepository(target_repo.path)
503 assert target_repo.commit_ids[-1] == target_rev
502 assert target_repo.commit_ids[-1] == target_rev
504
503
505 assert not os.path.exists(
504 assert not os.path.exists(
506 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
507
506
508 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
509 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
510 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 source_repo = vcsbackend_hg.clone_repo(target_repo)
511 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
512 target_repo = MercurialRepository(target_repo.path)
511 target_repo = MercurialRepository(target_repo.path)
513 target_rev = target_repo.branches['default']
512 target_rev = target_repo.branches['default']
514 target_ref = Reference(
513 target_ref = Reference(
515 type='branch', name='default', commit_id=target_rev)
514 type='branch', name='default', commit_id=target_rev)
516 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
517 source_repo = MercurialRepository(source_repo.path)
516 source_repo = MercurialRepository(source_repo.path)
518 source_rev = source_repo.branches['default']
517 source_rev = source_repo.branches['default']
519 source_ref = Reference(
518 source_ref = Reference(
520 type='branch', name='default', commit_id=source_rev)
519 type='branch', name='default', commit_id=source_rev)
521
520
522 target_repo._local_pull(source_repo.path, source_ref)
521 target_repo._local_pull(source_repo.path, source_ref)
523 with pytest.raises(RepositoryError):
522 with pytest.raises(RepositoryError):
524 target_repo._local_merge(
523 target_repo._local_merge(
525 target_ref, 'merge_message', 'user name', 'user@name.com',
524 target_ref, 'merge_message', 'user name', 'user@name.com',
526 source_ref)
525 source_ref)
527
526
528 # Check we are not left in an intermediate merge state
527 # Check we are not left in an intermediate merge state
529 assert not os.path.exists(
528 assert not os.path.exists(
530 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
531
530
532 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
533 commits = [
532 commits = [
534 {'message': 'a'},
533 {'message': 'a'},
535 {'message': 'b', 'branch': 'b'},
534 {'message': 'b', 'branch': 'b'},
536 ]
535 ]
537 repo = backend_hg.create_repo(commits)
536 repo = backend_hg.create_repo(commits)
538 commit_ids = backend_hg.commit_ids
537 commit_ids = backend_hg.commit_ids
539 target_ref = Reference(
538 target_ref = Reference(
540 type='branch', name='default', commit_id=commit_ids['a'])
539 type='branch', name='default', commit_id=commit_ids['a'])
541 source_ref = Reference(
540 source_ref = Reference(
542 type='branch', name='b', commit_id=commit_ids['b'])
541 type='branch', name='b', commit_id=commit_ids['b'])
543 merge_message = 'Merge message\n\nDescription:...'
542 merge_message = 'Merge message\n\nDescription:...'
544 user_name = 'Albert Einstein'
543 user_name = 'Albert Einstein'
545 user_email = 'albert@einstein.com'
544 user_email = 'albert@einstein.com'
546 vcs_repo = repo.scm_instance()
545 vcs_repo = repo.scm_instance()
547 merge_commit_id, needs_push = vcs_repo._local_merge(
546 merge_commit_id, needs_push = vcs_repo._local_merge(
548 target_ref, merge_message, user_name, user_email, source_ref)
547 target_ref, merge_message, user_name, user_email, source_ref)
549 assert merge_commit_id != source_ref.commit_id
548 assert merge_commit_id != source_ref.commit_id
550 assert needs_push is True
549 assert needs_push is True
551 commit = vcs_repo.get_commit(merge_commit_id)
550 commit = vcs_repo.get_commit(merge_commit_id)
552 assert commit.merge is True
551 assert commit.merge is True
553 assert commit.message == merge_message
552 assert commit.message == merge_message
554
553
555 def test_maybe_prepare_merge_workspace(self):
554 def test_maybe_prepare_merge_workspace(self):
556 workspace = self.repo._maybe_prepare_merge_workspace('pr2', 'unused')
555 workspace = self.repo._maybe_prepare_merge_workspace('pr2', 'unused')
557
556
558 assert os.path.isdir(workspace)
557 assert os.path.isdir(workspace)
559 workspace_repo = MercurialRepository(workspace)
558 workspace_repo = MercurialRepository(workspace)
560 assert workspace_repo.branches == self.repo.branches
559 assert workspace_repo.branches == self.repo.branches
561
560
562 # Calling it a second time should also succeed
561 # Calling it a second time should also succeed
563 workspace = self.repo._maybe_prepare_merge_workspace('pr2', 'unused')
562 workspace = self.repo._maybe_prepare_merge_workspace('pr2', 'unused')
564 assert os.path.isdir(workspace)
563 assert os.path.isdir(workspace)
565
564
566 def test_cleanup_merge_workspace(self):
565 def test_cleanup_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace('pr3', 'unused')
566 workspace = self.repo._maybe_prepare_merge_workspace('pr3', 'unused')
568 self.repo.cleanup_merge_workspace('pr3')
567 self.repo.cleanup_merge_workspace('pr3')
569
568
570 assert not os.path.exists(workspace)
569 assert not os.path.exists(workspace)
571
570
572 def test_cleanup_merge_workspace_invalid_workspace_id(self):
571 def test_cleanup_merge_workspace_invalid_workspace_id(self):
573 # No assert: because in case of an inexistent workspace this function
572 # No assert: because in case of an inexistent workspace this function
574 # should still succeed.
573 # should still succeed.
575 self.repo.cleanup_merge_workspace('pr4')
574 self.repo.cleanup_merge_workspace('pr4')
576
575
577 def test_merge_target_is_bookmark(self, vcsbackend_hg):
576 def test_merge_target_is_bookmark(self, vcsbackend_hg):
578 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
577 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
579 source_repo = vcsbackend_hg.clone_repo(target_repo)
578 source_repo = vcsbackend_hg.clone_repo(target_repo)
580 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
579 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
581 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
580 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
582 imc = source_repo.in_memory_commit
581 imc = source_repo.in_memory_commit
583 imc.add(FileNode('file_x', content=source_repo.name))
582 imc.add(FileNode('file_x', content=source_repo.name))
584 imc.commit(
583 imc.commit(
585 message=u'Automatic commit from repo merge test',
584 message=u'Automatic commit from repo merge test',
586 author=u'Automatic')
585 author=u'Automatic')
587 target_commit = target_repo.get_commit()
586 target_commit = target_repo.get_commit()
588 source_commit = source_repo.get_commit()
587 source_commit = source_repo.get_commit()
589 default_branch = target_repo.DEFAULT_BRANCH_NAME
588 default_branch = target_repo.DEFAULT_BRANCH_NAME
590 bookmark_name = 'bookmark'
589 bookmark_name = 'bookmark'
591 target_repo._update(default_branch)
590 target_repo._update(default_branch)
592 target_repo.bookmark(bookmark_name)
591 target_repo.bookmark(bookmark_name)
593 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
592 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
594 source_ref = Reference('branch', default_branch, source_commit.raw_id)
593 source_ref = Reference('branch', default_branch, source_commit.raw_id)
595 workspace = 'test-merge'
594 workspace = 'test-merge'
596
595
597 merge_response = target_repo.merge(
596 merge_response = target_repo.merge(
598 target_ref, source_repo, source_ref, workspace,
597 target_ref, source_repo, source_ref, workspace,
599 'test user', 'test@rhodecode.com', 'merge message 1',
598 'test user', 'test@rhodecode.com', 'merge message 1',
600 dry_run=False)
599 dry_run=False)
601 expected_merge_response = MergeResponse(
600 expected_merge_response = MergeResponse(
602 True, True, merge_response.merge_ref,
601 True, True, merge_response.merge_ref,
603 MergeFailureReason.NONE)
602 MergeFailureReason.NONE)
604 assert merge_response == expected_merge_response
603 assert merge_response == expected_merge_response
605
604
606 target_repo = backends.get_backend(vcsbackend_hg.alias)(
605 target_repo = backends.get_backend(vcsbackend_hg.alias)(
607 target_repo.path)
606 target_repo.path)
608 target_commits = list(target_repo.get_commits())
607 target_commits = list(target_repo.get_commits())
609 commit_ids = [c.raw_id for c in target_commits[:-1]]
608 commit_ids = [c.raw_id for c in target_commits[:-1]]
610 assert source_ref.commit_id in commit_ids
609 assert source_ref.commit_id in commit_ids
611 assert target_ref.commit_id in commit_ids
610 assert target_ref.commit_id in commit_ids
612
611
613 merge_commit = target_commits[-1]
612 merge_commit = target_commits[-1]
614 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
613 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
615 assert merge_commit.message.strip() == 'merge message 1'
614 assert merge_commit.message.strip() == 'merge message 1'
616 assert merge_commit.author == 'test user <test@rhodecode.com>'
615 assert merge_commit.author == 'test user <test@rhodecode.com>'
617
616
618 # Check the bookmark was updated in the target repo
617 # Check the bookmark was updated in the target repo
619 assert (
618 assert (
620 target_repo.bookmarks[bookmark_name] ==
619 target_repo.bookmarks[bookmark_name] ==
621 merge_response.merge_ref.commit_id)
620 merge_response.merge_ref.commit_id)
622
621
623 def test_merge_source_is_bookmark(self, vcsbackend_hg):
622 def test_merge_source_is_bookmark(self, vcsbackend_hg):
624 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
623 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
625 source_repo = vcsbackend_hg.clone_repo(target_repo)
624 source_repo = vcsbackend_hg.clone_repo(target_repo)
626 imc = source_repo.in_memory_commit
625 imc = source_repo.in_memory_commit
627 imc.add(FileNode('file_x', content=source_repo.name))
626 imc.add(FileNode('file_x', content=source_repo.name))
628 imc.commit(
627 imc.commit(
629 message=u'Automatic commit from repo merge test',
628 message=u'Automatic commit from repo merge test',
630 author=u'Automatic')
629 author=u'Automatic')
631 target_commit = target_repo.get_commit()
630 target_commit = target_repo.get_commit()
632 source_commit = source_repo.get_commit()
631 source_commit = source_repo.get_commit()
633 default_branch = target_repo.DEFAULT_BRANCH_NAME
632 default_branch = target_repo.DEFAULT_BRANCH_NAME
634 bookmark_name = 'bookmark'
633 bookmark_name = 'bookmark'
635 target_ref = Reference('branch', default_branch, target_commit.raw_id)
634 target_ref = Reference('branch', default_branch, target_commit.raw_id)
636 source_repo._update(default_branch)
635 source_repo._update(default_branch)
637 source_repo.bookmark(bookmark_name)
636 source_repo.bookmark(bookmark_name)
638 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
637 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
639 workspace = 'test-merge'
638 workspace = 'test-merge'
640
639
641 merge_response = target_repo.merge(
640 merge_response = target_repo.merge(
642 target_ref, source_repo, source_ref, workspace,
641 target_ref, source_repo, source_ref, workspace,
643 'test user', 'test@rhodecode.com', 'merge message 1',
642 'test user', 'test@rhodecode.com', 'merge message 1',
644 dry_run=False)
643 dry_run=False)
645 expected_merge_response = MergeResponse(
644 expected_merge_response = MergeResponse(
646 True, True, merge_response.merge_ref,
645 True, True, merge_response.merge_ref,
647 MergeFailureReason.NONE)
646 MergeFailureReason.NONE)
648 assert merge_response == expected_merge_response
647 assert merge_response == expected_merge_response
649
648
650 target_repo = backends.get_backend(vcsbackend_hg.alias)(
649 target_repo = backends.get_backend(vcsbackend_hg.alias)(
651 target_repo.path)
650 target_repo.path)
652 target_commits = list(target_repo.get_commits())
651 target_commits = list(target_repo.get_commits())
653 commit_ids = [c.raw_id for c in target_commits]
652 commit_ids = [c.raw_id for c in target_commits]
654 assert source_ref.commit_id == commit_ids[-1]
653 assert source_ref.commit_id == commit_ids[-1]
655 assert target_ref.commit_id == commit_ids[-2]
654 assert target_ref.commit_id == commit_ids[-2]
656
655
657 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
656 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
658 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
657 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
659 source_repo = vcsbackend_hg.clone_repo(target_repo)
658 source_repo = vcsbackend_hg.clone_repo(target_repo)
660 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
659 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
661 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
660 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
662
661
663 # add an extra head to the target repo
662 # add an extra head to the target repo
664 imc = target_repo.in_memory_commit
663 imc = target_repo.in_memory_commit
665 imc.add(FileNode('file_x', content='foo'))
664 imc.add(FileNode('file_x', content='foo'))
666 commits = list(target_repo.get_commits())
665 commits = list(target_repo.get_commits())
667 imc.commit(
666 imc.commit(
668 message=u'Automatic commit from repo merge test',
667 message=u'Automatic commit from repo merge test',
669 author=u'Automatic', parents=commits[0:1])
668 author=u'Automatic', parents=commits[0:1])
670
669
671 target_commit = target_repo.get_commit()
670 target_commit = target_repo.get_commit()
672 source_commit = source_repo.get_commit()
671 source_commit = source_repo.get_commit()
673 default_branch = target_repo.DEFAULT_BRANCH_NAME
672 default_branch = target_repo.DEFAULT_BRANCH_NAME
674 target_repo._update(default_branch)
673 target_repo._update(default_branch)
675
674
676 target_ref = Reference('branch', default_branch, target_commit.raw_id)
675 target_ref = Reference('branch', default_branch, target_commit.raw_id)
677 source_ref = Reference('branch', default_branch, source_commit.raw_id)
676 source_ref = Reference('branch', default_branch, source_commit.raw_id)
678 workspace = 'test-merge'
677 workspace = 'test-merge'
679
678
680 assert len(target_repo._heads(branch='default')) == 2
679 assert len(target_repo._heads(branch='default')) == 2
681 expected_merge_response = MergeResponse(
680 expected_merge_response = MergeResponse(
682 False, False, None,
681 False, False, None,
683 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
682 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
684 merge_response = target_repo.merge(
683 merge_response = target_repo.merge(
685 target_ref, source_repo, source_ref, workspace,
684 target_ref, source_repo, source_ref, workspace,
686 'test user', 'test@rhodecode.com', 'merge message 1',
685 'test user', 'test@rhodecode.com', 'merge message 1',
687 dry_run=False)
686 dry_run=False)
688 assert merge_response == expected_merge_response
687 assert merge_response == expected_merge_response
689
688
690 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
689 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
691 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
690 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
692 source_repo = vcsbackend_hg.clone_repo(target_repo)
691 source_repo = vcsbackend_hg.clone_repo(target_repo)
693 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
692 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
694 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
693 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
695 imc = source_repo.in_memory_commit
694 imc = source_repo.in_memory_commit
696 imc.add(FileNode('file_x', content=source_repo.name))
695 imc.add(FileNode('file_x', content=source_repo.name))
697 imc.commit(
696 imc.commit(
698 message=u'Automatic commit from repo merge test',
697 message=u'Automatic commit from repo merge test',
699 author=u'Automatic')
698 author=u'Automatic')
700 target_commit = target_repo.get_commit()
699 target_commit = target_repo.get_commit()
701 source_commit = source_repo.get_commit()
700 source_commit = source_repo.get_commit()
702
701
703 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
702 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
704
703
705 default_branch = target_repo.DEFAULT_BRANCH_NAME
704 default_branch = target_repo.DEFAULT_BRANCH_NAME
706 bookmark_name = 'bookmark'
705 bookmark_name = 'bookmark'
707 source_repo._update(default_branch)
706 source_repo._update(default_branch)
708 source_repo.bookmark(bookmark_name)
707 source_repo.bookmark(bookmark_name)
709
708
710 target_ref = Reference('branch', default_branch, target_commit.raw_id)
709 target_ref = Reference('branch', default_branch, target_commit.raw_id)
711 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
710 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
712 workspace = 'test-merge'
711 workspace = 'test-merge'
713
712
714 merge_response = target_repo.merge(
713 merge_response = target_repo.merge(
715 target_ref, source_repo, source_ref, workspace,
714 target_ref, source_repo, source_ref, workspace,
716 'test user', 'test@rhodecode.com', 'merge message 1',
715 'test user', 'test@rhodecode.com', 'merge message 1',
717 dry_run=False, use_rebase=True)
716 dry_run=False, use_rebase=True)
718
717
719 expected_merge_response = MergeResponse(
718 expected_merge_response = MergeResponse(
720 True, True, merge_response.merge_ref,
719 True, True, merge_response.merge_ref,
721 MergeFailureReason.NONE)
720 MergeFailureReason.NONE)
722 assert merge_response == expected_merge_response
721 assert merge_response == expected_merge_response
723
722
724 target_repo = backends.get_backend(vcsbackend_hg.alias)(
723 target_repo = backends.get_backend(vcsbackend_hg.alias)(
725 target_repo.path)
724 target_repo.path)
726 last_commit = target_repo.get_commit()
725 last_commit = target_repo.get_commit()
727 assert last_commit.message == source_commit.message
726 assert last_commit.message == source_commit.message
728 assert last_commit.author == source_commit.author
727 assert last_commit.author == source_commit.author
729 # This checks that we effectively did a rebase
728 # This checks that we effectively did a rebase
730 assert last_commit.raw_id != source_commit.raw_id
729 assert last_commit.raw_id != source_commit.raw_id
731
730
732 # Check the target has only 4 commits: 2 were already in target and
731 # Check the target has only 4 commits: 2 were already in target and
733 # only two should have been added
732 # only two should have been added
734 assert len(target_repo.commit_ids) == 2 + 2
733 assert len(target_repo.commit_ids) == 2 + 2
735
734
736
735
737 class TestGetShadowInstance(object):
736 class TestGetShadowInstance(object):
738
737
739 @pytest.fixture
738 @pytest.fixture
740 def repo(self, vcsbackend_hg, monkeypatch):
739 def repo(self, vcsbackend_hg, monkeypatch):
741 repo = vcsbackend_hg.repo
740 repo = vcsbackend_hg.repo
742 monkeypatch.setattr(repo, 'config', mock.Mock())
741 monkeypatch.setattr(repo, 'config', mock.Mock())
743 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
742 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
744 return repo
743 return repo
745
744
746 def test_passes_config(self, repo):
745 def test_passes_config(self, repo):
747 shadow = repo._get_shadow_instance(repo.path)
746 shadow = repo._get_shadow_instance(repo.path)
748 assert shadow.config == repo.config.copy()
747 assert shadow.config == repo.config.copy()
749
748
750 def test_disables_hooks(self, repo):
749 def test_disables_hooks(self, repo):
751 shadow = repo._get_shadow_instance(repo.path)
750 shadow = repo._get_shadow_instance(repo.path)
752 shadow.config.clear_section.assert_called_once_with('hooks')
751 shadow.config.clear_section.assert_called_once_with('hooks')
753
752
754 def test_allows_to_keep_hooks(self, repo):
753 def test_allows_to_keep_hooks(self, repo):
755 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
754 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
756 assert not shadow.config.clear_section.called
755 assert not shadow.config.clear_section.called
757
756
758
757
759 class TestMercurialCommit(object):
758 class TestMercurialCommit(object):
760
759
761 def _test_equality(self, commit):
760 def _test_equality(self, commit):
762 idx = commit.idx
761 idx = commit.idx
763 assert commit == self.repo.get_commit(commit_idx=idx)
762 assert commit == self.repo.get_commit(commit_idx=idx)
764
763
765 def test_equality(self):
764 def test_equality(self):
766 indexes = [0, 10, 20]
765 indexes = [0, 10, 20]
767 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
766 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
768 for commit in commits:
767 for commit in commits:
769 self._test_equality(commit)
768 self._test_equality(commit)
770
769
771 def test_default_commit(self):
770 def test_default_commit(self):
772 tip = self.repo.get_commit('tip')
771 tip = self.repo.get_commit('tip')
773 assert tip == self.repo.get_commit()
772 assert tip == self.repo.get_commit()
774 assert tip == self.repo.get_commit(commit_id=None)
773 assert tip == self.repo.get_commit(commit_id=None)
775 assert tip == self.repo.get_commit(commit_idx=None)
774 assert tip == self.repo.get_commit(commit_idx=None)
776 assert tip == list(self.repo[-1:])[0]
775 assert tip == list(self.repo[-1:])[0]
777
776
778 def test_root_node(self):
777 def test_root_node(self):
779 tip = self.repo.get_commit('tip')
778 tip = self.repo.get_commit('tip')
780 assert tip.root is tip.get_node('')
779 assert tip.root is tip.get_node('')
781
780
782 def test_lazy_fetch(self):
781 def test_lazy_fetch(self):
783 """
782 """
784 Test if commit's nodes expands and are cached as we walk through
783 Test if commit's nodes expands and are cached as we walk through
785 the commit. This test is somewhat hard to write as order of tests
784 the commit. This test is somewhat hard to write as order of tests
786 is a key here. Written by running command after command in a shell.
785 is a key here. Written by running command after command in a shell.
787 """
786 """
788 commit = self.repo.get_commit(commit_idx=45)
787 commit = self.repo.get_commit(commit_idx=45)
789 assert len(commit.nodes) == 0
788 assert len(commit.nodes) == 0
790 root = commit.root
789 root = commit.root
791 assert len(commit.nodes) == 1
790 assert len(commit.nodes) == 1
792 assert len(root.nodes) == 8
791 assert len(root.nodes) == 8
793 # accessing root.nodes updates commit.nodes
792 # accessing root.nodes updates commit.nodes
794 assert len(commit.nodes) == 9
793 assert len(commit.nodes) == 9
795
794
796 docs = root.get_node('docs')
795 docs = root.get_node('docs')
797 # we haven't yet accessed anything new as docs dir was already cached
796 # we haven't yet accessed anything new as docs dir was already cached
798 assert len(commit.nodes) == 9
797 assert len(commit.nodes) == 9
799 assert len(docs.nodes) == 8
798 assert len(docs.nodes) == 8
800 # accessing docs.nodes updates commit.nodes
799 # accessing docs.nodes updates commit.nodes
801 assert len(commit.nodes) == 17
800 assert len(commit.nodes) == 17
802
801
803 assert docs is commit.get_node('docs')
802 assert docs is commit.get_node('docs')
804 assert docs is root.nodes[0]
803 assert docs is root.nodes[0]
805 assert docs is root.dirs[0]
804 assert docs is root.dirs[0]
806 assert docs is commit.get_node('docs')
805 assert docs is commit.get_node('docs')
807
806
808 def test_nodes_with_commit(self):
807 def test_nodes_with_commit(self):
809 commit = self.repo.get_commit(commit_idx=45)
808 commit = self.repo.get_commit(commit_idx=45)
810 root = commit.root
809 root = commit.root
811 docs = root.get_node('docs')
810 docs = root.get_node('docs')
812 assert docs is commit.get_node('docs')
811 assert docs is commit.get_node('docs')
813 api = docs.get_node('api')
812 api = docs.get_node('api')
814 assert api is commit.get_node('docs/api')
813 assert api is commit.get_node('docs/api')
815 index = api.get_node('index.rst')
814 index = api.get_node('index.rst')
816 assert index is commit.get_node('docs/api/index.rst')
815 assert index is commit.get_node('docs/api/index.rst')
817 assert index is commit.get_node(
816 assert index is commit.get_node(
818 'docs').get_node('api').get_node('index.rst')
817 'docs').get_node('api').get_node('index.rst')
819
818
820 def test_branch_and_tags(self):
819 def test_branch_and_tags(self):
821 commit0 = self.repo.get_commit(commit_idx=0)
820 commit0 = self.repo.get_commit(commit_idx=0)
822 assert commit0.branch == 'default'
821 assert commit0.branch == 'default'
823 assert commit0.tags == []
822 assert commit0.tags == []
824
823
825 commit10 = self.repo.get_commit(commit_idx=10)
824 commit10 = self.repo.get_commit(commit_idx=10)
826 assert commit10.branch == 'default'
825 assert commit10.branch == 'default'
827 assert commit10.tags == []
826 assert commit10.tags == []
828
827
829 commit44 = self.repo.get_commit(commit_idx=44)
828 commit44 = self.repo.get_commit(commit_idx=44)
830 assert commit44.branch == 'web'
829 assert commit44.branch == 'web'
831
830
832 tip = self.repo.get_commit('tip')
831 tip = self.repo.get_commit('tip')
833 assert 'tip' in tip.tags
832 assert 'tip' in tip.tags
834
833
835 def test_bookmarks(self):
834 def test_bookmarks(self):
836 commit0 = self.repo.get_commit(commit_idx=0)
835 commit0 = self.repo.get_commit(commit_idx=0)
837 assert commit0.bookmarks == []
836 assert commit0.bookmarks == []
838
837
839 def _test_file_size(self, idx, path, size):
838 def _test_file_size(self, idx, path, size):
840 node = self.repo.get_commit(commit_idx=idx).get_node(path)
839 node = self.repo.get_commit(commit_idx=idx).get_node(path)
841 assert node.is_file()
840 assert node.is_file()
842 assert node.size == size
841 assert node.size == size
843
842
844 def test_file_size(self):
843 def test_file_size(self):
845 to_check = (
844 to_check = (
846 (10, 'setup.py', 1068),
845 (10, 'setup.py', 1068),
847 (20, 'setup.py', 1106),
846 (20, 'setup.py', 1106),
848 (60, 'setup.py', 1074),
847 (60, 'setup.py', 1074),
849
848
850 (10, 'vcs/backends/base.py', 2921),
849 (10, 'vcs/backends/base.py', 2921),
851 (20, 'vcs/backends/base.py', 3936),
850 (20, 'vcs/backends/base.py', 3936),
852 (60, 'vcs/backends/base.py', 6189),
851 (60, 'vcs/backends/base.py', 6189),
853 )
852 )
854 for idx, path, size in to_check:
853 for idx, path, size in to_check:
855 self._test_file_size(idx, path, size)
854 self._test_file_size(idx, path, size)
856
855
857 def test_file_history_from_commits(self):
856 def test_file_history_from_commits(self):
858 node = self.repo[10].get_node('setup.py')
857 node = self.repo[10].get_node('setup.py')
859 commit_ids = [commit.raw_id for commit in node.history]
858 commit_ids = [commit.raw_id for commit in node.history]
860 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
859 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
861
860
862 node = self.repo[20].get_node('setup.py')
861 node = self.repo[20].get_node('setup.py')
863 node_ids = [commit.raw_id for commit in node.history]
862 node_ids = [commit.raw_id for commit in node.history]
864 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
863 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
865 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
864 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
866
865
867 # special case we check history from commit that has this particular
866 # special case we check history from commit that has this particular
868 # file changed this means we check if it's included as well
867 # file changed this means we check if it's included as well
869 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
868 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
870 .get_node('setup.py')
869 .get_node('setup.py')
871 node_ids = [commit.raw_id for commit in node.history]
870 node_ids = [commit.raw_id for commit in node.history]
872 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
873 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
874
873
875 def test_file_history(self):
874 def test_file_history(self):
876 # we can only check if those commits are present in the history
875 # we can only check if those commits are present in the history
877 # as we cannot update this test every time file is changed
876 # as we cannot update this test every time file is changed
878 files = {
877 files = {
879 'setup.py': [7, 18, 45, 46, 47, 69, 77],
878 'setup.py': [7, 18, 45, 46, 47, 69, 77],
880 'vcs/nodes.py': [
879 'vcs/nodes.py': [
881 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
880 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
882 'vcs/backends/hg.py': [
881 'vcs/backends/hg.py': [
883 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
882 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
884 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
883 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
885 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
884 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
886 }
885 }
887 for path, indexes in files.items():
886 for path, indexes in files.items():
888 tip = self.repo.get_commit(commit_idx=indexes[-1])
887 tip = self.repo.get_commit(commit_idx=indexes[-1])
889 node = tip.get_node(path)
888 node = tip.get_node(path)
890 node_indexes = [commit.idx for commit in node.history]
889 node_indexes = [commit.idx for commit in node.history]
891 assert set(indexes).issubset(set(node_indexes)), (
890 assert set(indexes).issubset(set(node_indexes)), (
892 "We assumed that %s is subset of commits for which file %s "
891 "We assumed that %s is subset of commits for which file %s "
893 "has been changed, and history of that node returned: %s"
892 "has been changed, and history of that node returned: %s"
894 % (indexes, path, node_indexes))
893 % (indexes, path, node_indexes))
895
894
896 def test_file_annotate(self):
895 def test_file_annotate(self):
897 files = {
896 files = {
898 'vcs/backends/__init__.py': {
897 'vcs/backends/__init__.py': {
899 89: {
898 89: {
900 'lines_no': 31,
899 'lines_no': 31,
901 'commits': [
900 'commits': [
902 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
901 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
903 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
902 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
904 32, 32, 32, 32, 37, 32, 37, 37, 32,
903 32, 32, 32, 32, 37, 32, 37, 37, 32,
905 32, 32
904 32, 32
906 ]
905 ]
907 },
906 },
908 20: {
907 20: {
909 'lines_no': 1,
908 'lines_no': 1,
910 'commits': [4]
909 'commits': [4]
911 },
910 },
912 55: {
911 55: {
913 'lines_no': 31,
912 'lines_no': 31,
914 'commits': [
913 'commits': [
915 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
914 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
916 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
915 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
917 32, 32, 32, 32, 37, 32, 37, 37, 32,
916 32, 32, 32, 32, 37, 32, 37, 37, 32,
918 32, 32
917 32, 32
919 ]
918 ]
920 }
919 }
921 },
920 },
922 'vcs/exceptions.py': {
921 'vcs/exceptions.py': {
923 89: {
922 89: {
924 'lines_no': 18,
923 'lines_no': 18,
925 'commits': [
924 'commits': [
926 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
925 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
927 16, 16, 17, 16, 16, 18, 18, 18
926 16, 16, 17, 16, 16, 18, 18, 18
928 ]
927 ]
929 },
928 },
930 20: {
929 20: {
931 'lines_no': 18,
930 'lines_no': 18,
932 'commits': [
931 'commits': [
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
932 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
934 16, 16, 17, 16, 16, 18, 18, 18
933 16, 16, 17, 16, 16, 18, 18, 18
935 ]
934 ]
936 },
935 },
937 55: {
936 55: {
938 'lines_no': 18,
937 'lines_no': 18,
939 'commits': [
938 'commits': [
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
939 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
941 17, 16, 16, 18, 18, 18
940 17, 16, 16, 18, 18, 18
942 ]
941 ]
943 }
942 }
944 },
943 },
945 'MANIFEST.in': {
944 'MANIFEST.in': {
946 89: {
945 89: {
947 'lines_no': 5,
946 'lines_no': 5,
948 'commits': [7, 7, 7, 71, 71]
947 'commits': [7, 7, 7, 71, 71]
949 },
948 },
950 20: {
949 20: {
951 'lines_no': 3,
950 'lines_no': 3,
952 'commits': [7, 7, 7]
951 'commits': [7, 7, 7]
953 },
952 },
954 55: {
953 55: {
955 'lines_no': 3,
954 'lines_no': 3,
956 'commits': [7, 7, 7]
955 'commits': [7, 7, 7]
957 }
956 }
958 }
957 }
959 }
958 }
960
959
961 for fname, commit_dict in files.items():
960 for fname, commit_dict in files.items():
962 for idx, __ in commit_dict.items():
961 for idx, __ in commit_dict.items():
963 commit = self.repo.get_commit(commit_idx=idx)
962 commit = self.repo.get_commit(commit_idx=idx)
964 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
963 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
965 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
964 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
966 assert l1_1 == l1_2
965 assert l1_1 == l1_2
967 l1 = l1_2 = [
966 l1 = l1_2 = [
968 x[2]().idx for x in commit.get_file_annotate(fname)]
967 x[2]().idx for x in commit.get_file_annotate(fname)]
969 l2 = files[fname][idx]['commits']
968 l2 = files[fname][idx]['commits']
970 assert l1 == l2, (
969 assert l1 == l2, (
971 "The lists of commit for %s@commit_id%s"
970 "The lists of commit for %s@commit_id%s"
972 "from annotation list should match each other,"
971 "from annotation list should match each other,"
973 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
972 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
974
973
975 def test_commit_state(self):
974 def test_commit_state(self):
976 """
975 """
977 Tests which files have been added/changed/removed at particular commit
976 Tests which files have been added/changed/removed at particular commit
978 """
977 """
979
978
980 # commit_id 46ad32a4f974:
979 # commit_id 46ad32a4f974:
981 # hg st --rev 46ad32a4f974
980 # hg st --rev 46ad32a4f974
982 # changed: 13
981 # changed: 13
983 # added: 20
982 # added: 20
984 # removed: 1
983 # removed: 1
985 changed = set([
984 changed = set([
986 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
985 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
987 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
986 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
988 'vcs/__init__.py', 'vcs/backends/__init__.py',
987 'vcs/__init__.py', 'vcs/backends/__init__.py',
989 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
988 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
990 'vcs/utils/__init__.py'])
989 'vcs/utils/__init__.py'])
991
990
992 added = set([
991 added = set([
993 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
992 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
994 'docs/api/index.rst', 'docs/api/nodes.rst',
993 'docs/api/index.rst', 'docs/api/nodes.rst',
995 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
994 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
996 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
995 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
997 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
996 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
998 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
997 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
999 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
998 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1000 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
999 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1001 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1000 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1002 'vcs/web/simplevcs/views.py'])
1001 'vcs/web/simplevcs/views.py'])
1003
1002
1004 removed = set(['docs/api.rst'])
1003 removed = set(['docs/api.rst'])
1005
1004
1006 commit64 = self.repo.get_commit('46ad32a4f974')
1005 commit64 = self.repo.get_commit('46ad32a4f974')
1007 assert set((node.path for node in commit64.added)) == added
1006 assert set((node.path for node in commit64.added)) == added
1008 assert set((node.path for node in commit64.changed)) == changed
1007 assert set((node.path for node in commit64.changed)) == changed
1009 assert set((node.path for node in commit64.removed)) == removed
1008 assert set((node.path for node in commit64.removed)) == removed
1010
1009
1011 # commit_id b090f22d27d6:
1010 # commit_id b090f22d27d6:
1012 # hg st --rev b090f22d27d6
1011 # hg st --rev b090f22d27d6
1013 # changed: 13
1012 # changed: 13
1014 # added: 20
1013 # added: 20
1015 # removed: 1
1014 # removed: 1
1016 commit88 = self.repo.get_commit('b090f22d27d6')
1015 commit88 = self.repo.get_commit('b090f22d27d6')
1017 assert set((node.path for node in commit88.added)) == set()
1016 assert set((node.path for node in commit88.added)) == set()
1018 assert set((node.path for node in commit88.changed)) == \
1017 assert set((node.path for node in commit88.changed)) == \
1019 set(['.hgignore'])
1018 set(['.hgignore'])
1020 assert set((node.path for node in commit88.removed)) == set()
1019 assert set((node.path for node in commit88.removed)) == set()
1021
1020
1022 #
1021 #
1023 # 85:
1022 # 85:
1024 # added: 2 [
1023 # added: 2 [
1025 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1024 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1026 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1025 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1027 # removed: 1 ['vcs/utils/web.py']
1026 # removed: 1 ['vcs/utils/web.py']
1028 commit85 = self.repo.get_commit(commit_idx=85)
1027 commit85 = self.repo.get_commit(commit_idx=85)
1029 assert set((node.path for node in commit85.added)) == set([
1028 assert set((node.path for node in commit85.added)) == set([
1030 'vcs/utils/diffs.py',
1029 'vcs/utils/diffs.py',
1031 'vcs/web/simplevcs/views/diffs.py'])
1030 'vcs/web/simplevcs/views/diffs.py'])
1032 assert set((node.path for node in commit85.changed)) == set([
1031 assert set((node.path for node in commit85.changed)) == set([
1033 'vcs/web/simplevcs/models.py',
1032 'vcs/web/simplevcs/models.py',
1034 'vcs/web/simplevcs/utils.py',
1033 'vcs/web/simplevcs/utils.py',
1035 'vcs/web/simplevcs/views/__init__.py',
1034 'vcs/web/simplevcs/views/__init__.py',
1036 'vcs/web/simplevcs/views/repository.py',
1035 'vcs/web/simplevcs/views/repository.py',
1037 ])
1036 ])
1038 assert set((node.path for node in commit85.removed)) == \
1037 assert set((node.path for node in commit85.removed)) == \
1039 set(['vcs/utils/web.py'])
1038 set(['vcs/utils/web.py'])
1040
1039
1041 def test_files_state(self):
1040 def test_files_state(self):
1042 """
1041 """
1043 Tests state of FileNodes.
1042 Tests state of FileNodes.
1044 """
1043 """
1045 commit = self.repo.get_commit(commit_idx=85)
1044 commit = self.repo.get_commit(commit_idx=85)
1046 node = commit.get_node('vcs/utils/diffs.py')
1045 node = commit.get_node('vcs/utils/diffs.py')
1047 assert node.state, NodeState.ADDED
1046 assert node.state, NodeState.ADDED
1048 assert node.added
1047 assert node.added
1049 assert not node.changed
1048 assert not node.changed
1050 assert not node.not_changed
1049 assert not node.not_changed
1051 assert not node.removed
1050 assert not node.removed
1052
1051
1053 commit = self.repo.get_commit(commit_idx=88)
1052 commit = self.repo.get_commit(commit_idx=88)
1054 node = commit.get_node('.hgignore')
1053 node = commit.get_node('.hgignore')
1055 assert node.state, NodeState.CHANGED
1054 assert node.state, NodeState.CHANGED
1056 assert not node.added
1055 assert not node.added
1057 assert node.changed
1056 assert node.changed
1058 assert not node.not_changed
1057 assert not node.not_changed
1059 assert not node.removed
1058 assert not node.removed
1060
1059
1061 commit = self.repo.get_commit(commit_idx=85)
1060 commit = self.repo.get_commit(commit_idx=85)
1062 node = commit.get_node('setup.py')
1061 node = commit.get_node('setup.py')
1063 assert node.state, NodeState.NOT_CHANGED
1062 assert node.state, NodeState.NOT_CHANGED
1064 assert not node.added
1063 assert not node.added
1065 assert not node.changed
1064 assert not node.changed
1066 assert node.not_changed
1065 assert node.not_changed
1067 assert not node.removed
1066 assert not node.removed
1068
1067
1069 # If node has REMOVED state then trying to fetch it would raise
1068 # If node has REMOVED state then trying to fetch it would raise
1070 # CommitError exception
1069 # CommitError exception
1071 commit = self.repo.get_commit(commit_idx=2)
1070 commit = self.repo.get_commit(commit_idx=2)
1072 path = 'vcs/backends/BaseRepository.py'
1071 path = 'vcs/backends/BaseRepository.py'
1073 with pytest.raises(NodeDoesNotExistError):
1072 with pytest.raises(NodeDoesNotExistError):
1074 commit.get_node(path)
1073 commit.get_node(path)
1075 # but it would be one of ``removed`` (commit's attribute)
1074 # but it would be one of ``removed`` (commit's attribute)
1076 assert path in [rf.path for rf in commit.removed]
1075 assert path in [rf.path for rf in commit.removed]
1077
1076
1078 def test_commit_message_is_unicode(self):
1077 def test_commit_message_is_unicode(self):
1079 for cm in self.repo:
1078 for cm in self.repo:
1080 assert type(cm.message) == unicode
1079 assert type(cm.message) == unicode
1081
1080
1082 def test_commit_author_is_unicode(self):
1081 def test_commit_author_is_unicode(self):
1083 for cm in self.repo:
1082 for cm in self.repo:
1084 assert type(cm.author) == unicode
1083 assert type(cm.author) == unicode
1085
1084
1086 def test_repo_files_content_is_unicode(self):
1085 def test_repo_files_content_is_unicode(self):
1087 test_commit = self.repo.get_commit(commit_idx=100)
1086 test_commit = self.repo.get_commit(commit_idx=100)
1088 for node in test_commit.get_node('/'):
1087 for node in test_commit.get_node('/'):
1089 if node.is_file():
1088 if node.is_file():
1090 assert type(node.content) == unicode
1089 assert type(node.content) == unicode
1091
1090
1092 def test_wrong_path(self):
1091 def test_wrong_path(self):
1093 # There is 'setup.py' in the root dir but not there:
1092 # There is 'setup.py' in the root dir but not there:
1094 path = 'foo/bar/setup.py'
1093 path = 'foo/bar/setup.py'
1095 with pytest.raises(VCSError):
1094 with pytest.raises(VCSError):
1096 self.repo.get_commit().get_node(path)
1095 self.repo.get_commit().get_node(path)
1097
1096
1098 def test_large_file(self):
1099 # TODO: valid large file
1100 tip = self.repo.get_commit()
1101 with pytest.raises(CommitError):
1102 tip.get_largefile_node("invalid")
1103
1104 def test_author_email(self):
1097 def test_author_email(self):
1105 assert 'marcin@python-blog.com' == \
1098 assert 'marcin@python-blog.com' == \
1106 self.repo.get_commit('b986218ba1c9').author_email
1099 self.repo.get_commit('b986218ba1c9').author_email
1107 assert 'lukasz.balcerzak@python-center.pl' == \
1100 assert 'lukasz.balcerzak@python-center.pl' == \
1108 self.repo.get_commit('3803844fdbd3').author_email
1101 self.repo.get_commit('3803844fdbd3').author_email
1109 assert '' == self.repo.get_commit('84478366594b').author_email
1102 assert '' == self.repo.get_commit('84478366594b').author_email
1110
1103
1111 def test_author_username(self):
1104 def test_author_username(self):
1112 assert 'Marcin Kuzminski' == \
1105 assert 'Marcin Kuzminski' == \
1113 self.repo.get_commit('b986218ba1c9').author_name
1106 self.repo.get_commit('b986218ba1c9').author_name
1114 assert 'Lukasz Balcerzak' == \
1107 assert 'Lukasz Balcerzak' == \
1115 self.repo.get_commit('3803844fdbd3').author_name
1108 self.repo.get_commit('3803844fdbd3').author_name
1116 assert 'marcink' == \
1109 assert 'marcink' == \
1117 self.repo.get_commit('84478366594b').author_name
1110 self.repo.get_commit('84478366594b').author_name
1118
1111
1119
1112
class TestLargeFileRepo(object):
    """Tests resolving mercurial largefile pointer files to their nodes."""

    def test_large_file(self, backend_hg):
        # Create a test repo with the mercurial largefiles extension
        # configured, then resolve the .hglf/ pointer to the real node.
        repo = backend_hg.create_test_repo('largefiles', make_db_config())
        commit = repo.scm_instance().get_commit()

        pointer_node = commit.get_node('.hglf/thisfileislarge')
        largefile_node = pointer_node.get_largefile_node()

        assert largefile_node.is_largefile() is True
        assert largefile_node.size == 1024000
        assert largefile_node.name == '.hglf/thisfileislarge'

class TestGetBranchName(object):
    """Tests for ``_get_branch_name``: branch refs answer locally,
    bookmark/tag refs must be resolved through the remote."""

    def test_returns_ref_name_when_type_is_branch(self):
        # A 'branch' ref already carries its branch name; no remote call.
        ref = self._create_ref('branch', 'fake-name')
        result = self.repo._get_branch_name(ref)
        assert result == ref.name

    @pytest.mark.parametrize("type_", ("book", "tag"))
    def test_queries_remote_when_type_is_not_branch(self, type_):
        # Non-branch refs carry a misleading name on purpose, to prove
        # the remote's answer (not the ref name) is returned.
        ref = self._create_ref(type_, 'wrong-fake-name')
        with mock.patch.object(self.repo, "_remote") as remote_mock:
            remote_mock.ctx_branch.return_value = "fake-name"
            result = self.repo._get_branch_name(ref)
            assert result == "fake-name"
            remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)

    def _create_ref(self, type_, name):
        """Build a fake ref with the given type and name.

        BUG FIX: the original ignored ``name`` and hardcoded
        'wrong-fake-name', so the first test only passed because it
        compared the result against the same stale attribute. Both
        callers still pass with the parameter honored.
        """
        ref = mock.Mock()
        ref.type = type_
        ref.name = name
        ref.commit_id = "deadbeef"
        return ref

1142
1150
class TestIsTheSameBranch(object):
    """Tests for ``_is_the_same_branch``: it must compare the branch
    names of the source and target refs via ``_get_branch_name``."""

    def test_returns_true_when_branches_are_equal(self):
        source_ref = mock.Mock(name="source-ref")
        target_ref = mock.Mock(name="target-ref")
        # Both refs resolve to the same branch name.
        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", return_value="default")
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is True

    def test_returns_false_when_branches_are_not_equal(self):
        source_ref = mock.Mock(name="source-ref")
        source_ref.name = "source-branch"
        target_ref = mock.Mock(name="target-ref")
        # BUG FIX: the original re-assigned source_ref.name here, leaving
        # target_ref.name as an auto-created child Mock (Mock's ``name``
        # constructor kwarg does not set the attribute). The comparison
        # then differed only by accident. Set the target's name as intended.
        target_ref.name = "target-branch"

        def side_effect(ref):
            # Resolve each ref to its own (distinct) branch name.
            return ref.name

        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", side_effect=side_effect)
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is False
General Comments 0
You need to be logged in to leave comments. Login now