# HG changeset patch # User Marcin Kuzminski # Date 2020-07-20 11:39:21 # Node ID 279b329376a8c9a642e6a95991c93a5d6cb85be3 # Parent 96b3ab254a3bbfcf7dbea9bcd3e2bc920232cb9a # Parent 043990e141f21d6f2ec6440eda925a9df2e59dcd release: Merge default into stable for release preparation diff --git a/.bumpversion.cfg b/.bumpversion.cfg --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,5 @@ [bumpversion] -current_version = 4.19.3 +current_version = 4.20.0 message = release: Bump version {current_version} to {new_version} [bumpversion:file:rhodecode/VERSION] - diff --git a/.release.cfg b/.release.cfg --- a/.release.cfg +++ b/.release.cfg @@ -5,25 +5,20 @@ done = false done = true [task:rc_tools_pinned] -done = true [task:fixes_on_stable] -done = true [task:pip2nix_generated] -done = true [task:changelog_updated] -done = true [task:generate_api_docs] -done = true + +[task:updated_translation] [release] -state = prepared -version = 4.19.3 - -[task:updated_translation] +state = in_progress +version = 4.20.0 [task:generate_js_routes] diff --git a/docs/admin/system_admin/admin-tricks.rst b/docs/admin/system_admin/admin-tricks.rst --- a/docs/admin/system_admin/admin-tricks.rst +++ b/docs/admin/system_admin/admin-tricks.rst @@ -238,7 +238,7 @@ following URL: ``{instance-URL}/_admin/p pong[rce-7880] => 203.0.113.23 .. _Markdown: http://daringfireball.net/projects/markdown/ -.. _reStructured Text: http://docutils.sourceforge.net/docs/index.html +.. _reStructured Text: http://docutils.sourceforge.io/docs/index.html Unarchiving a repository diff --git a/docs/admin/system_admin/apache/apache-conf-example.rst b/docs/admin/system_admin/apache/apache-conf-example.rst --- a/docs/admin/system_admin/apache/apache-conf-example.rst +++ b/docs/admin/system_admin/apache/apache-conf-example.rst @@ -75,7 +75,7 @@ Below config if for an Apache Reverse Pr # Url to running RhodeCode instance. This is shown as `- URL:` when # running rccontrol status. 
- ProxyPass / http://127.0.0.1:10002/ timeout=7200 Keepalive=On + ProxyPass / http://127.0.0.1:10002/ connectiontimeout=7200 timeout=7200 Keepalive=On ProxyPassReverse / http://127.0.0.1:10002/ # strict http prevents from https -> http downgrade diff --git a/docs/api/methods/pull-request-methods.rst b/docs/api/methods/pull-request-methods.rst --- a/docs/api/methods/pull-request-methods.rst +++ b/docs/api/methods/pull-request-methods.rst @@ -252,6 +252,7 @@ get_pull_request_comments }, "comment_text": "Example text", "comment_type": null, + "comment_last_version: 0, "pull_request_version": null, "comment_commit_id": None, "comment_pull_request_id": diff --git a/docs/api/methods/repo-methods.rst b/docs/api/methods/repo-methods.rst --- a/docs/api/methods/repo-methods.rst +++ b/docs/api/methods/repo-methods.rst @@ -173,6 +173,37 @@ delete_repo error: null +edit_comment +------------ + +.. py:function:: edit_comment(apiuser, message, comment_id, version, userid=>) + + Edit comment on the pull request or commit, + specified by the `comment_id` and version. Initially version should be 0 + + :param apiuser: This is filled automatically from the |authtoken|. + :type apiuser: AuthUser + :param comment_id: Specify the comment_id for editing + :type comment_id: int + :param version: version of the comment that will be created, starts from 0 + :type version: int + :param message: The text content of the comment. + :type message: str + :param userid: Comment on the pull request as this user + :type userid: Optional(str or int) + + Example output: + + .. code-block:: bash + + id : + result : { + "comment": "", + "version": "", + }, + error : null + + fork_repo --------- @@ -236,6 +267,40 @@ fork_repo error: null +get_comment +----------- + +.. py:function:: get_comment(apiuser, comment_id) + + Get single comment from repository or pull_request + + :param apiuser: This is filled automatically from the |authtoken|. 
+ :type apiuser: AuthUser + :param comment_id: comment id found in the URL of comment + :type comment_id: str or int + + Example error output: + + .. code-block:: bash + + { + "id" : , + "result" : { + "comment_author": , + "comment_created_on": "2017-02-01T14:38:16.309", + "comment_f_path": "file.txt", + "comment_id": 282, + "comment_lineno": "n1", + "comment_resolved_by": null, + "comment_status": [], + "comment_text": "This file needs a header", + "comment_type": "todo", + "comment_last_version: 0 + }, + "error" : null + } + + get_repo -------- @@ -436,7 +501,8 @@ get_repo_comments "comment_resolved_by": null, "comment_status": [], "comment_text": "This file needs a header", - "comment_type": "todo" + "comment_type": "todo", + "comment_last_version: 0 } ], "error" : null diff --git a/docs/install/setup-email.rst b/docs/install/setup-email.rst --- a/docs/install/setup-email.rst +++ b/docs/install/setup-email.rst @@ -11,16 +11,22 @@ use the below example to insert it. Once configured you can check the settings for your |RCE| instance on the :menuselection:`Admin --> Settings --> Email` page. +Please be aware that both section should be changed the `[DEFAULT]` for main applications +email config, and `[server:main]` for exception tracking email + .. 
code-block:: ini - ################################################################################ - ## Uncomment and replace with the email address which should receive ## - ## any error reports after an application crash ## - ## Additionally these settings will be used by the RhodeCode mailing system ## - ################################################################################ - #email_to = admin@localhost + [DEFAULT] + ; ######################################################################## + ; EMAIL CONFIGURATION + ; These settings will be used by the RhodeCode mailing system + ; ######################################################################## + + ; prefix all emails subjects with given prefix, helps filtering out emails + #email_prefix = [RhodeCode] + + ; email FROM address all mails will be sent #app_email_from = rhodecode-noreply@localhost - #email_prefix = [RhodeCode] #smtp_server = mail.server.com #smtp_username = @@ -28,3 +34,12 @@ Once configured you can check the settin #smtp_port = #smtp_use_tls = false #smtp_use_ssl = true + + [server:main] + ; Send email with exception details when it happens + #exception_tracker.send_email = true + + ; Comma separated list of recipients for exception emails, + ; e.g admin@rhodecode.com,devops@rhodecode.com + ; Can be left empty, then emails will be sent to ALL super-admins + #exception_tracker.send_email_recipients = diff --git a/docs/release-notes/release-notes-4.20.0.rst b/docs/release-notes/release-notes-4.20.0.rst new file mode 100644 --- /dev/null +++ b/docs/release-notes/release-notes-4.20.0.rst @@ -0,0 +1,74 @@ +|RCE| 4.20.0 |RNS| +------------------ + +Release Date +^^^^^^^^^^^^ + +- 2020-07-20 + + +New Features +^^^^^^^^^^^^ + +- Comments: users can now edit comments body. + Editing is versioned and all older versions are kept for auditing. +- Pull requests: changed the order of close-branch after merge, + so branch heads are no longer left open after the merge. 
+- Diffs: added diff navigation to improve UX when browsing the full context diffs. +- Emails: set the `References` header for threading in emails with different subjects. + Only some Email clients supports this. +- Emails: added logic to allow overwriting the default email titles via rcextensions. +- Markdown: support summary/details tags to allow setting a link with expansion menu. +- Integrations: added `store_file` integration. This allows storing + selected files from repository on disk on push. + + +General +^^^^^^^ + +- License: individual users can hide license flash messages warning about upcoming + license expiration. +- Downloads: the default download commit is now the landing revision set in repo settings. +- Auth-tokens: expose all roles with explanation to help users understand it better. +- Pull requests: make auto generated title for pull requests show also source Ref type + eg. branch feature1, instead of just name of the branch. +- UI: added secondary action instead of two buttons on files page, and download page. +- Emails: reduce excessive warning logs on pre-mailer. + + +Security +^^^^^^^^ + +- Branch permissions: protect from XSS on branch rules forbidden flash message. + + +Performance +^^^^^^^^^^^ + + + +Fixes +^^^^^ + +- Pull requests: detect missing commits on diffs from new PR ancestor logic. This fixes + problem with older PRs opened before 4.19.X that had special ancestor set, which could + lead in some cases to crash when viewing older pull requests. +- Permissions: fixed a case when a duplicate permission made repository settings active on archived repository. +- Permissions: fixed missing user info on global and repository permissions pages. +- Permissions: allow users to update settings for repository groups they still own, + or have admin perms, when they don't change their name. +- Permissions: flush all when running remap and rescan. +- Repositories: fixed a bug for repo groups that didn't pre-fill the repo group from GET param. 
+- Repositories: allow updating repository settings for users without + store-in-root permissions in case repository name didn't change. +- Comments: fixed line display icons. +- Summary: fixed summary page total commits count. + + +Upgrade notes +^^^^^^^^^^^^^ + +- Schedule feature update. +- On Mercurial repositories we changed the order of commits when the close branch on merge features is used. + Before the commits was made after a merge leaving an open head. + This backward incompatible change now reverses that order, which is the correct way of doing it. diff --git a/docs/release-notes/release-notes.rst b/docs/release-notes/release-notes.rst --- a/docs/release-notes/release-notes.rst +++ b/docs/release-notes/release-notes.rst @@ -9,6 +9,7 @@ Release Notes .. toctree:: :maxdepth: 1 + release-notes-4.20.0.rst release-notes-4.19.3.rst release-notes-4.19.2.rst release-notes-4.19.1.rst diff --git a/grunt_config.json b/grunt_config.json --- a/grunt_config.json +++ b/grunt_config.json @@ -51,9 +51,12 @@ "<%= dirs.js.src %>/plugins/jquery.pjax.js", "<%= dirs.js.src %>/plugins/jquery.dataTables.js", "<%= dirs.js.src %>/plugins/flavoured_checkbox.js", + "<%= dirs.js.src %>/plugins/within_viewport.js", "<%= dirs.js.src %>/plugins/jquery.auto-grow-input.js", "<%= dirs.js.src %>/plugins/jquery.autocomplete.js", "<%= dirs.js.src %>/plugins/jquery.debounce.js", + "<%= dirs.js.src %>/plugins/jquery.scrollstop.js", + "<%= dirs.js.src %>/plugins/jquery.within-viewport.js", "<%= dirs.js.node_modules %>/mark.js/dist/jquery.mark.min.js", "<%= dirs.js.src %>/plugins/jquery.timeago.js", "<%= dirs.js.src %>/plugins/jquery.timeago-extension.js", diff --git a/pkgs/python-packages.nix b/pkgs/python-packages.nix --- a/pkgs/python-packages.nix +++ b/pkgs/python-packages.nix @@ -1819,7 +1819,7 @@ self: super: { }; }; "rhodecode-enterprise-ce" = super.buildPythonPackage { - name = "rhodecode-enterprise-ce-4.19.3"; + name = "rhodecode-enterprise-ce-4.20.0"; buildInputs = [ self."pytest" 
self."py" diff --git a/pytest.ini b/pytest.ini --- a/pytest.ini +++ b/pytest.ini @@ -10,6 +10,8 @@ vcsserver_config_http = rhodecode/tests/ addopts = --pdbcls=IPython.terminal.debugger:TerminalPdb --strict-markers + --capture=no + --show-capture=no markers = vcs_operations: Mark tests depending on a running RhodeCode instance. diff --git a/rhodecode/VERSION b/rhodecode/VERSION --- a/rhodecode/VERSION +++ b/rhodecode/VERSION @@ -1,1 +1,1 @@ -4.19.3 \ No newline at end of file +4.20.0 \ No newline at end of file diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py --- a/rhodecode/__init__.py +++ b/rhodecode/__init__.py @@ -48,7 +48,7 @@ PYRAMID_SETTINGS = {} EXTENSIONS = {} __version__ = ('.'.join((str(each) for each in VERSION[:3]))) -__dbversion__ = 107 # defines current db version for migrations +__dbversion__ = 108 # defines current db version for migrations __platform__ = platform.system() __license__ = 'AGPLv3, and Commercial License' __author__ = 'RhodeCode GmbH' diff --git a/rhodecode/api/tests/test_api.py b/rhodecode/api/tests/test_api.py --- a/rhodecode/api/tests/test_api.py +++ b/rhodecode/api/tests/test_api.py @@ -88,7 +88,8 @@ class TestApi(object): response = api_call(self.app, params) expected = 'No such method: comment. 
' \ 'Similar methods: changeset_comment, comment_pull_request, ' \ - 'get_pull_request_comments, comment_commit, get_repo_comments' + 'get_pull_request_comments, comment_commit, edit_comment, ' \ + 'get_comment, get_repo_comments' assert_error(id_, expected, given=response.body) def test_api_disabled_user(self, request): diff --git a/rhodecode/api/tests/test_comment_pull_request.py b/rhodecode/api/tests/test_comment_pull_request.py --- a/rhodecode/api/tests/test_comment_pull_request.py +++ b/rhodecode/api/tests/test_comment_pull_request.py @@ -21,7 +21,7 @@ import pytest from rhodecode.model.comment import CommentsModel -from rhodecode.model.db import UserLog, User +from rhodecode.model.db import UserLog, User, ChangesetComment from rhodecode.model.pull_request import PullRequestModel from rhodecode.tests import TEST_USER_ADMIN_LOGIN from rhodecode.api.tests.utils import ( @@ -218,8 +218,20 @@ class TestCommentPullRequest(object): assert_error(id_, expected, given=response.body) @pytest.mark.backends("git", "hg") - def test_api_comment_pull_request_non_admin_with_userid_error( - self, pr_util): + def test_api_comment_pull_request_non_admin_with_userid_error(self, pr_util): + pull_request = pr_util.create_pull_request() + id_, params = build_data( + self.apikey_regular, 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request.pull_request_id, + userid=TEST_USER_ADMIN_LOGIN) + response = api_call(self.app, params) + + expected = 'userid is not the same as your user' + assert_error(id_, expected, given=response.body) + + @pytest.mark.backends("git", "hg") + def test_api_comment_pull_request_non_admin_with_userid_error(self, pr_util): pull_request = pr_util.create_pull_request() id_, params = build_data( self.apikey_regular, 'comment_pull_request', @@ -244,3 +256,135 @@ class TestCommentPullRequest(object): expected = 'Invalid commit_id `XXX` for this pull request.' 
assert_error(id_, expected, given=response.body) + + @pytest.mark.backends("git", "hg") + def test_api_edit_comment(self, pr_util): + pull_request = pr_util.create_pull_request() + + id_, params = build_data( + self.apikey, + 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request.pull_request_id, + message='test message', + ) + response = api_call(self.app, params) + json_response = response.json + comment_id = json_response['result']['comment_id'] + + message_after_edit = 'just message' + id_, params = build_data( + self.apikey, + 'edit_comment', + comment_id=comment_id, + message=message_after_edit, + version=0, + ) + response = api_call(self.app, params) + json_response = response.json + assert json_response['result']['version'] == 1 + + text_form_db = ChangesetComment.get(comment_id).text + assert message_after_edit == text_form_db + + @pytest.mark.backends("git", "hg") + def test_api_edit_comment_wrong_version(self, pr_util): + pull_request = pr_util.create_pull_request() + + id_, params = build_data( + self.apikey, 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request.pull_request_id, + message='test message') + response = api_call(self.app, params) + json_response = response.json + comment_id = json_response['result']['comment_id'] + + message_after_edit = 'just message' + id_, params = build_data( + self.apikey_regular, + 'edit_comment', + comment_id=comment_id, + message=message_after_edit, + version=1, + ) + response = api_call(self.app, params) + expected = 'comment ({}) version ({}) mismatch'.format(comment_id, 1) + assert_error(id_, expected, given=response.body) + + @pytest.mark.backends("git", "hg") + def test_api_edit_comment_wrong_version(self, pr_util): + pull_request = pr_util.create_pull_request() + + id_, params = build_data( + self.apikey, 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request.pull_request_id, + 
message='test message') + response = api_call(self.app, params) + json_response = response.json + comment_id = json_response['result']['comment_id'] + + id_, params = build_data( + self.apikey, + 'edit_comment', + comment_id=comment_id, + message='', + version=0, + ) + response = api_call(self.app, params) + expected = "comment ({}) can't be changed with empty string".format(comment_id, 1) + assert_error(id_, expected, given=response.body) + + @pytest.mark.backends("git", "hg") + def test_api_edit_comment_wrong_user_set_by_non_admin(self, pr_util): + pull_request = pr_util.create_pull_request() + pull_request_id = pull_request.pull_request_id + id_, params = build_data( + self.apikey, + 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request_id, + message='test message' + ) + response = api_call(self.app, params) + json_response = response.json + comment_id = json_response['result']['comment_id'] + + id_, params = build_data( + self.apikey_regular, + 'edit_comment', + comment_id=comment_id, + message='just message', + version=0, + userid=TEST_USER_ADMIN_LOGIN + ) + response = api_call(self.app, params) + expected = 'userid is not the same as your user' + assert_error(id_, expected, given=response.body) + + @pytest.mark.backends("git", "hg") + def test_api_edit_comment_wrong_user_with_permissions_to_edit_comment(self, pr_util): + pull_request = pr_util.create_pull_request() + pull_request_id = pull_request.pull_request_id + id_, params = build_data( + self.apikey, + 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request_id, + message='test message' + ) + response = api_call(self.app, params) + json_response = response.json + comment_id = json_response['result']['comment_id'] + + id_, params = build_data( + self.apikey_regular, + 'edit_comment', + comment_id=comment_id, + message='just message', + version=0, + ) + response = api_call(self.app, params) + expected = "you don't have 
access to edit this comment" + assert_error(id_, expected, given=response.body) diff --git a/rhodecode/api/tests/test_create_repo_group.py b/rhodecode/api/tests/test_create_repo_group.py --- a/rhodecode/api/tests/test_create_repo_group.py +++ b/rhodecode/api/tests/test_create_repo_group.py @@ -233,8 +233,8 @@ class TestCreateRepoGroup(object): expected = { 'repo_group': - 'Parent repository group `{}` does not exist'.format( - repo_group_name)} + u"You do not have the permissions to store " + u"repository groups inside repository group `{}`".format(repo_group_name)} try: assert_error(id_, expected, given=response.body) finally: diff --git a/rhodecode/api/tests/test_get_method.py b/rhodecode/api/tests/test_get_method.py --- a/rhodecode/api/tests/test_get_method.py +++ b/rhodecode/api/tests/test_get_method.py @@ -37,8 +37,10 @@ class TestGetMethod(object): id_, params = build_data(self.apikey, 'get_method', pattern='*comment*') response = api_call(self.app, params) - expected = ['changeset_comment', 'comment_pull_request', - 'get_pull_request_comments', 'comment_commit', 'get_repo_comments'] + expected = [ + 'changeset_comment', 'comment_pull_request', 'get_pull_request_comments', + 'comment_commit', 'edit_comment', 'get_comment', 'get_repo_comments' + ] assert_ok(id_, expected, given=response.body) def test_get_methods_on_single_match(self): diff --git a/rhodecode/api/tests/test_get_pull_request_comments.py b/rhodecode/api/tests/test_get_pull_request_comments.py --- a/rhodecode/api/tests/test_get_pull_request_comments.py +++ b/rhodecode/api/tests/test_get_pull_request_comments.py @@ -61,6 +61,7 @@ class TestGetPullRequestComments(object) 'comment_type': 'note', 'comment_resolved_by': None, 'pull_request_version': None, + 'comment_last_version': 0, 'comment_commit_id': None, 'comment_pull_request_id': pull_request.pull_request_id } diff --git a/rhodecode/api/tests/test_get_repo_comments.py b/rhodecode/api/tests/test_get_repo_comments.py --- 
a/rhodecode/api/tests/test_get_repo_comments.py +++ b/rhodecode/api/tests/test_get_repo_comments.py @@ -42,26 +42,27 @@ def make_repo_comments_factory(request): comments = [] # general - CommentsModel().create( + comment = CommentsModel().create( text='General Comment', repo=repo, user=user, commit_id=commit_id, comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False) + comments.append(comment) # inline - CommentsModel().create( + comment = CommentsModel().create( text='Inline Comment', repo=repo, user=user, commit_id=commit_id, f_path=file_0, line_no='n1', comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False) + comments.append(comment) # todo - CommentsModel().create( + comment = CommentsModel().create( text='INLINE TODO Comment', repo=repo, user=user, commit_id=commit_id, f_path=file_0, line_no='n1', comment_type=ChangesetComment.COMMENT_TYPE_TODO, send_email=False) + comments.append(comment) - @request.addfinalizer - def cleanup(): - for comment in comments: - Session().delete(comment) + return comments + return Make() @@ -108,3 +109,34 @@ class TestGetRepo(object): id_, params = build_data(self.apikey, 'get_repo_comments', **api_call_params) response = api_call(self.app, params) assert_error(id_, expected, given=response.body) + + def test_api_get_comment(self, make_repo_comments_factory, backend_hg): + commits = [{'message': 'A'}, {'message': 'B'}] + repo = backend_hg.create_repo(commits=commits) + + comments = make_repo_comments_factory.make_comments(repo) + comment_ids = [x.comment_id for x in comments] + Session().commit() + + for comment_id in comment_ids: + id_, params = build_data(self.apikey, 'get_comment', + **{'comment_id': comment_id}) + response = api_call(self.app, params) + result = assert_call_ok(id_, given=response.body) + assert result['comment_id'] == comment_id + + def test_api_get_comment_no_access(self, make_repo_comments_factory, backend_hg, user_util): + commits = [{'message': 'A'}, {'message': 'B'}] + repo = 
backend_hg.create_repo(commits=commits) + comments = make_repo_comments_factory.make_comments(repo) + comment_id = comments[0].comment_id + + test_user = user_util.create_user() + user_util.grant_user_permission_to_repo(repo, test_user, 'repository.none') + + id_, params = build_data(test_user.api_key, 'get_comment', + **{'comment_id': comment_id}) + response = api_call(self.app, params) + assert_error(id_, + expected='comment `{}` does not exist'.format(comment_id), + given=response.body) diff --git a/rhodecode/api/views/pull_request_api.py b/rhodecode/api/views/pull_request_api.py --- a/rhodecode/api/views/pull_request_api.py +++ b/rhodecode/api/views/pull_request_api.py @@ -21,7 +21,6 @@ import logging -from rhodecode import events from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError from rhodecode.api.utils import ( has_superadmin_permission, Optional, OAttr, get_repo_or_error, @@ -36,8 +35,7 @@ from rhodecode.model.db import Session, from rhodecode.model.pull_request import PullRequestModel, MergeCheck from rhodecode.model.settings import SettingsModel from rhodecode.model.validation_schema import Invalid -from rhodecode.model.validation_schema.schemas.reviewer_schema import( - ReviewerListSchema) +from rhodecode.model.validation_schema.schemas.reviewer_schema import ReviewerListSchema log = logging.getLogger(__name__) @@ -292,10 +290,11 @@ def merge_pull_request( else: repo = pull_request.target_repo auth_user = apiuser + if not isinstance(userid, Optional): - if (has_superadmin_permission(apiuser) or - HasRepoPermissionAnyApi('repository.admin')( - user=apiuser, repo_name=repo.repo_name)): + is_repo_admin = HasRepoPermissionAnyApi('repository.admin')( + user=apiuser, repo_name=repo.repo_name) + if has_superadmin_permission(apiuser) or is_repo_admin: apiuser = get_user_or_error(userid) auth_user = apiuser.AuthUser() else: @@ -379,6 +378,7 @@ def get_pull_request_comments( }, "comment_text": "Example text", "comment_type": null, + 
"comment_last_version: 0, "pull_request_version": null, "comment_commit_id": None, "comment_pull_request_id": @@ -510,9 +510,9 @@ def comment_pull_request( auth_user = apiuser if not isinstance(userid, Optional): - if (has_superadmin_permission(apiuser) or - HasRepoPermissionAnyApi('repository.admin')( - user=apiuser, repo_name=repo.repo_name)): + is_repo_admin = HasRepoPermissionAnyApi('repository.admin')( + user=apiuser, repo_name=repo.repo_name) + if has_superadmin_permission(apiuser) or is_repo_admin: apiuser = get_user_or_error(userid) auth_user = apiuser.AuthUser() else: @@ -979,10 +979,10 @@ def close_pull_request( else: repo = pull_request.target_repo + is_repo_admin = HasRepoPermissionAnyApi('repository.admin')( + user=apiuser, repo_name=repo.repo_name) if not isinstance(userid, Optional): - if (has_superadmin_permission(apiuser) or - HasRepoPermissionAnyApi('repository.admin')( - user=apiuser, repo_name=repo.repo_name)): + if has_superadmin_permission(apiuser) or is_repo_admin: apiuser = get_user_or_error(userid) else: raise JSONRPCError('userid is not the same as your user') diff --git a/rhodecode/api/views/repo_api.py b/rhodecode/api/views/repo_api.py --- a/rhodecode/api/views/repo_api.py +++ b/rhodecode/api/views/repo_api.py @@ -31,11 +31,15 @@ from rhodecode.api.utils import ( validate_set_owner_permissions) from rhodecode.lib import audit_logger, rc_cache from rhodecode.lib import repo_maintenance -from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi +from rhodecode.lib.auth import ( + HasPermissionAnyApi, HasUserGroupPermissionAnyApi, + HasRepoPermissionAnyApi) from rhodecode.lib.celerylib.utils import get_task_id -from rhodecode.lib.utils2 import str2bool, time_to_datetime, safe_str, safe_int, safe_unicode +from rhodecode.lib.utils2 import ( + str2bool, time_to_datetime, safe_str, safe_int, safe_unicode) from rhodecode.lib.ext_json import json -from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError 
+from rhodecode.lib.exceptions import ( + StatusChangeOnClosedPullRequestError, CommentVersionMismatch) from rhodecode.lib.vcs import RepositoryError from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError from rhodecode.model.changeset_status import ChangesetStatusModel @@ -44,6 +48,7 @@ from rhodecode.model.db import ( Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, ChangesetComment) from rhodecode.model.permission import PermissionModel +from rhodecode.model.pull_request import PullRequestModel from rhodecode.model.repo import RepoModel from rhodecode.model.scm import ScmModel, RepoList from rhodecode.model.settings import SettingsModel, VcsSettingsModel @@ -1719,7 +1724,8 @@ def get_repo_comments(request, apiuser, "comment_resolved_by": null, "comment_status": [], "comment_text": "This file needs a header", - "comment_type": "todo" + "comment_type": "todo", + "comment_last_version: 0 } ], "error" : null @@ -1752,6 +1758,157 @@ def get_repo_comments(request, apiuser, @jsonrpc_method() +def get_comment(request, apiuser, comment_id): + """ + Get single comment from repository or pull_request + + :param apiuser: This is filled automatically from the |authtoken|. + :type apiuser: AuthUser + :param comment_id: comment id found in the URL of comment + :type comment_id: str or int + + Example error output: + + .. 
code-block:: bash + + { + "id" : , + "result" : { + "comment_author": , + "comment_created_on": "2017-02-01T14:38:16.309", + "comment_f_path": "file.txt", + "comment_id": 282, + "comment_lineno": "n1", + "comment_resolved_by": null, + "comment_status": [], + "comment_text": "This file needs a header", + "comment_type": "todo", + "comment_last_version: 0 + }, + "error" : null + } + + """ + + comment = ChangesetComment.get(comment_id) + if not comment: + raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) + + perms = ('repository.read', 'repository.write', 'repository.admin') + has_comment_perm = HasRepoPermissionAnyApi(*perms)\ + (user=apiuser, repo_name=comment.repo.repo_name) + + if not has_comment_perm: + raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) + + return comment + + +@jsonrpc_method() +def edit_comment(request, apiuser, message, comment_id, version, + userid=Optional(OAttr('apiuser'))): + """ + Edit comment on the pull request or commit, + specified by the `comment_id` and version. Initially version should be 0 + + :param apiuser: This is filled automatically from the |authtoken|. + :type apiuser: AuthUser + :param comment_id: Specify the comment_id for editing + :type comment_id: int + :param version: version of the comment that will be created, starts from 0 + :type version: int + :param message: The text content of the comment. + :type message: str + :param userid: Comment on the pull request as this user + :type userid: Optional(str or int) + + Example output: + + .. 
code-block:: bash + + id : + result : { + "comment": "", + "version": "", + }, + error : null + """ + + auth_user = apiuser + comment = ChangesetComment.get(comment_id) + if not comment: + raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) + + is_super_admin = has_superadmin_permission(apiuser) + is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ + (user=apiuser, repo_name=comment.repo.repo_name) + + if not isinstance(userid, Optional): + if is_super_admin or is_repo_admin: + apiuser = get_user_or_error(userid) + auth_user = apiuser.AuthUser() + else: + raise JSONRPCError('userid is not the same as your user') + + comment_author = comment.author.user_id == auth_user.user_id + if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): + raise JSONRPCError("you don't have access to edit this comment") + + try: + comment_history = CommentsModel().edit( + comment_id=comment_id, + text=message, + auth_user=auth_user, + version=version, + ) + Session().commit() + except CommentVersionMismatch: + raise JSONRPCError( + 'comment ({}) version ({}) mismatch'.format(comment_id, version) + ) + if not comment_history and not message: + raise JSONRPCError( + "comment ({}) can't be changed with empty string".format(comment_id) + ) + + if comment.pull_request: + pull_request = comment.pull_request + PullRequestModel().trigger_pull_request_hook( + pull_request, apiuser, 'comment_edit', + data={'comment': comment}) + else: + db_repo = comment.repo + commit_id = comment.revision + commit = db_repo.get_commit(commit_id) + CommentsModel().trigger_commit_comment_hook( + db_repo, apiuser, 'edit', + data={'comment': comment, 'commit': commit}) + + data = { + 'comment': comment, + 'version': comment_history.version if comment_history else None, + } + return data + + +# TODO(marcink): write this with all required logic for deleting a comments in PR or commits +# @jsonrpc_method() +# def delete_comment(request, apiuser, comment_id): 
+# auth_user = apiuser +# +# comment = ChangesetComment.get(comment_id) +# if not comment: +# raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) +# +# is_super_admin = has_superadmin_permission(apiuser) +# is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ +# (user=apiuser, repo_name=comment.repo.repo_name) +# +# comment_author = comment.author.user_id == auth_user.user_id +# if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): +# raise JSONRPCError("you don't have access to edit this comment") + +@jsonrpc_method() def grant_user_permission(request, apiuser, repoid, userid, perm): """ Grant permissions for the specified user on the given repository, diff --git a/rhodecode/apps/admin/views/repo_groups.py b/rhodecode/apps/admin/views/repo_groups.py --- a/rhodecode/apps/admin/views/repo_groups.py +++ b/rhodecode/apps/admin/views/repo_groups.py @@ -69,6 +69,7 @@ class AdminRepoGroupsView(BaseAppView, D c.repo_groups = RepoGroup.groups_choices( groups=groups_with_admin_rights, show_empty_group=allow_empty_group) + c.personal_repo_group = self._rhodecode_user.personal_repo_group def _can_create_repo_group(self, parent_group_id=None): is_admin = HasPermissionAny('hg.admin')('group create controller') @@ -261,15 +262,28 @@ class AdminRepoGroupsView(BaseAppView, D # perm check for admin, create_group perm or admin of parent_group parent_group_id = safe_int(self.request.GET.get('parent_group')) + _gr = RepoGroup.get(parent_group_id) if not self._can_create_repo_group(parent_group_id): raise HTTPForbidden() self._load_form_data(c) defaults = {} # Future proof for default of repo group + + parent_group_choice = '-1' + if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group: + parent_group_choice = self._rhodecode_user.personal_repo_group + + if parent_group_id and _gr: + if parent_group_id in [x[0] for x in c.repo_groups]: + parent_group_choice = safe_unicode(parent_group_id) + + 
defaults.update({'group_parent_id': parent_group_choice}) + data = render( 'rhodecode:templates/admin/repo_groups/repo_group_add.mako', self._get_template_context(c), self.request) + html = formencode.htmlfill.render( data, defaults=defaults, diff --git a/rhodecode/apps/admin/views/repositories.py b/rhodecode/apps/admin/views/repositories.py --- a/rhodecode/apps/admin/views/repositories.py +++ b/rhodecode/apps/admin/views/repositories.py @@ -169,8 +169,8 @@ class AdminReposView(BaseAppView, DataGr c = self.load_default_context() new_repo = self.request.GET.get('repo', '') - parent_group = safe_int(self.request.GET.get('parent_group')) - _gr = RepoGroup.get(parent_group) + parent_group_id = safe_int(self.request.GET.get('parent_group')) + _gr = RepoGroup.get(parent_group_id) if not HasPermissionAny('hg.admin', 'hg.create.repository')(): # you're not super admin nor have global create permissions, @@ -196,9 +196,9 @@ class AdminReposView(BaseAppView, DataGr if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group: parent_group_choice = self._rhodecode_user.personal_repo_group - if parent_group and _gr: - if parent_group in [x[0] for x in c.repo_groups]: - parent_group_choice = safe_unicode(parent_group) + if parent_group_id and _gr: + if parent_group_id in [x[0] for x in c.repo_groups]: + parent_group_choice = safe_unicode(parent_group_id) defaults.update({'repo_group': parent_group_choice}) diff --git a/rhodecode/apps/admin/views/settings.py b/rhodecode/apps/admin/views/settings.py --- a/rhodecode/apps/admin/views/settings.py +++ b/rhodecode/apps/admin/views/settings.py @@ -47,6 +47,7 @@ from rhodecode.model.db import RhodeCode from rhodecode.model.forms import (ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm, LabsSettingsForm, IssueTrackerPatternsForm) +from rhodecode.model.permission import PermissionModel from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.scm import ScmModel @@ 
-253,8 +254,7 @@ class AdminSettingsView(BaseAppView): c.active = 'mapping' rm_obsolete = self.request.POST.get('destroy', False) invalidate_cache = self.request.POST.get('invalidate', False) - log.debug( - 'rescanning repo location with destroy obsolete=%s', rm_obsolete) + log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete) if invalidate_cache: log.debug('invalidating all repositories cache') @@ -263,6 +263,8 @@ class AdminSettingsView(BaseAppView): filesystem_repos = ScmModel().repo_scan() added, removed = repo2db_mapper(filesystem_repos, rm_obsolete) + PermissionModel().trigger_permission_flush() + _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-' h.flash(_('Repositories successfully ' 'rescanned added: %s ; removed: %s') % @@ -576,8 +578,7 @@ class AdminSettingsView(BaseAppView): 'user': self._rhodecode_db_user } - (subject, headers, email_body, - email_body_plaintext) = EmailNotificationModel().render_email( + (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs) recipients = [test_email] if test_email else None diff --git a/rhodecode/apps/debug_style/views.py b/rhodecode/apps/debug_style/views.py --- a/rhodecode/apps/debug_style/views.py +++ b/rhodecode/apps/debug_style/views.py @@ -376,8 +376,7 @@ users: description edit fixes } template_type = email_id.split('+')[0] - (c.subject, c.headers, c.email_body, - c.email_body_plaintext) = EmailNotificationModel().render_email( + (c.subject, c.email_body, c.email_body_plaintext) = EmailNotificationModel().render_email( template_type, **email_kwargs.get(email_id, {})) test_email = self.request.GET.get('email') diff --git a/rhodecode/apps/gist/tests/test_admin_gists.py b/rhodecode/apps/gist/tests/test_admin_gists.py --- a/rhodecode/apps/gist/tests/test_admin_gists.py +++ b/rhodecode/apps/gist/tests/test_admin_gists.py @@ -302,7 +302,7 @@ class TestGistsController(TestController assert_response = 
response.assert_response() assert_response.element_equals_to( 'div.rc-user span.user', - 'test_admin') + 'test_admin') response.mustcontain('gist-desc') @@ -328,7 +328,7 @@ class TestGistsController(TestController assert_response = response.assert_response() assert_response.element_equals_to( 'div.rc-user span.user', - 'test_admin') + 'test_admin') response.mustcontain('gist-desc') def test_show_as_raw(self, create_gist): diff --git a/rhodecode/apps/repository/__init__.py b/rhodecode/apps/repository/__init__.py --- a/rhodecode/apps/repository/__init__.py +++ b/rhodecode/apps/repository/__init__.py @@ -79,6 +79,10 @@ def includeme(config): pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) config.add_route( + name='repo_commit_comment_history_view', + pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True) + + config.add_route( name='repo_commit_comment_attachment_upload', pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True) @@ -86,6 +90,10 @@ def includeme(config): name='repo_commit_comment_delete', pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) + config.add_route( + name='repo_commit_comment_edit', + pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True) + # still working url for backward compat. 
config.add_route( name='repo_commit_raw_deprecated', @@ -328,6 +336,11 @@ def includeme(config): repo_route=True) config.add_route( + name='pullrequest_comment_edit', + pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit', + repo_route=True, repo_accepted_types=['hg', 'git']) + + config.add_route( name='pullrequest_comment_delete', pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', repo_route=True, repo_accepted_types=['hg', 'git']) diff --git a/rhodecode/apps/repository/tests/test_repo_commit_comments.py b/rhodecode/apps/repository/tests/test_repo_commit_comments.py --- a/rhodecode/apps/repository/tests/test_repo_commit_comments.py +++ b/rhodecode/apps/repository/tests/test_repo_commit_comments.py @@ -35,6 +35,7 @@ def route_path(name, params=None, **kwar 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create', 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview', 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete', + 'repo_commit_comment_edit': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit', }[name].format(**kwargs) if params: @@ -268,6 +269,164 @@ class TestRepoCommitCommentsView(TestCon repo_name=backend.repo_name, commit_id=commit_id)) assert_comment_links(response, 0, 0) + def test_edit(self, backend): + self.log_user() + commit_id = backend.repo.get_commit('300').raw_id + text = u'CommentOnCommit' + + params = {'text': text, 'csrf_token': self.csrf_token} + self.app.post( + route_path( + 'repo_commit_comment_create', + repo_name=backend.repo_name, commit_id=commit_id), + params=params) + + comments = ChangesetComment.query().all() + assert len(comments) == 1 + comment_id = comments[0].comment_id + test_text = 'test_text' + self.app.post( + route_path( + 'repo_commit_comment_edit', + repo_name=backend.repo_name, + commit_id=commit_id, + comment_id=comment_id, + ), 
+ params={ + 'csrf_token': self.csrf_token, + 'text': test_text, + 'version': '0', + }) + + text_form_db = ChangesetComment.query().filter( + ChangesetComment.comment_id == comment_id).first().text + assert test_text == text_form_db + + def test_edit_without_change(self, backend): + self.log_user() + commit_id = backend.repo.get_commit('300').raw_id + text = u'CommentOnCommit' + + params = {'text': text, 'csrf_token': self.csrf_token} + self.app.post( + route_path( + 'repo_commit_comment_create', + repo_name=backend.repo_name, commit_id=commit_id), + params=params) + + comments = ChangesetComment.query().all() + assert len(comments) == 1 + comment_id = comments[0].comment_id + + response = self.app.post( + route_path( + 'repo_commit_comment_edit', + repo_name=backend.repo_name, + commit_id=commit_id, + comment_id=comment_id, + ), + params={ + 'csrf_token': self.csrf_token, + 'text': text, + 'version': '0', + }, + status=404, + ) + assert response.status_int == 404 + + def test_edit_try_edit_already_edited(self, backend): + self.log_user() + commit_id = backend.repo.get_commit('300').raw_id + text = u'CommentOnCommit' + + params = {'text': text, 'csrf_token': self.csrf_token} + self.app.post( + route_path( + 'repo_commit_comment_create', + repo_name=backend.repo_name, commit_id=commit_id + ), + params=params, + ) + + comments = ChangesetComment.query().all() + assert len(comments) == 1 + comment_id = comments[0].comment_id + test_text = 'test_text' + self.app.post( + route_path( + 'repo_commit_comment_edit', + repo_name=backend.repo_name, + commit_id=commit_id, + comment_id=comment_id, + ), + params={ + 'csrf_token': self.csrf_token, + 'text': test_text, + 'version': '0', + } + ) + test_text_v2 = 'test_v2' + response = self.app.post( + route_path( + 'repo_commit_comment_edit', + repo_name=backend.repo_name, + commit_id=commit_id, + comment_id=comment_id, + ), + params={ + 'csrf_token': self.csrf_token, + 'text': test_text_v2, + 'version': '0', + }, + status=409, + ) 
+ assert response.status_int == 409 + + text_form_db = ChangesetComment.query().filter( + ChangesetComment.comment_id == comment_id).first().text + + assert test_text == text_form_db + assert test_text_v2 != text_form_db + + def test_edit_forbidden_for_immutable_comments(self, backend): + self.log_user() + commit_id = backend.repo.get_commit('300').raw_id + text = u'CommentOnCommit' + + params = {'text': text, 'csrf_token': self.csrf_token, 'version': '0'} + self.app.post( + route_path( + 'repo_commit_comment_create', + repo_name=backend.repo_name, + commit_id=commit_id, + ), + params=params + ) + + comments = ChangesetComment.query().all() + assert len(comments) == 1 + comment_id = comments[0].comment_id + + comment = ChangesetComment.get(comment_id) + comment.immutable_state = ChangesetComment.OP_IMMUTABLE + Session().add(comment) + Session().commit() + + response = self.app.post( + route_path( + 'repo_commit_comment_edit', + repo_name=backend.repo_name, + commit_id=commit_id, + comment_id=comment_id, + ), + params={ + 'csrf_token': self.csrf_token, + 'text': 'test_text', + }, + status=403, + ) + assert response.status_int == 403 + def test_delete_forbidden_for_immutable_comments(self, backend): self.log_user() commit_id = backend.repo.get_commit('300').raw_id diff --git a/rhodecode/apps/repository/tests/test_repo_pullrequests.py b/rhodecode/apps/repository/tests/test_repo_pullrequests.py --- a/rhodecode/apps/repository/tests/test_repo_pullrequests.py +++ b/rhodecode/apps/repository/tests/test_repo_pullrequests.py @@ -30,6 +30,7 @@ from rhodecode.model.db import ( from rhodecode.model.meta import Session from rhodecode.model.pull_request import PullRequestModel from rhodecode.model.user import UserModel +from rhodecode.model.comment import CommentsModel from rhodecode.tests import ( assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) @@ -54,6 +55,7 @@ def route_path(name, params=None, **kwar 'pullrequest_delete': 
'/{repo_name}/pull-request/{pull_request_id}/delete', 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment', 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete', + 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit', }[name].format(**kwargs) if params: @@ -114,6 +116,223 @@ class TestPullrequestsView(object): if range_diff == "1": response.mustcontain('Turn off: Show the diff as commit range') + def test_show_versions_of_pr(self, backend, csrf_token): + commits = [ + {'message': 'initial-commit', + 'added': [FileNode('test-file.txt', 'LINE1\n')]}, + + {'message': 'commit-1', + 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]}, + # Above is the initial version of PR that changes a single line + + # from now on we'll add 3x commit adding a nother line on each step + {'message': 'commit-2', + 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]}, + + {'message': 'commit-3', + 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]}, + + {'message': 'commit-4', + 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]}, + ] + + commit_ids = backend.create_master_repo(commits) + target = backend.create_repo(heads=['initial-commit']) + source = backend.create_repo(heads=['commit-1']) + source_repo_name = source.repo_name + target_repo_name = target.repo_name + + target_ref = 'branch:{branch}:{commit_id}'.format( + branch=backend.default_branch_name, commit_id=commit_ids['initial-commit']) + source_ref = 'branch:{branch}:{commit_id}'.format( + branch=backend.default_branch_name, commit_id=commit_ids['commit-1']) + + response = self.app.post( + route_path('pullrequest_create', repo_name=source.repo_name), + [ + ('source_repo', source.repo_name), + ('source_ref', source_ref), + ('target_repo', target.repo_name), + ('target_ref', target_ref), + ('common_ancestor', 
commit_ids['initial-commit']), + ('pullrequest_title', 'Title'), + ('pullrequest_desc', 'Description'), + ('description_renderer', 'markdown'), + ('__start__', 'review_members:sequence'), + ('__start__', 'reviewer:mapping'), + ('user_id', '1'), + ('__start__', 'reasons:sequence'), + ('reason', 'Some reason'), + ('__end__', 'reasons:sequence'), + ('__start__', 'rules:sequence'), + ('__end__', 'rules:sequence'), + ('mandatory', 'False'), + ('__end__', 'reviewer:mapping'), + ('__end__', 'review_members:sequence'), + ('__start__', 'revisions:sequence'), + ('revisions', commit_ids['commit-1']), + ('__end__', 'revisions:sequence'), + ('user', ''), + ('csrf_token', csrf_token), + ], + status=302) + + location = response.headers['Location'] + + pull_request_id = location.rsplit('/', 1)[1] + assert pull_request_id != 'new' + pull_request = PullRequest.get(int(pull_request_id)) + + pull_request_id = pull_request.pull_request_id + + # Show initial version of PR + response = self.app.get( + route_path('pullrequest_show', + repo_name=target_repo_name, + pull_request_id=pull_request_id)) + + response.mustcontain('commit-1') + response.mustcontain(no=['commit-2']) + response.mustcontain(no=['commit-3']) + response.mustcontain(no=['commit-4']) + + response.mustcontain('cb-addition">LINE2') + response.mustcontain(no=['LINE3']) + response.mustcontain(no=['LINE4']) + response.mustcontain(no=['LINE5']) + + # update PR #1 + source_repo = Repository.get_by_repo_name(source_repo_name) + backend.pull_heads(source_repo, heads=['commit-2']) + response = self.app.post( + route_path('pullrequest_update', + repo_name=target_repo_name, pull_request_id=pull_request_id), + params={'update_commits': 'true', 'csrf_token': csrf_token}) + + # update PR #2 + source_repo = Repository.get_by_repo_name(source_repo_name) + backend.pull_heads(source_repo, heads=['commit-3']) + response = self.app.post( + route_path('pullrequest_update', + repo_name=target_repo_name, pull_request_id=pull_request_id), + 
params={'update_commits': 'true', 'csrf_token': csrf_token}) + + # update PR #3 + source_repo = Repository.get_by_repo_name(source_repo_name) + backend.pull_heads(source_repo, heads=['commit-4']) + response = self.app.post( + route_path('pullrequest_update', + repo_name=target_repo_name, pull_request_id=pull_request_id), + params={'update_commits': 'true', 'csrf_token': csrf_token}) + + # Show final version ! + response = self.app.get( + route_path('pullrequest_show', + repo_name=target_repo_name, + pull_request_id=pull_request_id)) + + # 3 updates, and the latest == 4 + response.mustcontain('4 versions available for this pull request') + response.mustcontain(no=['rhodecode diff rendering error']) + + # initial show must have 3 commits, and 3 adds + response.mustcontain('commit-1') + response.mustcontain('commit-2') + response.mustcontain('commit-3') + response.mustcontain('commit-4') + + response.mustcontain('cb-addition">LINE2') + response.mustcontain('cb-addition">LINE3') + response.mustcontain('cb-addition">LINE4') + response.mustcontain('cb-addition">LINE5') + + # fetch versions + pr = PullRequest.get(pull_request_id) + versions = [x.pull_request_version_id for x in pr.versions.all()] + assert len(versions) == 3 + + # show v1,v2,v3,v4 + def cb_line(text): + return 'cb-addition">{}'.format(text) + + def cb_context(text): + return '' \ + '{}'.format(text) + + commit_tests = { + # in response, not in response + 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']), + 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']), + 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']), + 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []), + } + diff_tests = { + 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']), + 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']), + 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']), + 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []), + } + for idx, ver in enumerate(versions, 1): + + response = self.app.get( + route_path('pullrequest_show', + 
repo_name=target_repo_name, + pull_request_id=pull_request_id, + params={'version': ver})) + + response.mustcontain(no=['rhodecode diff rendering error']) + response.mustcontain('Showing changes at v{}'.format(idx)) + + yes, no = commit_tests[idx] + for y in yes: + response.mustcontain(y) + for n in no: + response.mustcontain(no=n) + + yes, no = diff_tests[idx] + for y in yes: + response.mustcontain(cb_line(y)) + for n in no: + response.mustcontain(no=n) + + # show diff between versions + diff_compare_tests = { + 1: (['LINE3'], ['LINE1', 'LINE2']), + 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']), + 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']), + } + for idx, ver in enumerate(versions, 1): + adds, context = diff_compare_tests[idx] + + to_ver = ver+1 + if idx == 3: + to_ver = 'latest' + + response = self.app.get( + route_path('pullrequest_show', + repo_name=target_repo_name, + pull_request_id=pull_request_id, + params={'from_version': versions[0], 'version': to_ver})) + + response.mustcontain(no=['rhodecode diff rendering error']) + + for a in adds: + response.mustcontain(cb_line(a)) + for c in context: + response.mustcontain(cb_context(c)) + + # test version v2 -> v3 + response = self.app.get( + route_path('pullrequest_show', + repo_name=target_repo_name, + pull_request_id=pull_request_id, + params={'from_version': versions[1], 'version': versions[2]})) + + response.mustcontain(cb_context('LINE1')) + response.mustcontain(cb_context('LINE2')) + response.mustcontain(cb_context('LINE3')) + response.mustcontain(cb_line('LINE4')) + def test_close_status_visibility(self, pr_util, user_util, csrf_token): # Logout response = self.app.post( @@ -338,8 +557,8 @@ class TestPullrequestsView(object): response = self.app.post( route_path('pullrequest_comment_create', - repo_name=pull_request.target_repo.scm_instance().name, - pull_request_id=pull_request.pull_request_id), + repo_name=pull_request.target_repo.scm_instance().name, + 
pull_request_id=pull_request.pull_request_id), params={ 'close_pull_request': 'true', 'csrf_token': csrf_token}, @@ -355,6 +574,222 @@ class TestPullrequestsView(object): pull_request.source_repo, pull_request=pull_request) assert status == ChangesetStatus.STATUS_REJECTED + def test_comment_and_close_pull_request_try_edit_comment( + self, pr_util, csrf_token, xhr_header + ): + pull_request = pr_util.create_pull_request() + pull_request_id = pull_request.pull_request_id + target_scm = pull_request.target_repo.scm_instance() + target_scm_name = target_scm.name + + response = self.app.post( + route_path( + 'pullrequest_comment_create', + repo_name=target_scm_name, + pull_request_id=pull_request_id, + ), + params={ + 'close_pull_request': 'true', + 'csrf_token': csrf_token, + }, + extra_environ=xhr_header) + + assert response.json + + pull_request = PullRequest.get(pull_request_id) + target_scm = pull_request.target_repo.scm_instance() + target_scm_name = target_scm.name + assert pull_request.is_closed() + + # check only the latest status, not the review status + status = ChangesetStatusModel().get_status( + pull_request.source_repo, pull_request=pull_request) + assert status == ChangesetStatus.STATUS_REJECTED + + comment_id = response.json.get('comment_id', None) + test_text = 'test' + response = self.app.post( + route_path( + 'pullrequest_comment_edit', + repo_name=target_scm_name, + pull_request_id=pull_request_id, + comment_id=comment_id, + ), + extra_environ=xhr_header, + params={ + 'csrf_token': csrf_token, + 'text': test_text, + }, + status=403, + ) + assert response.status_int == 403 + + def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header): + pull_request = pr_util.create_pull_request() + target_scm = pull_request.target_repo.scm_instance() + target_scm_name = target_scm.name + + response = self.app.post( + route_path( + 'pullrequest_comment_create', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id), + params={ + 
'csrf_token': csrf_token, + 'text': 'init', + }, + extra_environ=xhr_header, + ) + assert response.json + + comment_id = response.json.get('comment_id', None) + assert comment_id + test_text = 'test' + self.app.post( + route_path( + 'pullrequest_comment_edit', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id, + comment_id=comment_id, + ), + extra_environ=xhr_header, + params={ + 'csrf_token': csrf_token, + 'text': test_text, + 'version': '0', + }, + + ) + text_form_db = ChangesetComment.query().filter( + ChangesetComment.comment_id == comment_id).first().text + assert test_text == text_form_db + + def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header): + pull_request = pr_util.create_pull_request() + target_scm = pull_request.target_repo.scm_instance() + target_scm_name = target_scm.name + + response = self.app.post( + route_path( + 'pullrequest_comment_create', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id), + params={ + 'csrf_token': csrf_token, + 'text': 'init', + }, + extra_environ=xhr_header, + ) + assert response.json + + comment_id = response.json.get('comment_id', None) + assert comment_id + test_text = 'init' + response = self.app.post( + route_path( + 'pullrequest_comment_edit', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id, + comment_id=comment_id, + ), + extra_environ=xhr_header, + params={ + 'csrf_token': csrf_token, + 'text': test_text, + 'version': '0', + }, + status=404, + + ) + assert response.status_int == 404 + + def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header): + pull_request = pr_util.create_pull_request() + target_scm = pull_request.target_repo.scm_instance() + target_scm_name = target_scm.name + + response = self.app.post( + route_path( + 'pullrequest_comment_create', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id), + params={ + 'csrf_token': csrf_token, + 'text': 'init', + }, 
+ extra_environ=xhr_header, + ) + assert response.json + comment_id = response.json.get('comment_id', None) + assert comment_id + + test_text = 'test' + self.app.post( + route_path( + 'pullrequest_comment_edit', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id, + comment_id=comment_id, + ), + extra_environ=xhr_header, + params={ + 'csrf_token': csrf_token, + 'text': test_text, + 'version': '0', + }, + + ) + test_text_v2 = 'test_v2' + response = self.app.post( + route_path( + 'pullrequest_comment_edit', + repo_name=target_scm_name, + pull_request_id=pull_request.pull_request_id, + comment_id=comment_id, + ), + extra_environ=xhr_header, + params={ + 'csrf_token': csrf_token, + 'text': test_text_v2, + 'version': '0', + }, + status=409, + ) + assert response.status_int == 409 + + text_form_db = ChangesetComment.query().filter( + ChangesetComment.comment_id == comment_id).first().text + + assert test_text == text_form_db + assert test_text_v2 != text_form_db + + def test_comment_and_comment_edit_permissions_forbidden( + self, autologin_regular_user, user_regular, user_admin, pr_util, + csrf_token, xhr_header): + pull_request = pr_util.create_pull_request( + author=user_admin.username, enable_notifications=False) + comment = CommentsModel().create( + text='test', + repo=pull_request.target_repo.scm_instance().name, + user=user_admin, + pull_request=pull_request, + ) + response = self.app.post( + route_path( + 'pullrequest_comment_edit', + repo_name=pull_request.target_repo.scm_instance().name, + pull_request_id=pull_request.pull_request_id, + comment_id=comment.comment_id, + ), + extra_environ=xhr_header, + params={ + 'csrf_token': csrf_token, + 'text': 'test_text', + }, + status=403, + ) + assert response.status_int == 403 + def test_create_pull_request(self, backend, csrf_token): commits = [ {'message': 'ancestor'}, diff --git a/rhodecode/apps/repository/views/repo_commits.py b/rhodecode/apps/repository/views/repo_commits.py --- 
a/rhodecode/apps/repository/views/repo_commits.py +++ b/rhodecode/apps/repository/views/repo_commits.py @@ -20,9 +20,9 @@ import logging -import collections -from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden +from pyramid.httpexceptions import ( + HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict) from pyramid.view import view_config from pyramid.renderers import render from pyramid.response import Response @@ -39,13 +39,14 @@ from rhodecode.lib.compat import Ordered from rhodecode.lib.diffs import ( cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, get_diff_whitespace_flag) -from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError +from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch import rhodecode.lib.helpers as h from rhodecode.lib.utils2 import safe_unicode, str2bool from rhodecode.lib.vcs.backends.base import EmptyCommit from rhodecode.lib.vcs.exceptions import ( RepositoryError, CommitDoesNotExistError) -from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore +from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \ + ChangesetCommentHistory from rhodecode.model.changeset_status import ChangesetStatusModel from rhodecode.model.comment import CommentsModel from rhodecode.model.meta import Session @@ -431,6 +432,34 @@ class RepoCommitsView(RepoAppView): 'repository.read', 'repository.write', 'repository.admin') @CSRFRequired() @view_config( + route_name='repo_commit_comment_history_view', request_method='POST', + renderer='string', xhr=True) + def repo_commit_comment_history_view(self): + c = self.load_default_context() + + comment_history_id = self.request.matchdict['comment_history_id'] + comment_history = ChangesetCommentHistory.get_or_404(comment_history_id) + is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id + + if is_repo_comment: + 
c.comment_history = comment_history + + rendered_comment = render( + 'rhodecode:templates/changeset/comment_history.mako', + self._get_template_context(c) + , self.request) + return rendered_comment + else: + log.warning('No permissions for user %s to show comment_history_id: %s', + self._rhodecode_db_user, comment_history_id) + raise HTTPNotFound() + + @LoginRequired() + @NotAnonymous() + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') + @CSRFRequired() + @view_config( route_name='repo_commit_comment_attachment_upload', request_method='POST', renderer='json_ext', xhr=True) def repo_commit_comment_attachment_upload(self): @@ -545,7 +574,7 @@ class RepoCommitsView(RepoAppView): is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) super_admin = h.HasPermissionAny('hg.admin')() comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) - is_repo_comment = comment.repo.repo_name == self.db_repo_name + is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id comment_repo_admin = is_repo_admin and is_repo_comment if super_admin or comment_owner or comment_repo_admin: @@ -558,6 +587,90 @@ class RepoCommitsView(RepoAppView): raise HTTPNotFound() @LoginRequired() + @NotAnonymous() + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') + @CSRFRequired() + @view_config( + route_name='repo_commit_comment_edit', request_method='POST', + renderer='json_ext') + def repo_commit_comment_edit(self): + self.load_default_context() + + comment_id = self.request.matchdict['comment_id'] + comment = ChangesetComment.get_or_404(comment_id) + + if comment.immutable: + # don't allow deleting comments that are immutable + raise HTTPForbidden() + + is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) + super_admin = h.HasPermissionAny('hg.admin')() + comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) + 
is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id + comment_repo_admin = is_repo_admin and is_repo_comment + + if super_admin or comment_owner or comment_repo_admin: + text = self.request.POST.get('text') + version = self.request.POST.get('version') + if text == comment.text: + log.warning( + 'Comment(repo): ' + 'Trying to create new version ' + 'with the same comment body {}'.format( + comment_id, + ) + ) + raise HTTPNotFound() + + if version.isdigit(): + version = int(version) + else: + log.warning( + 'Comment(repo): Wrong version type {} {} ' + 'for comment {}'.format( + version, + type(version), + comment_id, + ) + ) + raise HTTPNotFound() + + try: + comment_history = CommentsModel().edit( + comment_id=comment_id, + text=text, + auth_user=self._rhodecode_user, + version=version, + ) + except CommentVersionMismatch: + raise HTTPConflict() + + if not comment_history: + raise HTTPNotFound() + + commit_id = self.request.matchdict['commit_id'] + commit = self.db_repo.get_commit(commit_id) + CommentsModel().trigger_commit_comment_hook( + self.db_repo, self._rhodecode_user, 'edit', + data={'comment': comment, 'commit': commit}) + + Session().commit() + return { + 'comment_history_id': comment_history.comment_history_id, + 'comment_id': comment.comment_id, + 'comment_version': comment_history.version, + 'comment_author_username': comment_history.author.username, + 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), + 'comment_created_on': h.age_component(comment_history.created_on, + time_is_local=True), + } + else: + log.warning('No permissions for user %s to edit comment_id: %s', + self._rhodecode_db_user, comment_id) + raise HTTPNotFound() + + @LoginRequired() @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') @view_config( diff --git a/rhodecode/apps/repository/views/repo_files.py b/rhodecode/apps/repository/views/repo_files.py --- a/rhodecode/apps/repository/views/repo_files.py +++ 
b/rhodecode/apps/repository/views/repo_files.py @@ -125,7 +125,7 @@ class RepoFilesView(RepoAppView): self.db_repo_name, branch_name) if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']: message = _('Branch `{}` changes forbidden by rule {}.').format( - h.escape(branch_name), rule) + h.escape(branch_name), h.escape(rule)) h.flash(message, 'warning') if json_mode: diff --git a/rhodecode/apps/repository/views/repo_pull_requests.py b/rhodecode/apps/repository/views/repo_pull_requests.py --- a/rhodecode/apps/repository/views/repo_pull_requests.py +++ b/rhodecode/apps/repository/views/repo_pull_requests.py @@ -25,7 +25,7 @@ import formencode import formencode.htmlfill import peppercorn from pyramid.httpexceptions import ( - HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest) + HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict) from pyramid.view import view_config from pyramid.renderers import render @@ -34,6 +34,7 @@ from rhodecode.apps._base import RepoApp from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream from rhodecode.lib.base import vcs_operation_context from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist +from rhodecode.lib.exceptions import CommentVersionMismatch from rhodecode.lib.ext_json import json from rhodecode.lib.auth import ( LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, @@ -213,9 +214,12 @@ class RepoPullRequestsView(RepoAppView, ancestor_commit, source_ref_id, target_ref_id, target_commit, source_commit, diff_limit, file_limit, - fulldiff, hide_whitespace_changes, diff_context): + fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True): - target_ref_id = ancestor_commit.raw_id + if use_ancestor: + # we might want to not use it for versions + target_ref_id = ancestor_commit.raw_id + vcs_diff = PullRequestModel().get_diff( source_repo, source_ref_id, target_ref_id, hide_whitespace_changes, diff_context) @@ -568,7 +572,6 @@ 
class RepoPullRequestsView(RepoAppView, c.commit_ranges.append(comm) c.missing_requirements = missing_requirements - c.ancestor_commit = ancestor_commit c.statuses = source_repo.statuses( [x.raw_id for x in c.commit_ranges]) @@ -593,6 +596,10 @@ class RepoPullRequestsView(RepoAppView, else: c.inline_comments = display_inline_comments + use_ancestor = True + if from_version_normalized != version_normalized: + use_ancestor = False + has_proper_diff_cache = cached_diff and cached_diff.get('commits') if not force_recache and has_proper_diff_cache: c.diffset = cached_diff['diff'] @@ -604,7 +611,10 @@ class RepoPullRequestsView(RepoAppView, source_ref_id, target_ref_id, target_commit, source_commit, diff_limit, file_limit, c.fulldiff, - hide_whitespace_changes, diff_context) + hide_whitespace_changes, diff_context, + use_ancestor=use_ancestor + ) + # save cached diff if caching_enabled: cache_diff(cache_file_path, c.diffset, diff_commit_cache) @@ -1524,3 +1534,104 @@ class RepoPullRequestsView(RepoAppView, log.warning('No permissions for user %s to delete comment_id: %s', self._rhodecode_db_user, comment_id) raise HTTPNotFound() + + @LoginRequired() + @NotAnonymous() + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') + @CSRFRequired() + @view_config( + route_name='pullrequest_comment_edit', request_method='POST', + renderer='json_ext') + def pull_request_comment_edit(self): + self.load_default_context() + + pull_request = PullRequest.get_or_404( + self.request.matchdict['pull_request_id'] + ) + comment = ChangesetComment.get_or_404( + self.request.matchdict['comment_id'] + ) + comment_id = comment.comment_id + + if comment.immutable: + # don't allow editing comments that are immutable + raise HTTPForbidden() + + if pull_request.is_closed(): + log.debug('comment: forbidden because pull request is closed') + raise HTTPForbidden() + + if not comment: + log.debug('Comment with id:%s not found, skipping', comment_id) + # comment
already deleted in another call probably + return True + + if comment.pull_request.is_closed(): + # don't allow editing comments on closed pull request + raise HTTPForbidden() + + is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) + super_admin = h.HasPermissionAny('hg.admin')() + comment_owner = comment.author.user_id == self._rhodecode_user.user_id + is_repo_comment = comment.repo.repo_name == self.db_repo_name + comment_repo_admin = is_repo_admin and is_repo_comment + + if super_admin or comment_owner or comment_repo_admin: + text = self.request.POST.get('text') + version = self.request.POST.get('version') + if text == comment.text: + log.warning( + 'Comment(PR): ' + 'Trying to create new version ' + 'with the same comment body {}'.format( + comment_id, + ) + ) + raise HTTPNotFound() + + if version.isdigit(): + version = int(version) + else: + log.warning( + 'Comment(PR): Wrong version type {} {} ' + 'for comment {}'.format( + version, + type(version), + comment_id, + ) + ) + raise HTTPNotFound() + + try: + comment_history = CommentsModel().edit( + comment_id=comment_id, + text=text, + auth_user=self._rhodecode_user, + version=version, + ) + except CommentVersionMismatch: + raise HTTPConflict() + + if not comment_history: + raise HTTPNotFound() + + Session().commit() + + PullRequestModel().trigger_pull_request_hook( + pull_request, self._rhodecode_user, 'comment_edit', + data={'comment': comment}) + + return { + 'comment_history_id': comment_history.comment_history_id, + 'comment_id': comment.comment_id, + 'comment_version': comment_history.version, + 'comment_author_username': comment_history.author.username, + 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), + 'comment_created_on': h.age_component(comment_history.created_on, + time_is_local=True), + } + else: + log.warning('No permissions for user %s to edit comment_id: %s', + self._rhodecode_db_user, comment_id) + raise HTTPNotFound() diff --git 
a/rhodecode/authentication/base.py b/rhodecode/authentication/base.py --- a/rhodecode/authentication/base.py +++ b/rhodecode/authentication/base.py @@ -743,7 +743,7 @@ def authenticate(username, password, env log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)', plugin.get_id(), plugin_cache_active, cache_ttl) - user_id = user.user_id if user else None + user_id = user.user_id if user else 'no-user' # don't cache for empty users plugin_cache_active = plugin_cache_active and user_id cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) diff --git a/rhodecode/config/rcextensions/__init__.py b/rhodecode/config/rcextensions/__init__.py --- a/rhodecode/config/rcextensions/__init__.py +++ b/rhodecode/config/rcextensions/__init__.py @@ -26,6 +26,7 @@ from .hooks import ( _pre_create_user_hook, _create_user_hook, _comment_commit_repo_hook, + _comment_edit_commit_repo_hook, _delete_repo_hook, _delete_user_hook, _pre_push_hook, @@ -35,6 +36,7 @@ from .hooks import ( _create_pull_request_hook, _review_pull_request_hook, _comment_pull_request_hook, + _comment_edit_pull_request_hook, _update_pull_request_hook, _merge_pull_request_hook, _close_pull_request_hook, @@ -43,6 +45,7 @@ from .hooks import ( # set as module attributes, we use those to call hooks. 
*do not change this* CREATE_REPO_HOOK = _create_repo_hook COMMENT_COMMIT_REPO_HOOK = _comment_commit_repo_hook +COMMENT_EDIT_COMMIT_REPO_HOOK = _comment_edit_commit_repo_hook CREATE_REPO_GROUP_HOOK = _create_repo_group_hook PRE_CREATE_USER_HOOK = _pre_create_user_hook CREATE_USER_HOOK = _create_user_hook @@ -55,6 +58,7 @@ PULL_HOOK = _pull_hook CREATE_PULL_REQUEST = _create_pull_request_hook REVIEW_PULL_REQUEST = _review_pull_request_hook COMMENT_PULL_REQUEST = _comment_pull_request_hook +COMMENT_EDIT_PULL_REQUEST = _comment_edit_pull_request_hook UPDATE_PULL_REQUEST = _update_pull_request_hook MERGE_PULL_REQUEST = _merge_pull_request_hook CLOSE_PULL_REQUEST = _close_pull_request_hook diff --git a/rhodecode/config/rcextensions/examples/custom_integration_templates.py b/rhodecode/config/rcextensions/examples/custom_integration_templates.py --- a/rhodecode/config/rcextensions/examples/custom_integration_templates.py +++ b/rhodecode/config/rcextensions/examples/custom_integration_templates.py @@ -1,5 +1,6 @@ -# This code allows override the integrations templates. -# Put this into the __init__.py file of rcextensions to override the templates +# Below code examples allows override the integrations templates, or email titles. +# Append selected parts at the end of the __init__.py file of rcextensions directory +# to override the templates # EMAIL Integration @@ -185,3 +186,18 @@ message: ``` ''') + + +# Example to modify emails default title +from rhodecode.model import notification + +notification.EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '{updating_user} updated pull request. !{pr_id}: "{pr_title}"' +notification.EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '{user} requested a pull request review. 
!{pr_id}: "{pr_title}"' + +notification.EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '{mention_prefix}{user} left a {comment_type} on pull request !{pr_id}: "{pr_title}"' +notification.EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '{mention_prefix}[status: {status}] {user} left a {comment_type} on pull request !{pr_id}: "{pr_title}"' +notification.EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '{mention_prefix}{user} left a {comment_type} on file `{comment_file}` in pull request !{pr_id}: "{pr_title}"' + +notification.EMAIL_COMMENT_SUBJECT_TEMPLATE = '{mention_prefix}{user} left a {comment_type} on commit `{commit_id}`' +notification.EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '{mention_prefix}[status: {status}] {user} left a {comment_type} on commit `{commit_id}`' +notification.EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '{mention_prefix}{user} left a {comment_type} on file `{comment_file}` in commit `{commit_id}`' diff --git a/rhodecode/config/rcextensions/hooks.py b/rhodecode/config/rcextensions/hooks.py --- a/rhodecode/config/rcextensions/hooks.py +++ b/rhodecode/config/rcextensions/hooks.py @@ -83,6 +83,33 @@ def _comment_commit_repo_hook(*args, **k @has_kwargs({ + 'repo_name': '', + 'repo_type': '', + 'description': '', + 'private': '', + 'created_on': '', + 'enable_downloads': '', + 'repo_id': '', + 'user_id': '', + 'enable_statistics': '', + 'clone_uri': '', + 'fork_id': '', + 'group_id': '', + 'created_by': '', + 'repository': '', + 'comment': '', + 'commit': '' +}) +def _comment_edit_commit_repo_hook(*args, **kwargs): + """ + POST EDIT REPOSITORY COMMENT ON COMMIT HOOK. This function will be executed after + a comment is edited on this repository commit. 
+ + """ + return HookResponse(0, '') + + +@has_kwargs({ 'group_name': '', 'group_parent_id': '', 'group_description': '', @@ -408,6 +435,38 @@ def _comment_pull_request_hook(*args, ** 'scm': 'type of version control "git", "hg", "svn"', 'username': 'username of actor who triggered this event', 'ip': 'ip address of actor who triggered this hook', + + 'action': '', + 'repository': 'repository name', + 'pull_request_id': '', + 'url': '', + 'title': '', + 'description': '', + 'status': '', + 'comment': '', + 'created_on': '', + 'updated_on': '', + 'commit_ids': '', + 'review_status': '', + 'mergeable': '', + 'source': '', + 'target': '', + 'author': '', + 'reviewers': '', +}) +def _comment_edit_pull_request_hook(*args, **kwargs): + """ + This hook will be executed after a comment is edited on a pull request + """ + return HookResponse(0, '') + + +@has_kwargs({ + 'server_url': 'url of instance that triggered this hook', + 'config': 'path to .ini config used', + 'scm': 'type of version control "git", "hg", "svn"', + 'username': 'username of actor who triggered this event', + 'ip': 'ip address of actor who triggered this hook', 'action': '', 'repository': 'repository name', 'pull_request_id': '', diff --git a/rhodecode/config/routing_links.py b/rhodecode/config/routing_links.py --- a/rhodecode/config/routing_links.py +++ b/rhodecode/config/routing_links.py @@ -74,7 +74,7 @@ link_config = [ }, { "name": "rst_help", - "target": "http://docutils.sourceforge.net/docs/user/rst/quickref.html", + "target": "http://docutils.sourceforge.io/docs/user/rst/quickref.html", "external_target": "https://docutils.sourceforge.io/docs/user/rst/quickref.html", }, { diff --git a/rhodecode/events/__init__.py b/rhodecode/events/__init__.py --- a/rhodecode/events/__init__.py +++ b/rhodecode/events/__init__.py @@ -53,7 +53,8 @@ from rhodecode.events.user import ( # p ) from rhodecode.events.repo import ( # pragma: no cover - RepoEvent, RepoCommitCommentEvent, + RepoEvent, + RepoCommitCommentEvent, 
RepoCommitCommentEditEvent, RepoPreCreateEvent, RepoCreateEvent, RepoPreDeleteEvent, RepoDeleteEvent, RepoPrePushEvent, RepoPushEvent, @@ -72,8 +73,8 @@ from rhodecode.events.pullrequest import PullRequestCreateEvent, PullRequestUpdateEvent, PullRequestCommentEvent, + PullRequestCommentEditEvent, PullRequestReviewEvent, PullRequestMergeEvent, PullRequestCloseEvent, - PullRequestCommentEvent, ) diff --git a/rhodecode/events/pullrequest.py b/rhodecode/events/pullrequest.py --- a/rhodecode/events/pullrequest.py +++ b/rhodecode/events/pullrequest.py @@ -19,8 +19,7 @@ import logging from rhodecode.translation import lazy_ugettext -from rhodecode.events.repo import ( - RepoEvent, _commits_as_dict, _issues_as_dict) +from rhodecode.events.repo import (RepoEvent, _commits_as_dict, _issues_as_dict) log = logging.getLogger(__name__) @@ -155,6 +154,7 @@ class PullRequestCommentEvent(PullReques 'type': self.comment.comment_type, 'file': self.comment.f_path, 'line': self.comment.line_no, + 'version': self.comment.last_version, 'url': CommentsModel().get_url( self.comment, request=self.request), 'permalink_url': CommentsModel().get_url( @@ -162,3 +162,42 @@ class PullRequestCommentEvent(PullReques } }) return data + + +class PullRequestCommentEditEvent(PullRequestEvent): + """ + An instance of this class is emitted as an :term:`event` after a pull + request comment is edited. 
+ """ + name = 'pullrequest-comment-edit' + display_name = lazy_ugettext('pullrequest comment edited') + description = lazy_ugettext('Event triggered after a comment was edited on a code ' + 'in the pull request') + + def __init__(self, pullrequest, comment): + super(PullRequestCommentEditEvent, self).__init__(pullrequest) + self.comment = comment + + def as_dict(self): + from rhodecode.model.comment import CommentsModel + data = super(PullRequestCommentEditEvent, self).as_dict() + + status = None + if self.comment.status_change: + status = self.comment.status_change[0].status + + data.update({ + 'comment': { + 'status': status, + 'text': self.comment.text, + 'type': self.comment.comment_type, + 'file': self.comment.f_path, + 'line': self.comment.line_no, + 'version': self.comment.last_version, + 'url': CommentsModel().get_url( + self.comment, request=self.request), + 'permalink_url': CommentsModel().get_url( + self.comment, request=self.request, permalink=True), + } + }) + return data diff --git a/rhodecode/events/repo.py b/rhodecode/events/repo.py --- a/rhodecode/events/repo.py +++ b/rhodecode/events/repo.py @@ -211,6 +211,42 @@ class RepoCommitCommentEvent(RepoEvent): 'comment_type': self.comment.comment_type, 'comment_f_path': self.comment.f_path, 'comment_line_no': self.comment.line_no, + 'comment_version': self.comment.last_version, + } + return data + + +class RepoCommitCommentEditEvent(RepoEvent): + """ + An instance of this class is emitted as an :term:`event` after a comment is edited + on repository commit. 
+ """ + + name = 'repo-commit-edit-comment' + display_name = lazy_ugettext('repository commit edit comment') + description = lazy_ugettext('Event triggered after a comment was edited ' + 'on commit inside a repository') + + def __init__(self, repo, commit, comment): + super(RepoCommitCommentEditEvent, self).__init__(repo) + self.commit = commit + self.comment = comment + + def as_dict(self): + data = super(RepoCommitCommentEditEvent, self).as_dict() + data['commit'] = { + 'commit_id': self.commit.raw_id, + 'commit_message': self.commit.message, + 'commit_branch': self.commit.branch, + } + + data['comment'] = { + 'comment_id': self.comment.comment_id, + 'comment_text': self.comment.text, + 'comment_type': self.comment.comment_type, + 'comment_f_path': self.comment.f_path, + 'comment_line_no': self.comment.line_no, + 'comment_version': self.comment.last_version, } return data diff --git a/rhodecode/integrations/types/base.py b/rhodecode/integrations/types/base.py --- a/rhodecode/integrations/types/base.py +++ b/rhodecode/integrations/types/base.py @@ -331,6 +331,26 @@ class WebhookDataHandler(CommitParsingDa return [(url, self.headers, data)] + def repo_commit_comment_edit_handler(self, event, data): + url = self.get_base_parsed_template(data) + log.debug('register %s call(%s) to url %s', self.name, event, url) + comment_vars = [ + ('commit_comment_id', data['comment']['comment_id']), + ('commit_comment_text', data['comment']['comment_text']), + ('commit_comment_type', data['comment']['comment_type']), + + ('commit_comment_f_path', data['comment']['comment_f_path']), + ('commit_comment_line_no', data['comment']['comment_line_no']), + + ('commit_comment_commit_id', data['commit']['commit_id']), + ('commit_comment_commit_branch', data['commit']['commit_branch']), + ('commit_comment_commit_message', data['commit']['commit_message']), + ] + for k, v in comment_vars: + url = UrlTmpl(url).safe_substitute(**{k: v}) + + return [(url, self.headers, data)] + def 
repo_create_event_handler(self, event, data): url = self.get_base_parsed_template(data) log.debug('register %s call(%s) to url %s', self.name, event, url) @@ -360,6 +380,8 @@ class WebhookDataHandler(CommitParsingDa return self.repo_create_event_handler(event, data) elif isinstance(event, events.RepoCommitCommentEvent): return self.repo_commit_comment_handler(event, data) + elif isinstance(event, events.RepoCommitCommentEditEvent): + return self.repo_commit_comment_edit_handler(event, data) elif isinstance(event, events.PullRequestEvent): return self.pull_request_event_handler(event, data) else: diff --git a/rhodecode/integrations/types/hipchat.py b/rhodecode/integrations/types/hipchat.py --- a/rhodecode/integrations/types/hipchat.py +++ b/rhodecode/integrations/types/hipchat.py @@ -133,6 +133,8 @@ class HipchatIntegrationType(Integration if isinstance(event, events.PullRequestCommentEvent): text = self.format_pull_request_comment_event(event, data) + elif isinstance(event, events.PullRequestCommentEditEvent): + text = self.format_pull_request_comment_event(event, data) elif isinstance(event, events.PullRequestReviewEvent): text = self.format_pull_request_review_event(event, data) elif isinstance(event, events.PullRequestEvent): diff --git a/rhodecode/integrations/types/slack.py b/rhodecode/integrations/types/slack.py --- a/rhodecode/integrations/types/slack.py +++ b/rhodecode/integrations/types/slack.py @@ -157,6 +157,9 @@ class SlackIntegrationType(IntegrationTy if isinstance(event, events.PullRequestCommentEvent): (title, text, fields, overrides) \ = self.format_pull_request_comment_event(event, data) + elif isinstance(event, events.PullRequestCommentEditEvent): + (title, text, fields, overrides) \ + = self.format_pull_request_comment_event(event, data) elif isinstance(event, events.PullRequestReviewEvent): title, text = self.format_pull_request_review_event(event, data) elif isinstance(event, events.PullRequestEvent): diff --git 
a/rhodecode/integrations/types/webhook.py b/rhodecode/integrations/types/webhook.py --- a/rhodecode/integrations/types/webhook.py +++ b/rhodecode/integrations/types/webhook.py @@ -144,11 +144,13 @@ class WebhookIntegrationType(Integration events.PullRequestMergeEvent, events.PullRequestUpdateEvent, events.PullRequestCommentEvent, + events.PullRequestCommentEditEvent, events.PullRequestReviewEvent, events.PullRequestCreateEvent, events.RepoPushEvent, events.RepoCreateEvent, events.RepoCommitCommentEvent, + events.RepoCommitCommentEditEvent, ] def settings_schema(self): diff --git a/rhodecode/lib/audit_logger.py b/rhodecode/lib/audit_logger.py --- a/rhodecode/lib/audit_logger.py +++ b/rhodecode/lib/audit_logger.py @@ -82,6 +82,7 @@ ACTIONS_V1 = { 'repo.pull_request.merge': '', 'repo.pull_request.vote': '', 'repo.pull_request.comment.create': '', + 'repo.pull_request.comment.edit': '', 'repo.pull_request.comment.delete': '', 'repo.pull_request.reviewer.add': '', @@ -90,6 +91,7 @@ ACTIONS_V1 = { 'repo.commit.strip': {'commit_id': ''}, 'repo.commit.comment.create': {'data': {}}, 'repo.commit.comment.delete': {'data': {}}, + 'repo.commit.comment.edit': {'data': {}}, 'repo.commit.vote': '', 'repo.artifact.add': '', diff --git a/rhodecode/lib/auth.py b/rhodecode/lib/auth.py --- a/rhodecode/lib/auth.py +++ b/rhodecode/lib/auth.py @@ -367,8 +367,7 @@ class PermOriginDict(dict): self.perm_origin_stack = collections.OrderedDict() def __setitem__(self, key, (perm, origin, obj_id)): - self.perm_origin_stack.setdefault(key, []).append( - (perm, origin, obj_id)) + self.perm_origin_stack.setdefault(key, []).append((perm, origin, obj_id)) dict.__setitem__(self, key, perm) @@ -441,7 +440,7 @@ class PermissionCalculator(object): def calculate(self): if self.user_is_admin and not self.calculate_super_admin_as_user: - return self._calculate_admin_permissions() + return self._calculate_super_admin_permissions() self._calculate_global_default_permissions() 
self._calculate_global_permissions() @@ -452,9 +451,9 @@ class PermissionCalculator(object): self._calculate_user_group_permissions() return self._permission_structure() - def _calculate_admin_permissions(self): + def _calculate_super_admin_permissions(self): """ - admin user have all default rights for repositories + super-admin user have all default rights for repositories and groups set to admin """ self.permissions_global.add('hg.admin') @@ -774,6 +773,7 @@ class PermissionCalculator(object): for perm in user_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name obj_id = perm.UserRepoToPerm.repository.repo_id + archived = perm.UserRepoToPerm.repository.archived p = perm.Permission.permission_name o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username @@ -795,6 +795,15 @@ class PermissionCalculator(object): o = PermOrigin.SUPER_ADMIN self.permissions_repositories[r_k] = p, o, obj_id + # finally in case of archived repositories, we downgrade higher + # permissions to read + if archived: + current_perm = self.permissions_repositories[r_k] + if current_perm in ['repository.write', 'repository.admin']: + p = 'repository.read' + o = PermOrigin.ARCHIVED + self.permissions_repositories[r_k] = p, o, obj_id + def _calculate_repository_branch_permissions(self): # user group for repositories permissions user_repo_branch_perms_from_user_group = Permission\ diff --git a/rhodecode/lib/base.py b/rhodecode/lib/base.py --- a/rhodecode/lib/base.py +++ b/rhodecode/lib/base.py @@ -384,7 +384,8 @@ def attach_context_attributes(context, r session_attrs = { # defaults "clone_url_format": "http", - "diffmode": "sideside" + "diffmode": "sideside", + "license_fingerprint": request.session.get('license_fingerprint') } if not is_api: diff --git a/rhodecode/lib/bleach_whitelist.py b/rhodecode/lib/bleach_whitelist.py --- a/rhodecode/lib/bleach_whitelist.py +++ b/rhodecode/lib/bleach_whitelist.py @@ -61,6 +61,8 @@ markdown_tags = [ "img", "a", "input", + "details", + "summary" ] 
markdown_attrs = { diff --git a/rhodecode/lib/celerylib/tasks.py b/rhodecode/lib/celerylib/tasks.py --- a/rhodecode/lib/celerylib/tasks.py +++ b/rhodecode/lib/celerylib/tasks.py @@ -29,18 +29,20 @@ import time from pyramid import compat from pyramid_mailer.mailer import Mailer from pyramid_mailer.message import Message +from email.utils import formatdate import rhodecode from rhodecode.lib import audit_logger from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask -from rhodecode.lib.hooks_base import log_create_repository +from rhodecode.lib import hooks_base from rhodecode.lib.utils2 import safe_int, str2bool from rhodecode.model.db import ( Session, IntegrityError, true, Repository, RepoGroup, User) @async_task(ignore_result=True, base=RequestContextTask) -def send_email(recipients, subject, body='', html_body='', email_config=None): +def send_email(recipients, subject, body='', html_body='', email_config=None, + extra_headers=None): """ Sends an email with defined parameters from the .ini files. 
@@ -50,6 +52,7 @@ def send_email(recipients, subject, body :param body: body of the mail :param html_body: html version of body :param email_config: specify custom configuration for mailer + :param extra_headers: specify custom headers """ log = get_logger(send_email) @@ -108,13 +111,23 @@ def send_email(recipients, subject, body # sendmail_template='', ) + if extra_headers is None: + extra_headers = {} + + extra_headers.setdefault('Date', formatdate(time.time())) + + if 'thread_ids' in extra_headers: + thread_ids = extra_headers.pop('thread_ids') + extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids) + try: mailer = Mailer(**email_conf) message = Message(subject=subject, sender=email_conf['default_sender'], recipients=recipients, - body=body, html=html_body) + body=body, html=html_body, + extra_headers=extra_headers) mailer.send_immediately(message) except Exception: @@ -187,7 +200,7 @@ def create_repo(form_data, cur_user): clone_uri=clone_uri, ) repo = Repository.get_by_repo_name(repo_name_full) - log_create_repository(created_by=owner.username, **repo.get_dict()) + hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) # update repo commit caches initially repo.update_commit_cache() @@ -273,7 +286,7 @@ def create_repo_fork(form_data, cur_user clone_uri=source_repo_path, ) repo = Repository.get_by_repo_name(repo_name_full) - log_create_repository(created_by=owner.username, **repo.get_dict()) + hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) # update repo commit caches initially config = repo._config diff --git a/rhodecode/lib/codeblocks.py b/rhodecode/lib/codeblocks.py --- a/rhodecode/lib/codeblocks.py +++ b/rhodecode/lib/codeblocks.py @@ -540,10 +540,11 @@ class DiffSet(object): }) file_chunks = patch['chunks'][1:] - for hunk in file_chunks: + for i, hunk in enumerate(file_chunks, 1): hunkbit = self.parse_hunk(hunk, source_file, target_file) hunkbit.source_file_path = source_file_path 
hunkbit.target_file_path = target_file_path + hunkbit.index = i filediff.hunks.append(hunkbit) # Simulate hunk on OPS type line which doesn't really contain any diff diff --git a/rhodecode/lib/dbmigrate/schema/db_4_19_0_2.py b/rhodecode/lib/dbmigrate/schema/db_4_19_0_2.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/schema/db_4_19_0_2.py @@ -0,0 +1,5625 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2010-2020 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +Database Models for RhodeCode Enterprise +""" + +import re +import os +import time +import string +import hashlib +import logging +import datetime +import uuid +import warnings +import ipaddress +import functools +import traceback +import collections + +from sqlalchemy import ( + or_, and_, not_, func, cast, TypeDecorator, event, + Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, + Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, + Text, Float, PickleType, BigInteger) +from sqlalchemy.sql.expression import true, false, case +from sqlalchemy.sql.functions import coalesce, count # pragma: no cover +from sqlalchemy.orm import ( + relationship, joinedload, class_mapper, validates, aliased) +from sqlalchemy.ext.declarative import declared_attr +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.exc import IntegrityError # pragma: no cover +from sqlalchemy.dialects.mysql import LONGTEXT +from zope.cachedescriptors.property import Lazy as LazyProperty +from pyramid import compat +from pyramid.threadlocal import get_current_request +from webhelpers2.text import remove_formatting + +from rhodecode.translation import _ +from rhodecode.lib.vcs import get_vcs_instance, VCSError +from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference +from rhodecode.lib.utils2 import ( + str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe, + time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict, + glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict) +from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \ + JsonRaw +from rhodecode.lib.ext_json import json +from rhodecode.lib.caching_query import FromCache +from rhodecode.lib.encrypt import 
AESCipher, validate_and_get_enc_data +from rhodecode.lib.encrypt2 import Encryptor +from rhodecode.lib.exceptions import ( + ArtifactMetadataDuplicate, ArtifactMetadataBadValueType) +from rhodecode.model.meta import Base, Session + +URL_SEP = '/' +log = logging.getLogger(__name__) + +# ============================================================================= +# BASE CLASSES +# ============================================================================= + +# this is propagated from .ini file rhodecode.encrypted_values.secret or +# beaker.session.secret if first is not set. +# and initialized at environment.py +ENCRYPTION_KEY = None + +# used to sort permissions by types, '#' used here is not allowed to be in +# usernames, and it's very early in sorted string.printable table. +PERMISSION_TYPE_SORT = { + 'admin': '####', + 'write': '###', + 'read': '##', + 'none': '#', +} + + +def display_user_sort(obj): + """ + Sort function used to sort permissions in .permissions() function of + Repository, RepoGroup, UserGroup. Also it put the default user in front + of all other resources + """ + + if obj.username == User.DEFAULT_USER: + return '#####' + prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') + return prefix + obj.username + + +def display_user_group_sort(obj): + """ + Sort function used to sort permissions in .permissions() function of + Repository, RepoGroup, UserGroup. 
Also it put the default user in front + of all other resources + """ + + prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') + return prefix + obj.users_group_name + + +def _hash_key(k): + return sha1_safe(k) + + +def in_filter_generator(qry, items, limit=500): + """ + Splits IN() into multiple with OR + e.g.:: + cnt = Repository.query().filter( + or_( + *in_filter_generator(Repository.repo_id, range(100000)) + )).count() + """ + if not items: + # empty list will cause empty query which might cause security issues + # this can lead to hidden unpleasant results + items = [-1] + + parts = [] + for chunk in xrange(0, len(items), limit): + parts.append( + qry.in_(items[chunk: chunk + limit]) + ) + + return parts + + +base_table_args = { + 'extend_existing': True, + 'mysql_engine': 'InnoDB', + 'mysql_charset': 'utf8', + 'sqlite_autoincrement': True +} + + +class EncryptedTextValue(TypeDecorator): + """ + Special column for encrypted long text data, use like:: + + value = Column("encrypted_value", EncryptedValue(), nullable=False) + + This column is intelligent so if value is in unencrypted form it return + unencrypted form, but on save it always encrypts + """ + impl = Text + + def process_bind_param(self, value, dialect): + """ + Setter for storing value + """ + import rhodecode + if not value: + return value + + # protect against double encrypting if values is already encrypted + if value.startswith('enc$aes$') \ + or value.startswith('enc$aes_hmac$') \ + or value.startswith('enc2$'): + raise ValueError('value needs to be in unencrypted format, ' + 'ie. 
not starting with enc$ or enc2$') + + algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes' + if algo == 'aes': + return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value) + elif algo == 'fernet': + return Encryptor(ENCRYPTION_KEY).encrypt(value) + else: + ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo)) + + def process_result_value(self, value, dialect): + """ + Getter for retrieving value + """ + + import rhodecode + if not value: + return value + + algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes' + enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True) + if algo == 'aes': + decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode) + elif algo == 'fernet': + return Encryptor(ENCRYPTION_KEY).decrypt(value) + else: + ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo)) + return decrypted_data + + +class BaseModel(object): + """ + Base Model for all classes + """ + + @classmethod + def _get_keys(cls): + """return column names for this model """ + return class_mapper(cls).c.keys() + + def get_dict(self): + """ + return dict with keys and values corresponding + to this model data """ + + d = {} + for k in self._get_keys(): + d[k] = getattr(self, k) + + # also use __json__() if present to get additional fields + _json_attr = getattr(self, '__json__', None) + if _json_attr: + # update with attributes from __json__ + if callable(_json_attr): + _json_attr = _json_attr() + for k, val in _json_attr.iteritems(): + d[k] = val + return d + + def get_appstruct(self): + """return list with keys and values tuples corresponding + to this model data """ + + lst = [] + for k in self._get_keys(): + lst.append((k, getattr(self, k),)) + return lst + + def populate_obj(self, populate_dict): + """populate model with data from given populate_dict""" + + for k in self._get_keys(): + if k 
in populate_dict: + setattr(self, k, populate_dict[k]) + + @classmethod + def query(cls): + return Session().query(cls) + + @classmethod + def get(cls, id_): + if id_: + return cls.query().get(id_) + + @classmethod + def get_or_404(cls, id_): + from pyramid.httpexceptions import HTTPNotFound + + try: + id_ = int(id_) + except (TypeError, ValueError): + raise HTTPNotFound() + + res = cls.query().get(id_) + if not res: + raise HTTPNotFound() + return res + + @classmethod + def getAll(cls): + # deprecated and left for backward compatibility + return cls.get_all() + + @classmethod + def get_all(cls): + return cls.query().all() + + @classmethod + def delete(cls, id_): + obj = cls.query().get(id_) + Session().delete(obj) + + @classmethod + def identity_cache(cls, session, attr_name, value): + exist_in_session = [] + for (item_cls, pkey), instance in session.identity_map.items(): + if cls == item_cls and getattr(instance, attr_name) == value: + exist_in_session.append(instance) + if exist_in_session: + if len(exist_in_session) == 1: + return exist_in_session[0] + log.exception( + 'multiple objects with attr %s and ' + 'value %s found with same name: %r', + attr_name, value, exist_in_session) + + def __repr__(self): + if hasattr(self, '__unicode__'): + # python repr needs to return str + try: + return safe_str(self.__unicode__()) + except UnicodeDecodeError: + pass + return '' % (self.__class__.__name__) + + +class RhodeCodeSetting(Base, BaseModel): + __tablename__ = 'rhodecode_settings' + __table_args__ = ( + UniqueConstraint('app_settings_name'), + base_table_args + ) + + SETTINGS_TYPES = { + 'str': safe_str, + 'int': safe_int, + 'unicode': safe_unicode, + 'bool': str2bool, + 'list': functools.partial(aslist, sep=',') + } + DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' + GLOBAL_CONF_KEY = 'app_settings' + + app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + app_settings_name = 
Column("app_settings_name", String(255), nullable=True, unique=None, default=None) + _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) + _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) + + def __init__(self, key='', val='', type='unicode'): + self.app_settings_name = key + self.app_settings_type = type + self.app_settings_value = val + + @validates('_app_settings_value') + def validate_settings_value(self, key, val): + assert type(val) == unicode + return val + + @hybrid_property + def app_settings_value(self): + v = self._app_settings_value + _type = self.app_settings_type + if _type: + _type = self.app_settings_type.split('.')[0] + # decode the encrypted value + if 'encrypted' in self.app_settings_type: + cipher = EncryptedTextValue() + v = safe_unicode(cipher.process_result_value(v, None)) + + converter = self.SETTINGS_TYPES.get(_type) or \ + self.SETTINGS_TYPES['unicode'] + return converter(v) + + @app_settings_value.setter + def app_settings_value(self, val): + """ + Setter that will always make sure we use unicode in app_settings_value + + :param val: + """ + val = safe_unicode(val) + # encode the encrypted value + if 'encrypted' in self.app_settings_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + self._app_settings_value = val + + @hybrid_property + def app_settings_type(self): + return self._app_settings_type + + @app_settings_type.setter + def app_settings_type(self, val): + if val.split('.')[0] not in self.SETTINGS_TYPES: + raise Exception('type must be one of %s got %s' + % (self.SETTINGS_TYPES.keys(), val)) + self._app_settings_type = val + + @classmethod + def get_by_prefix(cls, prefix): + return RhodeCodeSetting.query()\ + .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\ + .all() + + def __unicode__(self): + return u"<%s('%s:%s[%s]')>" % ( + self.__class__.__name__, + 
self.app_settings_name, self.app_settings_value, + self.app_settings_type + ) + + +class RhodeCodeUi(Base, BaseModel): + __tablename__ = 'rhodecode_ui' + __table_args__ = ( + UniqueConstraint('ui_key'), + base_table_args + ) + + HOOK_REPO_SIZE = 'changegroup.repo_size' + # HG + HOOK_PRE_PULL = 'preoutgoing.pre_pull' + HOOK_PULL = 'outgoing.pull_logger' + HOOK_PRE_PUSH = 'prechangegroup.pre_push' + HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push' + HOOK_PUSH = 'changegroup.push_logger' + HOOK_PUSH_KEY = 'pushkey.key_push' + + HOOKS_BUILTIN = [ + HOOK_PRE_PULL, + HOOK_PULL, + HOOK_PRE_PUSH, + HOOK_PRETX_PUSH, + HOOK_PUSH, + HOOK_PUSH_KEY, + ] + + # TODO: johbo: Unify way how hooks are configured for git and hg, + # git part is currently hardcoded. + + # SVN PATTERNS + SVN_BRANCH_ID = 'vcs_svn_branch' + SVN_TAG_ID = 'vcs_svn_tag' + + ui_id = Column( + "ui_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + ui_section = Column( + "ui_section", String(255), nullable=True, unique=None, default=None) + ui_key = Column( + "ui_key", String(255), nullable=True, unique=None, default=None) + ui_value = Column( + "ui_value", String(255), nullable=True, unique=None, default=None) + ui_active = Column( + "ui_active", Boolean(), nullable=True, unique=None, default=True) + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section, + self.ui_key, self.ui_value) + + +class RepoRhodeCodeSetting(Base, BaseModel): + __tablename__ = 'repo_rhodecode_settings' + __table_args__ = ( + UniqueConstraint( + 'app_settings_name', 'repository_id', + name='uq_repo_rhodecode_setting_name_repo_id'), + base_table_args + ) + + repository_id = Column( + "repository_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + app_settings_id = Column( + "app_settings_id", Integer(), nullable=False, unique=True, + default=None, primary_key=True) + app_settings_name = Column( + "app_settings_name", String(255), nullable=True, 
unique=None, + default=None) + _app_settings_value = Column( + "app_settings_value", String(4096), nullable=True, unique=None, + default=None) + _app_settings_type = Column( + "app_settings_type", String(255), nullable=True, unique=None, + default=None) + + repository = relationship('Repository') + + def __init__(self, repository_id, key='', val='', type='unicode'): + self.repository_id = repository_id + self.app_settings_name = key + self.app_settings_type = type + self.app_settings_value = val + + @validates('_app_settings_value') + def validate_settings_value(self, key, val): + assert type(val) == unicode + return val + + @hybrid_property + def app_settings_value(self): + v = self._app_settings_value + type_ = self.app_settings_type + SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES + converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] + return converter(v) + + @app_settings_value.setter + def app_settings_value(self, val): + """ + Setter that will always make sure we use unicode in app_settings_value + + :param val: + """ + self._app_settings_value = safe_unicode(val) + + @hybrid_property + def app_settings_type(self): + return self._app_settings_type + + @app_settings_type.setter + def app_settings_type(self, val): + SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES + if val not in SETTINGS_TYPES: + raise Exception('type must be one of %s got %s' + % (SETTINGS_TYPES.keys(), val)) + self._app_settings_type = val + + def __unicode__(self): + return u"<%s('%s:%s:%s[%s]')>" % ( + self.__class__.__name__, self.repository.repo_name, + self.app_settings_name, self.app_settings_value, + self.app_settings_type + ) + + +class RepoRhodeCodeUi(Base, BaseModel): + __tablename__ = 'repo_rhodecode_ui' + __table_args__ = ( + UniqueConstraint( + 'repository_id', 'ui_section', 'ui_key', + name='uq_repo_rhodecode_ui_repository_id_section_key'), + base_table_args + ) + + repository_id = Column( + "repository_id", Integer(), ForeignKey('repositories.repo_id'), + 
nullable=False) + ui_id = Column( + "ui_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + ui_section = Column( + "ui_section", String(255), nullable=True, unique=None, default=None) + ui_key = Column( + "ui_key", String(255), nullable=True, unique=None, default=None) + ui_value = Column( + "ui_value", String(255), nullable=True, unique=None, default=None) + ui_active = Column( + "ui_active", Boolean(), nullable=True, unique=None, default=True) + + repository = relationship('Repository') + + def __repr__(self): + return '<%s[%s:%s]%s=>%s]>' % ( + self.__class__.__name__, self.repository.repo_name, + self.ui_section, self.ui_key, self.ui_value) + + +class User(Base, BaseModel): + __tablename__ = 'users' + __table_args__ = ( + UniqueConstraint('username'), UniqueConstraint('email'), + Index('u_username_idx', 'username'), + Index('u_email_idx', 'email'), + base_table_args + ) + + DEFAULT_USER = 'default' + DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' + DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' + + user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + username = Column("username", String(255), nullable=True, unique=None, default=None) + password = Column("password", String(255), nullable=True, unique=None, default=None) + active = Column("active", Boolean(), nullable=True, unique=None, default=True) + admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) + name = Column("firstname", String(255), nullable=True, unique=None, default=None) + lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) + _email = Column("email", String(255), nullable=True, unique=None, default=None) + last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) + last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) + 
description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + + extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) + extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) + _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) + inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data + + user_log = relationship('UserLog') + user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') + + repositories = relationship('Repository') + repository_groups = relationship('RepoGroup') + user_groups = relationship('UserGroup') + + user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') + followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') + + repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') + repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + + group_member = relationship('UserGroupMember', cascade='all') + + notifications = relationship('UserNotification', cascade='all') + # notifications assigned to this user + user_created_notifications = relationship('Notification', cascade='all') + # comments created by this user + user_comments = relationship('ChangesetComment', cascade='all') + # user profile 
extra info + user_emails = relationship('UserEmailMap', cascade='all') + user_ip_map = relationship('UserIpMap', cascade='all') + user_auth_tokens = relationship('UserApiKeys', cascade='all') + user_ssh_keys = relationship('UserSshKeys', cascade='all') + + # gists + user_gists = relationship('Gist', cascade='all') + # user pull requests + user_pull_requests = relationship('PullRequest', cascade='all') + + # external identities + external_identities = relationship( + 'ExternalIdentity', + primaryjoin="User.user_id==ExternalIdentity.local_user_id", + cascade='all') + # review rules + user_review_rules = relationship('RepoReviewRuleUser', cascade='all') + + # artifacts owned + artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id') + + # no cascade, set NULL + scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id') + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % (self.__class__.__name__, + self.user_id, self.username) + + @hybrid_property + def email(self): + return self._email + + @email.setter + def email(self, val): + self._email = val.lower() if val else None + + @hybrid_property + def first_name(self): + from rhodecode.lib import helpers as h + if self.name: + return h.escape(self.name) + return self.name + + @hybrid_property + def last_name(self): + from rhodecode.lib import helpers as h + if self.lastname: + return h.escape(self.lastname) + return self.lastname + + @hybrid_property + def api_key(self): + """ + Fetch if exist an auth-token with role ALL connected to this user + """ + user_auth_token = UserApiKeys.query()\ + .filter(UserApiKeys.user_id == self.user_id)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time()))\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first() + if user_auth_token: + user_auth_token = user_auth_token.api_key + + return user_auth_token + + @api_key.setter + def api_key(self, val): + # don't allow to set API key this is 
deprecated for now + self._api_key = None + + @property + def reviewer_pull_requests(self): + return PullRequestReviewers.query() \ + .options(joinedload(PullRequestReviewers.pull_request)) \ + .filter(PullRequestReviewers.user_id == self.user_id) \ + .all() + + @property + def firstname(self): + # alias for future + return self.name + + @property + def emails(self): + other = UserEmailMap.query()\ + .filter(UserEmailMap.user == self) \ + .order_by(UserEmailMap.email_id.asc()) \ + .all() + return [self.email] + [x.email for x in other] + + def emails_cached(self): + emails = UserEmailMap.query()\ + .filter(UserEmailMap.user == self) \ + .order_by(UserEmailMap.email_id.asc()) + + emails = emails.options( + FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id)) + ) + + return [self.email] + [x.email for x in emails] + + @property + def auth_tokens(self): + auth_tokens = self.get_auth_tokens() + return [x.api_key for x in auth_tokens] + + def get_auth_tokens(self): + return UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .order_by(UserApiKeys.user_api_key_id.asc())\ + .all() + + @LazyProperty + def feed_token(self): + return self.get_feed_token() + + def get_feed_token(self, cache=True): + feed_tokens = UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) + if cache: + feed_tokens = feed_tokens.options( + FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id)) + + feed_tokens = feed_tokens.all() + if feed_tokens: + return feed_tokens[0].api_key + return 'NO_FEED_TOKEN_AVAILABLE' + + @LazyProperty + def artifact_token(self): + return self.get_artifact_token() + + def get_artifact_token(self, cache=True): + artifacts_tokens = UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) + if cache: + artifacts_tokens = artifacts_tokens.options( + FromCache("sql_cache_short", "get_user_artifact_token_%s" % 
self.user_id)) + + artifacts_tokens = artifacts_tokens.all() + if artifacts_tokens: + return artifacts_tokens[0].api_key + return 'NO_ARTIFACT_TOKEN_AVAILABLE' + + @classmethod + def get(cls, user_id, cache=False): + if not user_id: + return + + user = cls.query() + if cache: + user = user.options( + FromCache("sql_cache_short", "get_users_%s" % user_id)) + return user.get(user_id) + + @classmethod + def extra_valid_auth_tokens(cls, user, role=None): + tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + if role: + tokens = tokens.filter(or_(UserApiKeys.role == role, + UserApiKeys.role == UserApiKeys.ROLE_ALL)) + return tokens.all() + + def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None): + from rhodecode.lib import auth + + log.debug('Trying to authenticate user: %s via auth-token, ' + 'and roles: %s', self, roles) + + if not auth_token: + return False + + roles = (roles or []) + [UserApiKeys.ROLE_ALL] + tokens_q = UserApiKeys.query()\ + .filter(UserApiKeys.user_id == self.user_id)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + + tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles)) + + crypto_backend = auth.crypto_backend() + enc_token_map = {} + plain_token_map = {} + for token in tokens_q: + if token.api_key.startswith(crypto_backend.ENC_PREF): + enc_token_map[token.api_key] = token + else: + plain_token_map[token.api_key] = token + log.debug( + 'Found %s plain and %s encrypted tokens to check for authentication for this user', + len(plain_token_map), len(enc_token_map)) + + # plain token match comes first + match = plain_token_map.get(auth_token) + + # check encrypted tokens now + if not match: + for token_hash, token in enc_token_map.items(): + # NOTE(marcink): this is expensive to calculate, but most secure + if crypto_backend.hash_check(auth_token, token_hash): + match = token + break + + if match: + 
log.debug('Found matching token %s', match) + if match.repo_id: + log.debug('Found scope, checking for scope match of token %s', match) + if match.repo_id == scope_repo_id: + return True + else: + log.debug( + 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, ' + 'and calling scope is:%s, skipping further checks', + match.repo, scope_repo_id) + return False + else: + return True + + return False + + @property + def ip_addresses(self): + ret = UserIpMap.query().filter(UserIpMap.user == self).all() + return [x.ip_addr for x in ret] + + @property + def username_and_name(self): + return '%s (%s %s)' % (self.username, self.first_name, self.last_name) + + @property + def username_or_name_or_email(self): + full_name = self.full_name if self.full_name is not ' ' else None + return self.username or full_name or self.email + + @property + def full_name(self): + return '%s %s' % (self.first_name, self.last_name) + + @property + def full_name_or_username(self): + return ('%s %s' % (self.first_name, self.last_name) + if (self.first_name and self.last_name) else self.username) + + @property + def full_contact(self): + return '%s %s <%s>' % (self.first_name, self.last_name, self.email) + + @property + def short_contact(self): + return '%s %s' % (self.first_name, self.last_name) + + @property + def is_admin(self): + return self.admin + + @property + def language(self): + return self.user_data.get('language') + + def AuthUser(self, **kwargs): + """ + Returns instance of AuthUser for this user + """ + from rhodecode.lib.auth import AuthUser + return AuthUser(user_id=self.user_id, username=self.username, **kwargs) + + @hybrid_property + def user_data(self): + if not self._user_data: + return {} + + try: + return json.loads(self._user_data) + except TypeError: + return {} + + @user_data.setter + def user_data(self, val): + if not isinstance(val, dict): + raise Exception('user_data must be dict, got %s' % type(val)) + try: + self._user_data = json.dumps(val) + except 
Exception: + log.error(traceback.format_exc()) + + @classmethod + def get_by_username(cls, username, case_insensitive=False, + cache=False, identity_cache=False): + session = Session() + + if case_insensitive: + q = cls.query().filter( + func.lower(cls.username) == func.lower(username)) + else: + q = cls.query().filter(cls.username == username) + + if cache: + if identity_cache: + val = cls.identity_cache(session, 'username', username) + if val: + return val + else: + cache_key = "get_user_by_name_%s" % _hash_key(username) + q = q.options( + FromCache("sql_cache_short", cache_key)) + + return q.scalar() + + @classmethod + def get_by_auth_token(cls, auth_token, cache=False): + q = UserApiKeys.query()\ + .filter(UserApiKeys.api_key == auth_token)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_auth_token_%s" % auth_token)) + + match = q.first() + if match: + return match.user + + @classmethod + def get_by_email(cls, email, case_insensitive=False, cache=False): + + if case_insensitive: + q = cls.query().filter(func.lower(cls.email) == func.lower(email)) + + else: + q = cls.query().filter(cls.email == email) + + email_key = _hash_key(email) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_email_key_%s" % email_key)) + + ret = q.scalar() + if ret is None: + q = UserEmailMap.query() + # try fetching in alternate email map + if case_insensitive: + q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) + else: + q = q.filter(UserEmailMap.email == email) + q = q.options(joinedload(UserEmailMap.user)) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_email_map_key_%s" % email_key)) + ret = getattr(q.scalar(), 'user', None) + + return ret + + @classmethod + def get_from_cs_author(cls, author): + """ + Tries to get User objects out of commit author string + + :param author: + """ + from rhodecode.lib.helpers import email, author_name + # 
Valid email in the attribute passed, see if they're in the system + _email = email(author) + if _email: + user = cls.get_by_email(_email, case_insensitive=True) + if user: + return user + # Maybe we can match by username? + _author = author_name(author) + user = cls.get_by_username(_author, case_insensitive=True) + if user: + return user + + def update_userdata(self, **kwargs): + usr = self + old = usr.user_data + old.update(**kwargs) + usr.user_data = old + Session().add(usr) + log.debug('updated userdata with %s', kwargs) + + def update_lastlogin(self): + """Update user lastlogin""" + self.last_login = datetime.datetime.now() + Session().add(self) + log.debug('updated user %s lastlogin', self.username) + + def update_password(self, new_password): + from rhodecode.lib.auth import get_crypt_password + + self.password = get_crypt_password(new_password) + Session().add(self) + + @classmethod + def get_first_super_admin(cls): + user = User.query()\ + .filter(User.admin == true()) \ + .order_by(User.user_id.asc()) \ + .first() + + if user is None: + raise Exception('FATAL: Missing administrative account!') + return user + + @classmethod + def get_all_super_admins(cls, only_active=False): + """ + Returns all admin accounts sorted by username + """ + qry = User.query().filter(User.admin == true()).order_by(User.username.asc()) + if only_active: + qry = qry.filter(User.active == true()) + return qry.all() + + @classmethod + def get_all_user_ids(cls, only_active=True): + """ + Returns all users IDs + """ + qry = Session().query(User.user_id) + + if only_active: + qry = qry.filter(User.active == true()) + return [x.user_id for x in qry] + + @classmethod + def get_default_user(cls, cache=False, refresh=False): + user = User.get_by_username(User.DEFAULT_USER, cache=cache) + if user is None: + raise Exception('FATAL: Missing default account!') + if refresh: + # The default user might be based on outdated state which + # has been loaded from the cache. 
+ # A call to refresh() ensures that the + # latest state from the database is used. + Session().refresh(user) + return user + + @classmethod + def get_default_user_id(cls): + import rhodecode + return rhodecode.CONFIG['default_user_id'] + + def _get_default_perms(self, user, suffix=''): + from rhodecode.model.permission import PermissionModel + return PermissionModel().get_default_perms(user.user_perms, suffix) + + def get_default_perms(self, suffix=''): + return self._get_default_perms(self, suffix) + + def get_api_data(self, include_secrets=False, details='full'): + """ + Common function for generating user related data for API + + :param include_secrets: By default secrets in the API data will be replaced + by a placeholder value to prevent exposing this data by accident. In case + this data shall be exposed, set this flag to ``True``. + + :param details: details can be 'basic|full' basic gives only a subset of + the available user information that includes user_id, name and emails. 
+ """ + user = self + user_data = self.user_data + data = { + 'user_id': user.user_id, + 'username': user.username, + 'firstname': user.name, + 'lastname': user.lastname, + 'description': user.description, + 'email': user.email, + 'emails': user.emails, + } + if details == 'basic': + return data + + auth_token_length = 40 + auth_token_replacement = '*' * auth_token_length + + extras = { + 'auth_tokens': [auth_token_replacement], + 'active': user.active, + 'admin': user.admin, + 'extern_type': user.extern_type, + 'extern_name': user.extern_name, + 'last_login': user.last_login, + 'last_activity': user.last_activity, + 'ip_addresses': user.ip_addresses, + 'language': user_data.get('language') + } + data.update(extras) + + if include_secrets: + data['auth_tokens'] = user.auth_tokens + return data + + def __json__(self): + data = { + 'full_name': self.full_name, + 'full_name_or_username': self.full_name_or_username, + 'short_contact': self.short_contact, + 'full_contact': self.full_contact, + } + data.update(self.get_api_data()) + return data + + +class UserApiKeys(Base, BaseModel): + __tablename__ = 'user_api_keys' + __table_args__ = ( + Index('uak_api_key_idx', 'api_key'), + Index('uak_api_key_expires_idx', 'api_key', 'expires'), + base_table_args + ) + __mapper_args__ = {} + + # ApiKey role + ROLE_ALL = 'token_role_all' + ROLE_HTTP = 'token_role_http' + ROLE_VCS = 'token_role_vcs' + ROLE_API = 'token_role_api' + ROLE_FEED = 'token_role_feed' + ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download' + ROLE_PASSWORD_RESET = 'token_password_reset' + + ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD] + + user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + api_key = Column("api_key", String(255), nullable=False, unique=True) + description = Column('description', 
UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + expires = Column('expires', Float(53), nullable=False) + role = Column('role', String(255), nullable=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + # scope columns + repo_id = Column( + 'repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + repo_group_id = Column( + 'repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + user = relationship('User', lazy='joined') + + def __unicode__(self): + return u"<%s('%s')>" % (self.__class__.__name__, self.role) + + def __json__(self): + data = { + 'auth_token': self.api_key, + 'role': self.role, + 'scope': self.scope_humanized, + 'expired': self.expired + } + return data + + def get_api_data(self, include_secrets=False): + data = self.__json__() + if include_secrets: + return data + else: + data['auth_token'] = self.token_obfuscated + return data + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @property + def expired(self): + if self.expires == -1: + return False + return time.time() > self.expires + + @classmethod + def _get_role_name(cls, role): + return { + cls.ROLE_ALL: _('all'), + cls.ROLE_HTTP: _('http/web interface'), + cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), + cls.ROLE_API: _('api calls'), + cls.ROLE_FEED: _('feed access'), + cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'), + }.get(role, role) + + @property + def role_humanized(self): + return self._get_role_name(self.role) + + def _get_scope(self): + if self.repo: + return 'Repository: {}'.format(self.repo.repo_name) + if self.repo_group: + return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name) + return 'Global' + + 
@property + def scope_humanized(self): + return self._get_scope() + + @property + def token_obfuscated(self): + if self.api_key: + return self.api_key[:4] + "****" + + +class UserEmailMap(Base, BaseModel): + __tablename__ = 'user_email_map' + __table_args__ = ( + Index('uem_email_idx', 'email'), + UniqueConstraint('email'), + base_table_args + ) + __mapper_args__ = {} + + email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + _email = Column("email", String(255), nullable=True, unique=False, default=None) + user = relationship('User', lazy='joined') + + @validates('_email') + def validate_email(self, key, email): + # check if this email is not main one + main_email = Session().query(User).filter(User.email == email).scalar() + if main_email is not None: + raise AttributeError('email %s is present is user table' % email) + return email + + @hybrid_property + def email(self): + return self._email + + @email.setter + def email(self, val): + self._email = val.lower() if val else None + + +class UserIpMap(Base, BaseModel): + __tablename__ = 'user_ip_map' + __table_args__ = ( + UniqueConstraint('user_id', 'ip_addr'), + base_table_args + ) + __mapper_args__ = {} + + ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) + active = Column("active", Boolean(), nullable=True, unique=None, default=True) + description = Column("description", String(10000), nullable=True, unique=None, default=None) + user = relationship('User', lazy='joined') + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @classmethod 
+ def _get_ip_range(cls, ip_addr): + net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False) + return [str(net.network_address), str(net.broadcast_address)] + + def __json__(self): + return { + 'ip_addr': self.ip_addr, + 'ip_range': self._get_ip_range(self.ip_addr), + } + + def __unicode__(self): + return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__, + self.user_id, self.ip_addr) + + +class UserSshKeys(Base, BaseModel): + __tablename__ = 'user_ssh_keys' + __table_args__ = ( + Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'), + + UniqueConstraint('ssh_key_fingerprint'), + + base_table_args + ) + __mapper_args__ = {} + + ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True) + ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None) + ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None) + + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None) + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + + user = relationship('User', lazy='joined') + + def __json__(self): + data = { + 'ssh_fingerprint': self.ssh_key_fingerprint, + 'description': self.description, + 'created_on': self.created_on + } + return data + + def get_api_data(self): + data = self.__json__() + return data + + +class UserLog(Base, BaseModel): + __tablename__ = 'user_logs' + __table_args__ = ( + base_table_args, + ) + + VERSION_1 = 'v1' + VERSION_2 = 'v2' + VERSIONS = [VERSION_1, VERSION_2] + + user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", 
Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None) + username = Column("username", String(255), nullable=True, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None) + repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) + user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) + action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) + action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) + + version = Column("version", String(255), nullable=True, default=VERSION_1) + user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) + action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % ( + self.__class__.__name__, self.repository_name, self.action) + + def __json__(self): + return { + 'user_id': self.user_id, + 'username': self.username, + 'repository_id': self.repository_id, + 'repository_name': self.repository_name, + 'user_ip': self.user_ip, + 'action_date': self.action_date, + 'action': self.action, + } + + @hybrid_property + def entry_id(self): + return self.user_log_id + + @property + def action_as_day(self): + return datetime.date(*self.action_date.timetuple()[:3]) + + user = relationship('User') + repository = relationship('Repository', cascade='') + + +class UserGroup(Base, BaseModel): + __tablename__ = 'users_groups' + __table_args__ = ( + base_table_args, + ) + + users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_name = Column("users_group_name", String(255), nullable=False, 
unique=True, default=None) + user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) + users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) + inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data + + members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined") + users_group_to_perm = relationship('UserGroupToPerm', cascade='all') + users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') + users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') + user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') + user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all') + + user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all') + user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id") + + @classmethod + def _load_group_data(cls, column): + if not column: + return {} + + try: + return json.loads(column) or {} + except TypeError: + return {} + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.user_group_description) + + @hybrid_property + def group_data(self): + return self._load_group_data(self._group_data) + + @group_data.expression + def group_data(self, **kwargs): + return self._group_data + + @group_data.setter + def group_data(self, val): + try: + self._group_data = 
json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @classmethod + def _load_sync(cls, group_data): + if group_data: + return group_data.get('extern_type') + + @property + def sync(self): + return self._load_sync(self.group_data) + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % (self.__class__.__name__, + self.users_group_id, + self.users_group_name) + + @classmethod + def get_by_group_name(cls, group_name, cache=False, + case_insensitive=False): + if case_insensitive: + q = cls.query().filter(func.lower(cls.users_group_name) == + func.lower(group_name)) + + else: + q = cls.query().filter(cls.users_group_name == group_name) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name))) + return q.scalar() + + @classmethod + def get(cls, user_group_id, cache=False): + if not user_group_id: + return + + user_group = cls.query() + if cache: + user_group = user_group.options( + FromCache("sql_cache_short", "get_users_group_%s" % user_group_id)) + return user_group.get(user_group_id) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for user groups + """ + _admin_perm = 'usergroup.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + super_admin_rows.append(usr) + + q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) + q = q.options(joinedload(UserUserGroupToPerm.user_group), + joinedload(UserUserGroupToPerm.user), + 
joinedload(UserUserGroupToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + usr.permission = _usr.permission.permission_name + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=False): + q = UserGroupUserGroupToPerm.query()\ + .filter(UserGroupUserGroupToPerm.target_user_group == self) + q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), + joinedload(UserGroupUserGroupToPerm.target_user_group), + joinedload(UserGroupUserGroupToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.user_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.user_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def _get_default_perms(self, user_group, suffix=''): + from rhodecode.model.permission import PermissionModel + return 
PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) + + def get_default_perms(self, suffix=''): + return self._get_default_perms(self, suffix) + + def get_api_data(self, with_group_members=True, include_secrets=False): + """ + :param include_secrets: See :meth:`User.get_api_data`, this parameter is + basically forwarded. + + """ + user_group = self + data = { + 'users_group_id': user_group.users_group_id, + 'group_name': user_group.users_group_name, + 'group_description': user_group.user_group_description, + 'active': user_group.users_group_active, + 'owner': user_group.user.username, + 'sync': user_group.sync, + 'owner_email': user_group.user.email, + } + + if with_group_members: + users = [] + for user in user_group.members: + user = user.user + users.append(user.get_api_data(include_secrets=include_secrets)) + data['users'] = users + + return data + + +class UserGroupMember(Base, BaseModel): + __tablename__ = 'users_groups_members' + __table_args__ = ( + base_table_args, + ) + + users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + + user = relationship('User', lazy='joined') + users_group = relationship('UserGroup') + + def __init__(self, gr_id='', u_id=''): + self.users_group_id = gr_id + self.user_id = u_id + + +class RepositoryField(Base, BaseModel): + __tablename__ = 'repositories_fields' + __table_args__ = ( + UniqueConstraint('repository_id', 'field_key'), # no-multi field + base_table_args, + ) + + PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields + + repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + 
repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + field_key = Column("field_key", String(250)) + field_label = Column("field_label", String(1024), nullable=False) + field_value = Column("field_value", String(10000), nullable=False) + field_desc = Column("field_desc", String(1024), nullable=False) + field_type = Column("field_type", String(255), nullable=False, unique=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + repository = relationship('Repository') + + @property + def field_key_prefixed(self): + return 'ex_%s' % self.field_key + + @classmethod + def un_prefix_key(cls, key): + if key.startswith(cls.PREFIX): + return key[len(cls.PREFIX):] + return key + + @classmethod + def get_by_key_name(cls, key, repo): + row = cls.query()\ + .filter(cls.repository == repo)\ + .filter(cls.field_key == key).scalar() + return row + + +class Repository(Base, BaseModel): + __tablename__ = 'repositories' + __table_args__ = ( + Index('r_repo_name_idx', 'repo_name', mysql_length=255), + base_table_args, + ) + DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' + DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' + DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}' + + STATE_CREATED = 'repo_state_created' + STATE_PENDING = 'repo_state_pending' + STATE_ERROR = 'repo_state_error' + + LOCK_AUTOMATIC = 'lock_auto' + LOCK_API = 'lock_api' + LOCK_WEB = 'lock_web' + LOCK_PULL = 'lock_pull' + + NAME_SEP = URL_SEP + + repo_id = Column( + "repo_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + _repo_name = Column( + "repo_name", Text(), nullable=False, default=None) + repo_name_hash = Column( + "repo_name_hash", String(255), nullable=False, unique=True) + repo_state = Column("repo_state", String(255), nullable=True) + + clone_uri = Column( + "clone_uri", EncryptedTextValue(), 
nullable=True, unique=False, + default=None) + push_uri = Column( + "push_uri", EncryptedTextValue(), nullable=True, unique=False, + default=None) + repo_type = Column( + "repo_type", String(255), nullable=False, unique=False, default=None) + user_id = Column( + "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, + unique=False, default=None) + private = Column( + "private", Boolean(), nullable=True, unique=None, default=None) + archived = Column( + "archived", Boolean(), nullable=True, unique=None, default=None) + enable_statistics = Column( + "statistics", Boolean(), nullable=True, unique=None, default=True) + enable_downloads = Column( + "downloads", Boolean(), nullable=True, unique=None, default=True) + description = Column( + "description", String(10000), nullable=True, unique=None, default=None) + created_on = Column( + 'created_on', DateTime(timezone=False), nullable=True, unique=None, + default=datetime.datetime.now) + updated_on = Column( + 'updated_on', DateTime(timezone=False), nullable=True, unique=None, + default=datetime.datetime.now) + _landing_revision = Column( + "landing_revision", String(255), nullable=False, unique=False, + default=None) + enable_locking = Column( + "enable_locking", Boolean(), nullable=False, unique=None, + default=False) + _locked = Column( + "locked", String(255), nullable=True, unique=False, default=None) + _changeset_cache = Column( + "changeset_cache", LargeBinary(), nullable=True) # JSON data + + fork_id = Column( + "fork_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=False, default=None) + group_id = Column( + "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, + unique=False, default=None) + + user = relationship('User', lazy='joined') + fork = relationship('Repository', remote_side=repo_id, lazy='joined') + group = relationship('RepoGroup', lazy='joined') + repo_to_perm = relationship( + 'UserRepoToPerm', cascade='all', + 
order_by='UserRepoToPerm.repo_to_perm_id') + users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') + stats = relationship('Statistics', cascade='all', uselist=False) + + followers = relationship( + 'UserFollowing', + primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', + cascade='all') + extra_fields = relationship( + 'RepositoryField', cascade="all, delete-orphan") + logs = relationship('UserLog') + comments = relationship( + 'ChangesetComment', cascade="all, delete-orphan") + pull_requests_source = relationship( + 'PullRequest', + primaryjoin='PullRequest.source_repo_id==Repository.repo_id', + cascade="all, delete-orphan") + pull_requests_target = relationship( + 'PullRequest', + primaryjoin='PullRequest.target_repo_id==Repository.repo_id', + cascade="all, delete-orphan") + ui = relationship('RepoRhodeCodeUi', cascade="all") + settings = relationship('RepoRhodeCodeSetting', cascade="all") + integrations = relationship('Integration', cascade="all, delete-orphan") + + scoped_tokens = relationship('UserApiKeys', cascade="all") + + # no cascade, set NULL + artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id') + + def __unicode__(self): + return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, + safe_unicode(self.repo_name)) + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @hybrid_property + def landing_rev(self): + # always should return [rev_type, rev] + if self._landing_revision: + _rev_info = self._landing_revision.split(':') + if len(_rev_info) < 2: + _rev_info.insert(0, 'rev') + return [_rev_info[0], _rev_info[1]] + return [None, None] + + @landing_rev.setter + def landing_rev(self, val): + if ':' not in val: + raise ValueError('value must be delimited with `:` and consist ' + 'of :, got %s instead' % val) + self._landing_revision = val + + @hybrid_property + def locked(self): + if self._locked: + 
user_id, timelocked, reason = self._locked.split(':') + lock_values = int(user_id), timelocked, reason + else: + lock_values = [None, None, None] + return lock_values + + @locked.setter + def locked(self, val): + if val and isinstance(val, (list, tuple)): + self._locked = ':'.join(map(str, val)) + else: + self._locked = None + + @classmethod + def _load_changeset_cache(cls, repo_id, changeset_cache_raw): + from rhodecode.lib.vcs.backends.base import EmptyCommit + dummy = EmptyCommit().__json__() + if not changeset_cache_raw: + dummy['source_repo_id'] = repo_id + return json.loads(json.dumps(dummy)) + + try: + return json.loads(changeset_cache_raw) + except TypeError: + return dummy + except Exception: + log.error(traceback.format_exc()) + return dummy + + @hybrid_property + def changeset_cache(self): + return self._load_changeset_cache(self.repo_id, self._changeset_cache) + + @changeset_cache.setter + def changeset_cache(self, val): + try: + self._changeset_cache = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @hybrid_property + def repo_name(self): + return self._repo_name + + @repo_name.setter + def repo_name(self, value): + self._repo_name = value + self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() + + @classmethod + def normalize_repo_name(cls, repo_name): + """ + Normalizes os specific repo_name to the format internally stored inside + database using URL_SEP + + :param cls: + :param repo_name: + """ + return cls.NAME_SEP.join(repo_name.split(os.sep)) + + @classmethod + def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): + session = Session() + q = session.query(cls).filter(cls.repo_name == repo_name) + + if cache: + if identity_cache: + val = cls.identity_cache(session, 'repo_name', repo_name) + if val: + return val + else: + cache_key = "get_repo_by_name_%s" % _hash_key(repo_name) + q = q.options( + FromCache("sql_cache_short", cache_key)) + + return q.scalar() + + @classmethod + def 
get_by_id_or_repo_name(cls, repoid): + if isinstance(repoid, (int, long)): + try: + repo = cls.get(repoid) + except ValueError: + repo = None + else: + repo = cls.get_by_repo_name(repoid) + return repo + + @classmethod + def get_by_full_path(cls, repo_full_path): + repo_name = repo_full_path.split(cls.base_path(), 1)[-1] + repo_name = cls.normalize_repo_name(repo_name) + return cls.get_by_repo_name(repo_name.strip(URL_SEP)) + + @classmethod + def get_repo_forks(cls, repo_id): + return cls.query().filter(Repository.fork_id == repo_id) + + @classmethod + def base_path(cls): + """ + Returns base path when all repos are stored + + :param cls: + """ + q = Session().query(RhodeCodeUi)\ + .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) + q = q.options(FromCache("sql_cache_short", "repository_repo_path")) + return q.one().ui_value + + @classmethod + def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), + case_insensitive=True, archived=False): + q = Repository.query() + + if not archived: + q = q.filter(Repository.archived.isnot(true())) + + if not isinstance(user_id, Optional): + q = q.filter(Repository.user_id == user_id) + + if not isinstance(group_id, Optional): + q = q.filter(Repository.group_id == group_id) + + if case_insensitive: + q = q.order_by(func.lower(Repository.repo_name)) + else: + q = q.order_by(Repository.repo_name) + + return q.all() + + @property + def repo_uid(self): + return '_{}'.format(self.repo_id) + + @property + def forks(self): + """ + Return forks of this repo + """ + return Repository.get_repo_forks(self.repo_id) + + @property + def parent(self): + """ + Returns fork parent + """ + return self.fork + + @property + def just_name(self): + return self.repo_name.split(self.NAME_SEP)[-1] + + @property + def groups_with_parents(self): + groups = [] + if self.group is None: + return groups + + cur_gr = self.group + groups.insert(0, cur_gr) + while 1: + gr = getattr(cur_gr, 'parent_group', None) + cur_gr = cur_gr.parent_group + if gr is 
None: + break + groups.insert(0, gr) + + return groups + + @property + def groups_and_repo(self): + return self.groups_with_parents, self + + @LazyProperty + def repo_path(self): + """ + Returns base full path for that repository means where it actually + exists on a filesystem + """ + q = Session().query(RhodeCodeUi).filter( + RhodeCodeUi.ui_key == self.NAME_SEP) + q = q.options(FromCache("sql_cache_short", "repository_repo_path")) + return q.one().ui_value + + @property + def repo_full_path(self): + p = [self.repo_path] + # we need to split the name by / since this is how we store the + # names in the database, but that eventually needs to be converted + # into a valid system path + p += self.repo_name.split(self.NAME_SEP) + return os.path.join(*map(safe_unicode, p)) + + @property + def cache_keys(self): + """ + Returns associated cache keys for that repo + """ + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.repo_id) + return CacheKey.query()\ + .filter(CacheKey.cache_args == invalidation_namespace)\ + .order_by(CacheKey.cache_key)\ + .all() + + @property + def cached_diffs_relative_dir(self): + """ + Return a relative to the repository store path of cached diffs + used for safe display for users, who shouldn't know the absolute store + path + """ + return os.path.join( + os.path.dirname(self.repo_name), + self.cached_diffs_dir.split(os.path.sep)[-1]) + + @property + def cached_diffs_dir(self): + path = self.repo_full_path + return os.path.join( + os.path.dirname(path), + '.__shadow_diff_cache_repo_{}'.format(self.repo_id)) + + def cached_diffs(self): + diff_cache_dir = self.cached_diffs_dir + if os.path.isdir(diff_cache_dir): + return os.listdir(diff_cache_dir) + return [] + + def shadow_repos(self): + shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id) + return [ + x for x in os.listdir(os.path.dirname(self.repo_full_path)) + if x.startswith(shadow_repos_pattern)] + + def get_new_name(self, repo_name): + """ + 
returns new full repository name based on assigned group and new new + + :param group_name: + """ + path_prefix = self.group.full_path_splitted if self.group else [] + return self.NAME_SEP.join(path_prefix + [repo_name]) + + @property + def _config(self): + """ + Returns db based config object. + """ + from rhodecode.lib.utils import make_db_config + return make_db_config(clear_session=False, repo=self) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for repositories + """ + _admin_perm = 'repository.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + usr.permission_id = None + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + usr.permission_id = None + super_admin_rows.append(usr) + + q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) + q = q.options(joinedload(UserRepoToPerm.repository), + joinedload(UserRepoToPerm.user), + joinedload(UserRepoToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. 
This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + # also check if this permission is maybe used by branch_permissions + if _usr.branch_perm_entry: + usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry] + + usr.permission = _usr.permission.permission_name + usr.permission_id = _usr.repo_to_perm_id + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=True): + q = UserGroupRepoToPerm.query()\ + .filter(UserGroupRepoToPerm.repository == self) + q = q.options(joinedload(UserGroupRepoToPerm.repository), + joinedload(UserGroupRepoToPerm.users_group), + joinedload(UserGroupRepoToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.users_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.users_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def get_api_data(self, include_secrets=False): + """ + Common function for generating repo api data + + :param include_secrets: See :meth:`User.get_api_data`. 
+ + """ + # TODO: mikhail: Here there is an anti-pattern, we probably need to + # move this methods on models level. + from rhodecode.model.settings import SettingsModel + from rhodecode.model.repo import RepoModel + + repo = self + _user_id, _time, _reason = self.locked + + data = { + 'repo_id': repo.repo_id, + 'repo_name': repo.repo_name, + 'repo_type': repo.repo_type, + 'clone_uri': repo.clone_uri or '', + 'push_uri': repo.push_uri or '', + 'url': RepoModel().get_url(self), + 'private': repo.private, + 'created_on': repo.created_on, + 'description': repo.description_safe, + 'landing_rev': repo.landing_rev, + 'owner': repo.user.username, + 'fork_of': repo.fork.repo_name if repo.fork else None, + 'fork_of_id': repo.fork.repo_id if repo.fork else None, + 'enable_statistics': repo.enable_statistics, + 'enable_locking': repo.enable_locking, + 'enable_downloads': repo.enable_downloads, + 'last_changeset': repo.changeset_cache, + 'locked_by': User.get(_user_id).get_api_data( + include_secrets=include_secrets) if _user_id else None, + 'locked_date': time_to_datetime(_time) if _time else None, + 'lock_reason': _reason if _reason else None, + } + + # TODO: mikhail: should be per-repo settings here + rc_config = SettingsModel().get_all_settings() + repository_fields = str2bool( + rc_config.get('rhodecode_repository_fields')) + if repository_fields: + for f in self.extra_fields: + data[f.field_key_prefixed] = f.field_value + + return data + + @classmethod + def lock(cls, repo, user_id, lock_time=None, lock_reason=None): + if not lock_time: + lock_time = time.time() + if not lock_reason: + lock_reason = cls.LOCK_AUTOMATIC + repo.locked = [user_id, lock_time, lock_reason] + Session().add(repo) + Session().commit() + + @classmethod + def unlock(cls, repo): + repo.locked = None + Session().add(repo) + Session().commit() + + @classmethod + def getlock(cls, repo): + return repo.locked + + def is_user_lock(self, user_id): + if self.lock[0]: + lock_user_id = safe_int(self.lock[0]) 
+ user_id = safe_int(user_id) + # both are ints, and they are equal + return all([lock_user_id, user_id]) and lock_user_id == user_id + + return False + + def get_locking_state(self, action, user_id, only_when_enabled=True): + """ + Checks locking on this repository, if locking is enabled and lock is + present returns a tuple of make_lock, locked, locked_by. + make_lock can have 3 states None (do nothing) True, make lock + False release lock, This value is later propagated to hooks, which + do the locking. Think about this as signals passed to hooks what to do. + + """ + # TODO: johbo: This is part of the business logic and should be moved + # into the RepositoryModel. + + if action not in ('push', 'pull'): + raise ValueError("Invalid action value: %s" % repr(action)) + + # defines if locked error should be thrown to user + currently_locked = False + # defines if new lock should be made, tri-state + make_lock = None + repo = self + user = User.get(user_id) + + lock_info = repo.locked + + if repo and (repo.enable_locking or not only_when_enabled): + if action == 'push': + # check if it's already locked !, if it is compare users + locked_by_user_id = lock_info[0] + if user.user_id == locked_by_user_id: + log.debug( + 'Got `push` action from user %s, now unlocking', user) + # unlock if we have push from user who locked + make_lock = False + else: + # we're not the same user who locked, ban with + # code defined in settings (default is 423 HTTP Locked) ! 
+ log.debug('Repo %s is currently locked by %s', repo, user) + currently_locked = True + elif action == 'pull': + # [0] user [1] date + if lock_info[0] and lock_info[1]: + log.debug('Repo %s is currently locked by %s', repo, user) + currently_locked = True + else: + log.debug('Setting lock on repo %s by %s', repo, user) + make_lock = True + + else: + log.debug('Repository %s do not have locking enabled', repo) + + log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', + make_lock, currently_locked, lock_info) + + from rhodecode.lib.auth import HasRepoPermissionAny + perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') + if make_lock and not perm_check(repo_name=repo.repo_name, user=user): + # if we don't have at least write permission we cannot make a lock + log.debug('lock state reset back to FALSE due to lack ' + 'of at least read permission') + make_lock = False + + return make_lock, currently_locked, lock_info + + @property + def last_commit_cache_update_diff(self): + return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) + + @classmethod + def _load_commit_change(cls, last_commit_cache): + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + date_latest = last_commit_cache.get('date', empty_date) + try: + return parse_datetime(date_latest) + except Exception: + return empty_date + + @property + def last_commit_change(self): + return self._load_commit_change(self.changeset_cache) + + @property + def last_db_change(self): + return self.updated_on + + @property + def clone_uri_hidden(self): + clone_uri = self.clone_uri + if clone_uri: + import urlobject + url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) + if url_obj.password: + clone_uri = url_obj.with_password('*****') + return clone_uri + + @property + def push_uri_hidden(self): + push_uri = self.push_uri + if push_uri: + import urlobject + url_obj = urlobject.URLObject(cleaned_uri(push_uri)) 
+ if url_obj.password: + push_uri = url_obj.with_password('*****') + return push_uri + + def clone_url(self, **override): + from rhodecode.model.settings import SettingsModel + + uri_tmpl = None + if 'with_id' in override: + uri_tmpl = self.DEFAULT_CLONE_URI_ID + del override['with_id'] + + if 'uri_tmpl' in override: + uri_tmpl = override['uri_tmpl'] + del override['uri_tmpl'] + + ssh = False + if 'ssh' in override: + ssh = True + del override['ssh'] + + # we didn't override our tmpl from **overrides + request = get_current_request() + if not uri_tmpl: + if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): + rc_config = request.call_context.rc_config + else: + rc_config = SettingsModel().get_all_settings(cache=True) + + if ssh: + uri_tmpl = rc_config.get( + 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH + + else: + uri_tmpl = rc_config.get( + 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI + + return get_clone_url(request=request, + uri_tmpl=uri_tmpl, + repo_name=self.repo_name, + repo_id=self.repo_id, + repo_type=self.repo_type, + **override) + + def set_state(self, state): + self.repo_state = state + Session().add(self) + #========================================================================== + # SCM PROPERTIES + #========================================================================== + + def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False): + return get_commit_safe( + self.scm_instance(), commit_id, commit_idx, pre_load=pre_load, + maybe_unreachable=maybe_unreachable) + + def get_changeset(self, rev=None, pre_load=None): + warnings.warn("Use get_commit", DeprecationWarning) + commit_id = None + commit_idx = None + if isinstance(rev, compat.string_types): + commit_id = rev + else: + commit_idx = rev + return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, + pre_load=pre_load) + + def get_landing_commit(self): + """ + Returns landing commit, or if that 
doesn't exist returns the tip + """ + _rev_type, _rev = self.landing_rev + commit = self.get_commit(_rev) + if isinstance(commit, EmptyCommit): + return self.get_commit() + return commit + + def flush_commit_cache(self): + self.update_commit_cache(cs_cache={'raw_id':'0'}) + self.update_commit_cache() + + def update_commit_cache(self, cs_cache=None, config=None): + """ + Update cache of last commit for repository + cache_keys should be:: + + source_repo_id + short_id + raw_id + revision + parents + message + date + author + updated_on + + """ + from rhodecode.lib.vcs.backends.base import BaseChangeset + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + + if cs_cache is None: + # use no-cache version here + try: + scm_repo = self.scm_instance(cache=False, config=config) + except VCSError: + scm_repo = None + empty = scm_repo is None or scm_repo.is_empty() + + if not empty: + cs_cache = scm_repo.get_commit( + pre_load=["author", "date", "message", "parents", "branch"]) + else: + cs_cache = EmptyCommit() + + if isinstance(cs_cache, BaseChangeset): + cs_cache = cs_cache.__json__() + + def is_outdated(new_cs_cache): + if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or + new_cs_cache['revision'] != self.changeset_cache['revision']): + return True + return False + + # check if we have maybe already latest cached revision + if is_outdated(cs_cache) or not self.changeset_cache: + _current_datetime = datetime.datetime.utcnow() + last_change = cs_cache.get('date') or _current_datetime + # we check if last update is newer than the new value + # if yes, we use the current timestamp instead. Imagine you get + # old commit pushed 1y ago, we'd set last update 1y to ago. 
+ last_change_timestamp = datetime_to_time(last_change) + current_timestamp = datetime_to_time(last_change) + if last_change_timestamp > current_timestamp and not empty: + cs_cache['date'] = _current_datetime + + _date_latest = parse_datetime(cs_cache.get('date') or empty_date) + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + self.updated_on = last_change + Session().add(self) + Session().commit() + + else: + if empty: + cs_cache = EmptyCommit().__json__() + else: + cs_cache = self.changeset_cache + + _date_latest = parse_datetime(cs_cache.get('date') or empty_date) + + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + self.updated_on = _date_latest + Session().add(self) + Session().commit() + + log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s', + self.repo_name, cs_cache, _date_latest) + + @property + def tip(self): + return self.get_commit('tip') + + @property + def author(self): + return self.tip.author + + @property + def last_change(self): + return self.scm_instance().last_change + + def get_comments(self, revisions=None): + """ + Returns comments for this repository grouped by revisions + + :param revisions: filter query by revisions only + """ + cmts = ChangesetComment.query()\ + .filter(ChangesetComment.repo == self) + if revisions: + cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) + grouped = collections.defaultdict(list) + for cmt in cmts.all(): + grouped[cmt.revision].append(cmt) + return grouped + + def statuses(self, revisions=None): + """ + Returns statuses for this repository + + :param revisions: list of revisions to get statuses for + """ + statuses = ChangesetStatus.query()\ + .filter(ChangesetStatus.repo == self)\ + .filter(ChangesetStatus.version == 0) + + if revisions: + # Try doing the filtering in chunks to avoid hitting limits + size = 500 + status_results = [] + for chunk in xrange(0, len(revisions), size): + status_results += statuses.filter( + 
ChangesetStatus.revision.in_( + revisions[chunk: chunk+size]) + ).all() + else: + status_results = statuses.all() + + grouped = {} + + # maybe we have open new pullrequest without a status? + stat = ChangesetStatus.STATUS_UNDER_REVIEW + status_lbl = ChangesetStatus.get_status_lbl(stat) + for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): + for rev in pr.revisions: + pr_id = pr.pull_request_id + pr_repo = pr.target_repo.repo_name + grouped[rev] = [stat, status_lbl, pr_id, pr_repo] + + for stat in status_results: + pr_id = pr_repo = None + if stat.pull_request: + pr_id = stat.pull_request.pull_request_id + pr_repo = stat.pull_request.target_repo.repo_name + grouped[stat.revision] = [str(stat.status), stat.status_lbl, + pr_id, pr_repo] + return grouped + + # ========================================================================== + # SCM CACHE INSTANCE + # ========================================================================== + + def scm_instance(self, **kwargs): + import rhodecode + + # Passing a config will not hit the cache currently only used + # for repo2dbmapper + config = kwargs.pop('config', None) + cache = kwargs.pop('cache', None) + vcs_full_cache = kwargs.pop('vcs_full_cache', None) + if vcs_full_cache is not None: + # allows override global config + full_cache = vcs_full_cache + else: + full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) + # if cache is NOT defined use default global, else we have a full + # control over cache behaviour + if cache is None and full_cache and not config: + log.debug('Initializing pure cached instance for %s', self.repo_path) + return self._get_instance_cached() + + # cache here is sent to the "vcs server" + return self._get_instance(cache=bool(cache), config=config) + + def _get_instance_cached(self): + from rhodecode.lib import rc_cache + + cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + 
repo_id=self.repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) + def get_instance_cached(repo_id, context_id, _cache_state_uid): + return self._get_instance(repo_state_uid=_cache_state_uid) + + # we must use thread scoped cache here, + # because each thread of gevent needs it's own not shared connection and cache + # we also alter `args` so the cache key is individual for every green thread. + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace, + thread_scoped=True) + with inv_context_manager as invalidation_context: + cache_state_uid = invalidation_context.cache_data['cache_state_uid'] + args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid) + + # re-compute and store cache if we get invalidate signal + if invalidation_context.should_invalidate(): + instance = get_instance_cached.refresh(*args) + else: + instance = get_instance_cached(*args) + + log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time) + return instance + + def _get_instance(self, cache=True, config=None, repo_state_uid=None): + log.debug('Initializing %s instance `%s` with cache flag set to: %s', + self.repo_type, self.repo_path, cache) + config = config or self._config + custom_wire = { + 'cache': cache, # controls the vcs.remote cache + 'repo_state_uid': repo_state_uid + } + repo = get_vcs_instance( + repo_path=safe_str(self.repo_full_path), + config=config, + with_wire=custom_wire, + create=False, + _vcs_alias=self.repo_type) + if repo is not None: + repo.count() # cache rebuild + return repo + + def get_shadow_repository_path(self, workspace_id): + from rhodecode.lib.vcs.backends.base import BaseRepository + shadow_repo_path = BaseRepository._get_shadow_repository_path( + self.repo_full_path, self.repo_id, workspace_id) + return shadow_repo_path + + def __json__(self): 
+ return {'landing_rev': self.landing_rev} + + def get_dict(self): + + # Since we transformed `repo_name` to a hybrid property, we need to + # keep compatibility with the code which uses `repo_name` field. + + result = super(Repository, self).get_dict() + result['repo_name'] = result.pop('_repo_name', None) + return result + + +class RepoGroup(Base, BaseModel): + __tablename__ = 'groups' + __table_args__ = ( + UniqueConstraint('group_name', 'group_parent_id'), + base_table_args, + ) + __mapper_args__ = {'order_by': 'group_name'} + + CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups + + group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) + group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False) + group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) + group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) + enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) + personal = Column('personal', Boolean(), nullable=True, unique=None, default=None) + _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data + + repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') + users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') + parent_group = 
relationship('RepoGroup', remote_side=group_id) + user = relationship('User') + integrations = relationship('Integration', cascade="all, delete-orphan") + + # no cascade, set NULL + scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id') + + def __init__(self, group_name='', parent_group=None): + self.group_name = group_name + self.parent_group = parent_group + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % ( + self.__class__.__name__, self.group_id, self.group_name) + + @hybrid_property + def group_name(self): + return self._group_name + + @group_name.setter + def group_name(self, value): + self._group_name = value + self.group_name_hash = self.hash_repo_group_name(value) + + @classmethod + def _load_changeset_cache(cls, repo_id, changeset_cache_raw): + from rhodecode.lib.vcs.backends.base import EmptyCommit + dummy = EmptyCommit().__json__() + if not changeset_cache_raw: + dummy['source_repo_id'] = repo_id + return json.loads(json.dumps(dummy)) + + try: + return json.loads(changeset_cache_raw) + except TypeError: + return dummy + except Exception: + log.error(traceback.format_exc()) + return dummy + + @hybrid_property + def changeset_cache(self): + return self._load_changeset_cache('', self._changeset_cache) + + @changeset_cache.setter + def changeset_cache(self, val): + try: + self._changeset_cache = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @validates('group_parent_id') + def validate_group_parent_id(self, key, val): + """ + Check cycle references for a parent group to self + """ + if self.group_id and val: + assert val != self.group_id + + return val + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.group_description) + + @classmethod + def hash_repo_group_name(cls, repo_group_name): + val = remove_formatting(repo_group_name) + val = safe_str(val).lower() + chars = [] + for c in val: + if c not in 
string.ascii_letters: + c = str(ord(c)) + chars.append(c) + + return ''.join(chars) + + @classmethod + def _generate_choice(cls, repo_group): + from webhelpers2.html import literal as _literal + _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) + return repo_group.group_id, _name(repo_group.full_path_splitted) + + @classmethod + def groups_choices(cls, groups=None, show_empty_group=True): + if not groups: + groups = cls.query().all() + + repo_groups = [] + if show_empty_group: + repo_groups = [(-1, u'-- %s --' % _('No parent'))] + + repo_groups.extend([cls._generate_choice(x) for x in groups]) + + repo_groups = sorted( + repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) + return repo_groups + + @classmethod + def url_sep(cls): + return URL_SEP + + @classmethod + def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): + if case_insensitive: + gr = cls.query().filter(func.lower(cls.group_name) + == func.lower(group_name)) + else: + gr = cls.query().filter(cls.group_name == group_name) + if cache: + name_key = _hash_key(group_name) + gr = gr.options( + FromCache("sql_cache_short", "get_group_%s" % name_key)) + return gr.scalar() + + @classmethod + def get_user_personal_repo_group(cls, user_id): + user = User.get(user_id) + if user.username == User.DEFAULT_USER: + return None + + return cls.query()\ + .filter(cls.personal == true()) \ + .filter(cls.user == user) \ + .order_by(cls.group_id.asc()) \ + .first() + + @classmethod + def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), + case_insensitive=True): + q = RepoGroup.query() + + if not isinstance(user_id, Optional): + q = q.filter(RepoGroup.user_id == user_id) + + if not isinstance(group_id, Optional): + q = q.filter(RepoGroup.group_parent_id == group_id) + + if case_insensitive: + q = q.order_by(func.lower(RepoGroup.group_name)) + else: + q = q.order_by(RepoGroup.group_name) + return q.all() + + @property + def parents(self, 
parents_recursion_limit=10): + groups = [] + if self.parent_group is None: + return groups + cur_gr = self.parent_group + groups.insert(0, cur_gr) + cnt = 0 + while 1: + cnt += 1 + gr = getattr(cur_gr, 'parent_group', None) + cur_gr = cur_gr.parent_group + if gr is None: + break + if cnt == parents_recursion_limit: + # this will prevent accidental infinit loops + log.error('more than %s parents found for group %s, stopping ' + 'recursive parent fetching', parents_recursion_limit, self) + break + + groups.insert(0, gr) + return groups + + @property + def last_commit_cache_update_diff(self): + return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) + + @classmethod + def _load_commit_change(cls, last_commit_cache): + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + date_latest = last_commit_cache.get('date', empty_date) + try: + return parse_datetime(date_latest) + except Exception: + return empty_date + + @property + def last_commit_change(self): + return self._load_commit_change(self.changeset_cache) + + @property + def last_db_change(self): + return self.updated_on + + @property + def children(self): + return RepoGroup.query().filter(RepoGroup.parent_group == self) + + @property + def name(self): + return self.group_name.split(RepoGroup.url_sep())[-1] + + @property + def full_path(self): + return self.group_name + + @property + def full_path_splitted(self): + return self.group_name.split(RepoGroup.url_sep()) + + @property + def repositories(self): + return Repository.query()\ + .filter(Repository.group == self)\ + .order_by(Repository.repo_name) + + @property + def repositories_recursive_count(self): + cnt = self.repositories.count() + + def children_count(group): + cnt = 0 + for child in group.children: + cnt += child.repositories.count() + cnt += children_count(child) + return cnt + + return cnt + children_count(self) + + def _recursive_objects(self, include_repos=True, 
include_groups=True): + all_ = [] + + def _get_members(root_gr): + if include_repos: + for r in root_gr.repositories: + all_.append(r) + childs = root_gr.children.all() + if childs: + for gr in childs: + if include_groups: + all_.append(gr) + _get_members(gr) + + root_group = [] + if include_groups: + root_group = [self] + + _get_members(self) + return root_group + all_ + + def recursive_groups_and_repos(self): + """ + Recursive return all groups, with repositories in those groups + """ + return self._recursive_objects() + + def recursive_groups(self): + """ + Returns all children groups for this group including children of children + """ + return self._recursive_objects(include_repos=False) + + def recursive_repos(self): + """ + Returns all children repositories for this group + """ + return self._recursive_objects(include_groups=False) + + def get_new_name(self, group_name): + """ + returns new full group name based on parent and new name + + :param group_name: + """ + path_prefix = (self.parent_group.full_path_splitted if + self.parent_group else []) + return RepoGroup.url_sep().join(path_prefix + [group_name]) + + def update_commit_cache(self, config=None): + """ + Update cache of last commit for newest repository inside this repository group. 
+ cache_keys should be:: + + source_repo_id + short_id + raw_id + revision + parents + message + date + author + + """ + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + + def repo_groups_and_repos(root_gr): + for _repo in root_gr.repositories: + yield _repo + for child_group in root_gr.children.all(): + yield child_group + + latest_repo_cs_cache = {} + for obj in repo_groups_and_repos(self): + repo_cs_cache = obj.changeset_cache + date_latest = latest_repo_cs_cache.get('date', empty_date) + date_current = repo_cs_cache.get('date', empty_date) + current_timestamp = datetime_to_time(parse_datetime(date_latest)) + if current_timestamp < datetime_to_time(parse_datetime(date_current)): + latest_repo_cs_cache = repo_cs_cache + if hasattr(obj, 'repo_id'): + latest_repo_cs_cache['source_repo_id'] = obj.repo_id + else: + latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id') + + _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date) + + latest_repo_cs_cache['updated_on'] = time.time() + self.changeset_cache = latest_repo_cs_cache + self.updated_on = _date_latest + Session().add(self) + Session().commit() + + log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s', + self.group_name, latest_repo_cs_cache, _date_latest) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for repository groups + """ + _admin_perm = 'group.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = 
AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + super_admin_rows.append(usr) + + q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) + q = q.options(joinedload(UserRepoGroupToPerm.group), + joinedload(UserRepoGroupToPerm.user), + joinedload(UserRepoGroupToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + usr.permission = _usr.permission.permission_name + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=False): + q = UserGroupRepoGroupToPerm.query()\ + .filter(UserGroupRepoGroupToPerm.group == self) + q = q.options(joinedload(UserGroupRepoGroupToPerm.group), + joinedload(UserGroupRepoGroupToPerm.users_group), + joinedload(UserGroupRepoGroupToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.users_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in 
_user_group.users_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def get_api_data(self): + """ + Common function for generating api data + + """ + group = self + data = { + 'group_id': group.group_id, + 'group_name': group.group_name, + 'group_description': group.description_safe, + 'parent_group': group.parent_group.group_name if group.parent_group else None, + 'repositories': [x.repo_name for x in group.repositories], + 'owner': group.user.username, + } + return data + + def get_dict(self): + # Since we transformed `group_name` to a hybrid property, we need to + # keep compatibility with the code which uses `group_name` field. + result = super(RepoGroup, self).get_dict() + result['group_name'] = result.pop('_group_name', None) + return result + + +class Permission(Base, BaseModel): + __tablename__ = 'permissions' + __table_args__ = ( + Index('p_perm_name_idx', 'permission_name'), + base_table_args, + ) + + PERMS = [ + ('hg.admin', _('RhodeCode Super Administrator')), + + ('repository.none', _('Repository no access')), + ('repository.read', _('Repository read access')), + ('repository.write', _('Repository write access')), + ('repository.admin', _('Repository admin access')), + + ('group.none', _('Repository group no access')), + ('group.read', _('Repository group read access')), + ('group.write', _('Repository group write access')), + ('group.admin', _('Repository group admin access')), + + ('usergroup.none', _('User group no access')), + ('usergroup.read', _('User group read access')), + ('usergroup.write', _('User group write access')), + ('usergroup.admin', _('User group admin access')), + + ('branch.none', _('Branch no permissions')), + ('branch.merge', _('Branch access by web merge')), + ('branch.push', _('Branch access by push')), + ('branch.push_force', _('Branch access by push with force')), + + ('hg.repogroup.create.false', _('Repository Group creation disabled')), + 
('hg.repogroup.create.true', _('Repository Group creation enabled')), + + ('hg.usergroup.create.false', _('User Group creation disabled')), + ('hg.usergroup.create.true', _('User Group creation enabled')), + + ('hg.create.none', _('Repository creation disabled')), + ('hg.create.repository', _('Repository creation enabled')), + ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), + ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), + + ('hg.fork.none', _('Repository forking disabled')), + ('hg.fork.repository', _('Repository forking enabled')), + + ('hg.register.none', _('Registration disabled')), + ('hg.register.manual_activate', _('User Registration with manual account activation')), + ('hg.register.auto_activate', _('User Registration with automatic account activation')), + + ('hg.password_reset.enabled', _('Password reset enabled')), + ('hg.password_reset.hidden', _('Password reset hidden')), + ('hg.password_reset.disabled', _('Password reset disabled')), + + ('hg.extern_activate.manual', _('Manual activation of external account')), + ('hg.extern_activate.auto', _('Automatic activation of external account')), + + ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), + ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), + ] + + # definition of system default permissions for DEFAULT user, created on + # system setup + DEFAULT_USER_PERMISSIONS = [ + # object perms + 'repository.read', + 'group.read', + 'usergroup.read', + # branch, for backward compat we need same value as before so forced pushed + 'branch.push_force', + # global + 'hg.create.repository', + 'hg.repogroup.create.false', + 'hg.usergroup.create.false', + 'hg.create.write_on_repogroup.true', + 'hg.fork.repository', + 'hg.register.manual_activate', + 'hg.password_reset.enabled', + 
'hg.extern_activate.auto', + 'hg.inherit_default_perms.true', + ] + + # defines which permissions are more important higher the more important + # Weight defines which permissions are more important. + # The higher number the more important. + PERM_WEIGHTS = { + 'repository.none': 0, + 'repository.read': 1, + 'repository.write': 3, + 'repository.admin': 4, + + 'group.none': 0, + 'group.read': 1, + 'group.write': 3, + 'group.admin': 4, + + 'usergroup.none': 0, + 'usergroup.read': 1, + 'usergroup.write': 3, + 'usergroup.admin': 4, + + 'branch.none': 0, + 'branch.merge': 1, + 'branch.push': 3, + 'branch.push_force': 4, + + 'hg.repogroup.create.false': 0, + 'hg.repogroup.create.true': 1, + + 'hg.usergroup.create.false': 0, + 'hg.usergroup.create.true': 1, + + 'hg.fork.none': 0, + 'hg.fork.repository': 1, + 'hg.create.none': 0, + 'hg.create.repository': 1 + } + + permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) + permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) + + def __unicode__(self): + return u"<%s('%s:%s')>" % ( + self.__class__.__name__, self.permission_id, self.permission_name + ) + + @classmethod + def get_by_key(cls, key): + return cls.query().filter(cls.permission_name == key).scalar() + + @classmethod + def get_default_repo_perms(cls, user_id, repo_id=None): + q = Session().query(UserRepoToPerm, Repository, Permission)\ + .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ + .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ + .filter(UserRepoToPerm.user_id == user_id) + if repo_id: + q = q.filter(UserRepoToPerm.repository_id == repo_id) + return q.all() + + @classmethod + def get_default_repo_branch_perms(cls, user_id, repo_id=None): + q = Session().query(UserToRepoBranchPermission, 
UserRepoToPerm, Permission) \ + .join( + Permission, + UserToRepoBranchPermission.permission_id == Permission.permission_id) \ + .join( + UserRepoToPerm, + UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \ + .filter(UserRepoToPerm.user_id == user_id) + + if repo_id: + q = q.filter(UserToRepoBranchPermission.repository_id == repo_id) + return q.order_by(UserToRepoBranchPermission.rule_order).all() + + @classmethod + def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): + q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ + .join( + Permission, + UserGroupRepoToPerm.permission_id == Permission.permission_id)\ + .join( + Repository, + UserGroupRepoToPerm.repository_id == Repository.repo_id)\ + .join( + UserGroup, + UserGroupRepoToPerm.users_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupRepoToPerm.users_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if repo_id: + q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) + return q.all() + + @classmethod + def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None): + q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \ + .join( + Permission, + UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \ + .join( + UserGroupRepoToPerm, + UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \ + .join( + UserGroup, + UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \ + .join( + UserGroupMember, + UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + + if repo_id: + q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id) + return 
q.order_by(UserGroupToRepoBranchPermission.rule_order).all() + + @classmethod + def get_default_group_perms(cls, user_id, repo_group_id=None): + q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ + .join( + Permission, + UserRepoGroupToPerm.permission_id == Permission.permission_id)\ + .join( + RepoGroup, + UserRepoGroupToPerm.group_id == RepoGroup.group_id)\ + .filter(UserRepoGroupToPerm.user_id == user_id) + if repo_group_id: + q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) + return q.all() + + @classmethod + def get_default_group_perms_from_user_group( + cls, user_id, repo_group_id=None): + q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ + .join( + Permission, + UserGroupRepoGroupToPerm.permission_id == + Permission.permission_id)\ + .join( + RepoGroup, + UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ + .join( + UserGroup, + UserGroupRepoGroupToPerm.users_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupRepoGroupToPerm.users_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if repo_group_id: + q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) + return q.all() + + @classmethod + def get_default_user_group_perms(cls, user_id, user_group_id=None): + q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ + .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ + .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ + .filter(UserUserGroupToPerm.user_id == user_id) + if user_group_id: + q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) + return q.all() + + @classmethod + def get_default_user_group_perms_from_user_group( + cls, user_id, user_group_id=None): + TargetUserGroup = aliased(UserGroup, name='target_user_group') + q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ + 
.join( + Permission, + UserGroupUserGroupToPerm.permission_id == + Permission.permission_id)\ + .join( + TargetUserGroup, + UserGroupUserGroupToPerm.target_user_group_id == + TargetUserGroup.users_group_id)\ + .join( + UserGroup, + UserGroupUserGroupToPerm.user_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupUserGroupToPerm.user_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if user_group_id: + q = q.filter( + UserGroupUserGroupToPerm.user_group_id == user_group_id) + + return q.all() + + +class UserRepoToPerm(Base, BaseModel): + __tablename__ = 'repo_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'repository_id', 'permission_id'), + base_table_args + ) + + repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + repository = relationship('Repository') + permission = relationship('Permission') + + branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined') + + @classmethod + def create(cls, user, repository, permission): + n = cls() + n.user = user + n.repository = repository + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.repository) + + +class UserUserGroupToPerm(Base, BaseModel): + __tablename__ = 'user_user_group_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'user_group_id', 'permission_id'), + base_table_args + ) + + 
user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + user_group = relationship('UserGroup') + permission = relationship('Permission') + + @classmethod + def create(cls, user, user_group, permission): + n = cls() + n.user = user + n.user_group = user_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.user_group) + + +class UserToPerm(Base, BaseModel): + __tablename__ = 'user_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'permission_id'), + base_table_args + ) + + user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + permission = relationship('Permission', lazy='joined') + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.permission) + + +class UserGroupRepoToPerm(Base, BaseModel): + __tablename__ = 'users_group_repo_to_perm' + __table_args__ = ( + UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), + base_table_args + ) + + users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", 
Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + repository = relationship('Repository') + user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all') + + @classmethod + def create(cls, users_group, repository, permission): + n = cls() + n.users_group = users_group + n.repository = repository + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.users_group, self.repository) + + +class UserGroupUserGroupToPerm(Base, BaseModel): + __tablename__ = 'user_group_user_group_to_perm' + __table_args__ = ( + UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), + CheckConstraint('target_user_group_id != user_group_id'), + base_table_args + ) + + user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + + target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id') + user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id') + 
permission = relationship('Permission') + + @classmethod + def create(cls, target_user_group, user_group, permission): + n = cls() + n.target_user_group = target_user_group + n.user_group = user_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.target_user_group, self.user_group) + + +class UserGroupToPerm(Base, BaseModel): + __tablename__ = 'users_group_to_perm' + __table_args__ = ( + UniqueConstraint('users_group_id', 'permission_id',), + base_table_args + ) + + users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + + +class UserRepoGroupToPerm(Base, BaseModel): + __tablename__ = 'user_repo_group_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'group_id', 'permission_id'), + base_table_args + ) + + group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + group = relationship('RepoGroup') + permission = relationship('Permission') + + @classmethod + def create(cls, user, repository_group, permission): + n = cls() + n.user = user + n.group = repository_group + n.permission = permission + 
Session().add(n) + return n + + +class UserGroupRepoGroupToPerm(Base, BaseModel): + __tablename__ = 'users_group_repo_group_to_perm' + __table_args__ = ( + UniqueConstraint('users_group_id', 'group_id'), + base_table_args + ) + + users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + group = relationship('RepoGroup') + + @classmethod + def create(cls, user_group, repository_group, permission): + n = cls() + n.users_group = user_group + n.group = repository_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.users_group, self.group) + + +class Statistics(Base, BaseModel): + __tablename__ = 'statistics' + __table_args__ = ( + base_table_args + ) + + stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) + stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) + commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data + commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data + languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data + + repository = relationship('Repository', single_parent=True) + + +class UserFollowing(Base, BaseModel): + 
__tablename__ = 'user_followings' + __table_args__ = ( + UniqueConstraint('user_id', 'follows_repository_id'), + UniqueConstraint('user_id', 'follows_user_id'), + base_table_args + ) + + user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) + follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) + + user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') + + follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') + follows_repository = relationship('Repository', order_by='Repository.repo_name') + + @classmethod + def get_repo_followers(cls, repo_id): + return cls.query().filter(cls.follows_repo_id == repo_id) + + +class CacheKey(Base, BaseModel): + __tablename__ = 'cache_invalidation' + __table_args__ = ( + UniqueConstraint('cache_key'), + Index('key_idx', 'cache_key'), + base_table_args, + ) + + CACHE_TYPE_FEED = 'FEED' + + # namespaces used to register process/thread aware caches + REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}' + SETTINGS_INVALIDATION_NAMESPACE = 'system_settings' + + cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) + cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) + cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None) + cache_active = 
Column("cache_active", Boolean(), nullable=True, unique=None, default=False) + + def __init__(self, cache_key, cache_args='', cache_state_uid=None): + self.cache_key = cache_key + self.cache_args = cache_args + self.cache_active = False + # first key should be same for all entries, since all workers should share it + self.cache_state_uid = cache_state_uid or self.generate_new_state_uid() + + def __unicode__(self): + return u"<%s('%s:%s[%s]')>" % ( + self.__class__.__name__, + self.cache_id, self.cache_key, self.cache_active) + + def _cache_key_partition(self): + prefix, repo_name, suffix = self.cache_key.partition(self.cache_args) + return prefix, repo_name, suffix + + def get_prefix(self): + """ + Try to extract prefix from existing cache key. The key could consist + of prefix, repo_name, suffix + """ + # this returns prefix, repo_name, suffix + return self._cache_key_partition()[0] + + def get_suffix(self): + """ + get suffix that might have been used in _get_cache_key to + generate self.cache_key. Only used for informational purposes + in repo_edit.mako. + """ + # prefix, repo_name, suffix + return self._cache_key_partition()[2] + + @classmethod + def generate_new_state_uid(cls, based_on=None): + if based_on: + return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on))) + else: + return str(uuid.uuid4()) + + @classmethod + def delete_all_cache(cls): + """ + Delete all cache keys from database. + Should only be run when all instances are down and all entries + thus stale. + """ + cls.query().delete() + Session().commit() + + @classmethod + def set_invalidate(cls, cache_uid, delete=False): + """ + Mark all caches of a repo as invalid in the database. 
+ """ + + try: + qry = Session().query(cls).filter(cls.cache_args == cache_uid) + if delete: + qry.delete() + log.debug('cache objects deleted for cache args %s', + safe_str(cache_uid)) + else: + qry.update({"cache_active": False, + "cache_state_uid": cls.generate_new_state_uid()}) + log.debug('cache objects marked as invalid for cache args %s', + safe_str(cache_uid)) + + Session().commit() + except Exception: + log.exception( + 'Cache key invalidation failed for cache args %s', + safe_str(cache_uid)) + Session().rollback() + + @classmethod + def get_active_cache(cls, cache_key): + inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar() + if inv_obj: + return inv_obj + return None + + @classmethod + def get_namespace_map(cls, namespace): + return { + x.cache_key: x + for x in cls.query().filter(cls.cache_args == namespace)} + + +class ChangesetComment(Base, BaseModel): + __tablename__ = 'changeset_comments' + __table_args__ = ( + Index('cc_revision_idx', 'revision'), + base_table_args, + ) + + COMMENT_OUTDATED = u'comment_outdated' + COMMENT_TYPE_NOTE = u'note' + COMMENT_TYPE_TODO = u'todo' + COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO] + + OP_IMMUTABLE = u'immutable' + OP_CHANGEABLE = u'changeable' + + comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) + repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) + revision = Column('revision', String(40), nullable=True) + pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) + pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True) + line_no = Column('line_no', Unicode(10), nullable=True) + hl_lines = Column('hl_lines', Unicode(512), nullable=True) + f_path = Column('f_path', Unicode(1000), nullable=True) + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), 
nullable=False) + text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + renderer = Column('renderer', Unicode(64), nullable=True) + display_state = Column('display_state', Unicode(128), nullable=True) + immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE) + + comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE) + resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True) + + resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by') + resolved_by = relationship('ChangesetComment', back_populates='resolved_comment') + + author = relationship('User', lazy='joined') + repo = relationship('Repository') + status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined') + pull_request = relationship('PullRequest', lazy='joined') + pull_request_version = relationship('PullRequestVersion') + history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='joined', order_by='ChangesetCommentHistory.version') + + @classmethod + def get_users(cls, revision=None, pull_request_id=None): + """ + Returns user associated with this ChangesetComment. 
ie those + who actually commented + + :param cls: + :param revision: + """ + q = Session().query(User)\ + .join(ChangesetComment.author) + if revision: + q = q.filter(cls.revision == revision) + elif pull_request_id: + q = q.filter(cls.pull_request_id == pull_request_id) + return q.all() + + @classmethod + def get_index_from_version(cls, pr_version, versions): + num_versions = [x.pull_request_version_id for x in versions] + try: + return num_versions.index(pr_version) +1 + except (IndexError, ValueError): + return + + @property + def outdated(self): + return self.display_state == self.COMMENT_OUTDATED + + @property + def immutable(self): + return self.immutable_state == self.OP_IMMUTABLE + + def outdated_at_version(self, version): + """ + Checks if comment is outdated for given pull request version + """ + return self.outdated and self.pull_request_version_id != version + + def older_than_version(self, version): + """ + Checks if comment is made from previous version than given + """ + if version is None: + return self.pull_request_version_id is not None + + return self.pull_request_version_id < version + + @property + def resolved(self): + return self.resolved_by[0] if self.resolved_by else None + + @property + def is_todo(self): + return self.comment_type == self.COMMENT_TYPE_TODO + + @property + def is_inline(self): + return self.line_no and self.f_path + + def get_index_version(self, versions): + return self.get_index_from_version( + self.pull_request_version_id, versions) + + def __repr__(self): + if self.comment_id: + return '' % self.comment_id + else: + return '' % id(self) + + def get_api_data(self): + comment = self + data = { + 'comment_id': comment.comment_id, + 'comment_type': comment.comment_type, + 'comment_text': comment.text, + 'comment_status': comment.status_change, + 'comment_f_path': comment.f_path, + 'comment_lineno': comment.line_no, + 'comment_author': comment.author, + 'comment_created_on': comment.created_on, + 'comment_resolved_by': 
self.resolved, + 'comment_commit_id': comment.revision, + 'comment_pull_request_id': comment.pull_request_id, + } + return data + + def __json__(self): + data = dict() + data.update(self.get_api_data()) + return data + + +class ChangesetCommentHistory(Base, BaseModel): + __tablename__ = 'changeset_comments_history' + __table_args__ = ( + Index('cch_comment_id_idx', 'comment_id'), + base_table_args, + ) + + comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True) + comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False) + version = Column("version", Integer(), nullable=False, default=0) + created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False) + text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + deleted = Column('deleted', Boolean(), default=False) + + author = relationship('User', lazy='joined') + comment = relationship('ChangesetComment', cascade="all, delete") + + @classmethod + def get_version(cls, comment_id): + q = Session().query(ChangesetCommentHistory).filter( + ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc()) + if q.count() == 0: + return 1 + elif q.count() >= q[0].version: + return q.count() + 1 + else: + return q[0].version + 1 + + +class ChangesetStatus(Base, BaseModel): + __tablename__ = 'changeset_statuses' + __table_args__ = ( + Index('cs_revision_idx', 'revision'), + Index('cs_version_idx', 'version'), + UniqueConstraint('repo_id', 'revision', 'version'), + base_table_args + ) + + STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed' + STATUS_APPROVED = 'approved' + STATUS_REJECTED = 'rejected' + STATUS_UNDER_REVIEW = 'under_review' + + STATUSES = [ + (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default + 
(STATUS_APPROVED, _("Approved")), + (STATUS_REJECTED, _("Rejected")), + (STATUS_UNDER_REVIEW, _("Under Review")), + ] + + changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True) + repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) + revision = Column('revision', String(40), nullable=False) + status = Column('status', String(128), nullable=False, default=DEFAULT) + changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id')) + modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) + version = Column('version', Integer(), nullable=False, default=0) + pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) + + author = relationship('User', lazy='joined') + repo = relationship('Repository') + comment = relationship('ChangesetComment', lazy='joined') + pull_request = relationship('PullRequest', lazy='joined') + + def __unicode__(self): + return u"<%s('%s[v%s]:%s')>" % ( + self.__class__.__name__, + self.status, self.version, self.author + ) + + @classmethod + def get_status_lbl(cls, value): + return dict(cls.STATUSES).get(value) + + @property + def status_lbl(self): + return ChangesetStatus.get_status_lbl(self.status) + + def get_api_data(self): + status = self + data = { + 'status_id': status.changeset_status_id, + 'status': status.status, + } + return data + + def __json__(self): + data = dict() + data.update(self.get_api_data()) + return data + + +class _SetState(object): + """ + Context processor allowing changing state for sensitive operation such as + pull request update or merge + """ + + def __init__(self, pull_request, pr_state, back_state=None): + self._pr = pull_request + self._org_state = back_state or pull_request.pull_request_state + 
self._pr_state = pr_state + self._current_state = None + + def __enter__(self): + log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`', + self._pr, self._pr_state) + self.set_pr_state(self._pr_state) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_val is not None: + log.error(traceback.format_exc(exc_tb)) + return None + + self.set_pr_state(self._org_state) + log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`', + self._pr, self._org_state) + + @property + def state(self): + return self._current_state + + def set_pr_state(self, pr_state): + try: + self._pr.pull_request_state = pr_state + Session().add(self._pr) + Session().commit() + self._current_state = pr_state + except Exception: + log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state) + raise + + +class _PullRequestBase(BaseModel): + """ + Common attributes of pull request and version entries. + """ + + # .status values + STATUS_NEW = u'new' + STATUS_OPEN = u'open' + STATUS_CLOSED = u'closed' + + # available states + STATE_CREATING = u'creating' + STATE_UPDATING = u'updating' + STATE_MERGING = u'merging' + STATE_CREATED = u'created' + + title = Column('title', Unicode(255), nullable=True) + description = Column( + 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), + nullable=True) + description_renderer = Column('description_renderer', Unicode(64), nullable=True) + + # new/open/closed status of pull request (not approve/reject/etc) + status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW) + created_on = Column( + 'created_on', DateTime(timezone=False), nullable=False, + default=datetime.datetime.now) + updated_on = Column( + 'updated_on', DateTime(timezone=False), nullable=False, + default=datetime.datetime.now) + + pull_request_state = Column("pull_request_state", String(255), nullable=True) + + @declared_attr + def user_id(cls): + return Column( + "user_id", 
Integer(), ForeignKey('users.user_id'), nullable=False, + unique=None) + + # 500 revisions max + _revisions = Column( + 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) + + common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True) + + @declared_attr + def source_repo_id(cls): + # TODO: dan: rename column to source_repo_id + return Column( + 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + + _source_ref = Column('org_ref', Unicode(255), nullable=False) + + @hybrid_property + def source_ref(self): + return self._source_ref + + @source_ref.setter + def source_ref(self, val): + parts = (val or '').split(':') + if len(parts) != 3: + raise ValueError( + 'Invalid reference format given: {}, expected X:Y:Z'.format(val)) + self._source_ref = safe_unicode(val) + + _target_ref = Column('other_ref', Unicode(255), nullable=False) + + @hybrid_property + def target_ref(self): + return self._target_ref + + @target_ref.setter + def target_ref(self, val): + parts = (val or '').split(':') + if len(parts) != 3: + raise ValueError( + 'Invalid reference format given: {}, expected X:Y:Z'.format(val)) + self._target_ref = safe_unicode(val) + + @declared_attr + def target_repo_id(cls): + # TODO: dan: rename column to target_repo_id + return Column( + 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + + _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True) + + # TODO: dan: rename column to last_merge_source_rev + _last_merge_source_rev = Column( + 'last_merge_org_rev', String(40), nullable=True) + # TODO: dan: rename column to last_merge_target_rev + _last_merge_target_rev = Column( + 'last_merge_other_rev', String(40), nullable=True) + _last_merge_status = Column('merge_status', Integer(), nullable=True) + last_merge_metadata = Column( + 'last_merge_metadata', MutationObj.as_mutable( + JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) + + merge_rev = 
Column('merge_rev', String(40), nullable=True) + + reviewer_data = Column( + 'reviewer_data_json', MutationObj.as_mutable( + JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) + + @property + def reviewer_data_json(self): + return json.dumps(self.reviewer_data) + + @property + def work_in_progress(self): + """checks if pull request is work in progress by checking the title""" + title = self.title.upper() + if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title): + return True + return False + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @hybrid_property + def revisions(self): + return self._revisions.split(':') if self._revisions else [] + + @revisions.setter + def revisions(self, val): + self._revisions = u':'.join(val) + + @hybrid_property + def last_merge_status(self): + return safe_int(self._last_merge_status) + + @last_merge_status.setter + def last_merge_status(self, val): + self._last_merge_status = val + + @declared_attr + def author(cls): + return relationship('User', lazy='joined') + + @declared_attr + def source_repo(cls): + return relationship( + 'Repository', + primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__) + + @property + def source_ref_parts(self): + return self.unicode_to_reference(self.source_ref) + + @declared_attr + def target_repo(cls): + return relationship( + 'Repository', + primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__) + + @property + def target_ref_parts(self): + return self.unicode_to_reference(self.target_ref) + + @property + def shadow_merge_ref(self): + return self.unicode_to_reference(self._shadow_merge_ref) + + @shadow_merge_ref.setter + def shadow_merge_ref(self, ref): + self._shadow_merge_ref = self.reference_to_unicode(ref) + + @staticmethod + def unicode_to_reference(raw): + """ + Convert a unicode (or string) to a reference object. + If unicode evaluates to False it returns None. 
+ """ + if raw: + refs = raw.split(':') + return Reference(*refs) + else: + return None + + @staticmethod + def reference_to_unicode(ref): + """ + Convert a reference object to unicode. + If reference is None it returns None. + """ + if ref: + return u':'.join(ref) + else: + return None + + def get_api_data(self, with_merge_state=True): + from rhodecode.model.pull_request import PullRequestModel + + pull_request = self + if with_merge_state: + merge_response, merge_status, msg = \ + PullRequestModel().merge_status(pull_request) + merge_state = { + 'status': merge_status, + 'message': safe_unicode(msg), + } + else: + merge_state = {'status': 'not_available', + 'message': 'not_available'} + + merge_data = { + 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request), + 'reference': ( + pull_request.shadow_merge_ref._asdict() + if pull_request.shadow_merge_ref else None), + } + + data = { + 'pull_request_id': pull_request.pull_request_id, + 'url': PullRequestModel().get_url(pull_request), + 'title': pull_request.title, + 'description': pull_request.description, + 'status': pull_request.status, + 'state': pull_request.pull_request_state, + 'created_on': pull_request.created_on, + 'updated_on': pull_request.updated_on, + 'commit_ids': pull_request.revisions, + 'review_status': pull_request.calculated_review_status(), + 'mergeable': merge_state, + 'source': { + 'clone_url': pull_request.source_repo.clone_url(), + 'repository': pull_request.source_repo.repo_name, + 'reference': { + 'name': pull_request.source_ref_parts.name, + 'type': pull_request.source_ref_parts.type, + 'commit_id': pull_request.source_ref_parts.commit_id, + }, + }, + 'target': { + 'clone_url': pull_request.target_repo.clone_url(), + 'repository': pull_request.target_repo.repo_name, + 'reference': { + 'name': pull_request.target_ref_parts.name, + 'type': pull_request.target_ref_parts.type, + 'commit_id': pull_request.target_ref_parts.commit_id, + }, + }, + 'merge': merge_data, + 'author': 
pull_request.author.get_api_data(include_secrets=False, + details='basic'), + 'reviewers': [ + { + 'user': reviewer.get_api_data(include_secrets=False, + details='basic'), + 'reasons': reasons, + 'review_status': st[0][1].status if st else 'not_reviewed', + } + for obj, reviewer, reasons, mandatory, st in + pull_request.reviewers_statuses() + ] + } + + return data + + def set_state(self, pull_request_state, final_state=None): + """ + # goes from initial state to updating to initial state. + # initial state can be changed by specifying back_state= + with pull_request_obj.set_state(PullRequest.STATE_UPDATING): + pull_request.merge() + + :param pull_request_state: + :param final_state: + + """ + + return _SetState(self, pull_request_state, back_state=final_state) + + +class PullRequest(Base, _PullRequestBase): + __tablename__ = 'pull_requests' + __table_args__ = ( + base_table_args, + ) + + pull_request_id = Column( + 'pull_request_id', Integer(), nullable=False, primary_key=True) + + def __repr__(self): + if self.pull_request_id: + return '' % self.pull_request_id + else: + return '' % id(self) + + reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan") + statuses = relationship('ChangesetStatus', cascade="all, delete-orphan") + comments = relationship('ChangesetComment', cascade="all, delete-orphan") + versions = relationship('PullRequestVersion', cascade="all, delete-orphan", + lazy='dynamic') + + @classmethod + def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj, + internal_methods=None): + + class PullRequestDisplay(object): + """ + Special object wrapper for showing PullRequest data via Versions + It mimics PR object as close as possible. 
This is read only object + just for display + """ + + def __init__(self, attrs, internal=None): + self.attrs = attrs + # internal have priority over the given ones via attrs + self.internal = internal or ['versions'] + + def __getattr__(self, item): + if item in self.internal: + return getattr(self, item) + try: + return self.attrs[item] + except KeyError: + raise AttributeError( + '%s object has no attribute %s' % (self, item)) + + def __repr__(self): + return '' % self.attrs.get('pull_request_id') + + def versions(self): + return pull_request_obj.versions.order_by( + PullRequestVersion.pull_request_version_id).all() + + def is_closed(self): + return pull_request_obj.is_closed() + + def is_state_changing(self): + return pull_request_obj.is_state_changing() + + @property + def pull_request_version_id(self): + return getattr(pull_request_obj, 'pull_request_version_id', None) + + attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False)) + + attrs.author = StrictAttributeDict( + pull_request_obj.author.get_api_data()) + if pull_request_obj.target_repo: + attrs.target_repo = StrictAttributeDict( + pull_request_obj.target_repo.get_api_data()) + attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url + + if pull_request_obj.source_repo: + attrs.source_repo = StrictAttributeDict( + pull_request_obj.source_repo.get_api_data()) + attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url + + attrs.source_ref_parts = pull_request_obj.source_ref_parts + attrs.target_ref_parts = pull_request_obj.target_ref_parts + attrs.revisions = pull_request_obj.revisions + attrs.common_ancestor_id = pull_request_obj.common_ancestor_id + attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref + attrs.reviewer_data = org_pull_request_obj.reviewer_data + attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json + + return PullRequestDisplay(attrs, internal=internal_methods) + + def is_closed(self): + return self.status == 
self.STATUS_CLOSED + + def is_state_changing(self): + return self.pull_request_state != PullRequest.STATE_CREATED + + def __json__(self): + return { + 'revisions': self.revisions, + 'versions': self.versions_count + } + + def calculated_review_status(self): + from rhodecode.model.changeset_status import ChangesetStatusModel + return ChangesetStatusModel().calculated_review_status(self) + + def reviewers_statuses(self): + from rhodecode.model.changeset_status import ChangesetStatusModel + return ChangesetStatusModel().reviewers_statuses(self) + + @property + def workspace_id(self): + from rhodecode.model.pull_request import PullRequestModel + return PullRequestModel()._workspace_id(self) + + def get_shadow_repo(self): + workspace_id = self.workspace_id + shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id) + if os.path.isdir(shadow_repository_path): + vcs_obj = self.target_repo.scm_instance() + return vcs_obj.get_shadow_instance(shadow_repository_path) + + @property + def versions_count(self): + """ + return number of versions this PR have, e.g a PR that once been + updated will have 2 versions + """ + return self.versions.count() + 1 + + +class PullRequestVersion(Base, _PullRequestBase): + __tablename__ = 'pull_request_versions' + __table_args__ = ( + base_table_args, + ) + + pull_request_version_id = Column( + 'pull_request_version_id', Integer(), nullable=False, primary_key=True) + pull_request_id = Column( + 'pull_request_id', Integer(), + ForeignKey('pull_requests.pull_request_id'), nullable=False) + pull_request = relationship('PullRequest') + + def __repr__(self): + if self.pull_request_version_id: + return '' % self.pull_request_version_id + else: + return '' % id(self) + + @property + def reviewers(self): + return self.pull_request.reviewers + + @property + def versions(self): + return self.pull_request.versions + + def is_closed(self): + # calculate from original + return self.pull_request.status == self.STATUS_CLOSED + + def 
is_state_changing(self): + return self.pull_request.pull_request_state != PullRequest.STATE_CREATED + + def calculated_review_status(self): + return self.pull_request.calculated_review_status() + + def reviewers_statuses(self): + return self.pull_request.reviewers_statuses() + + +class PullRequestReviewers(Base, BaseModel): + __tablename__ = 'pull_request_reviewers' + __table_args__ = ( + base_table_args, + ) + + @hybrid_property + def reasons(self): + if not self._reasons: + return [] + return self._reasons + + @reasons.setter + def reasons(self, val): + val = val or [] + if any(not isinstance(x, compat.string_types) for x in val): + raise Exception('invalid reasons type, must be list of strings') + self._reasons = val + + pull_requests_reviewers_id = Column( + 'pull_requests_reviewers_id', Integer(), nullable=False, + primary_key=True) + pull_request_id = Column( + "pull_request_id", Integer(), + ForeignKey('pull_requests.pull_request_id'), nullable=False) + user_id = Column( + "user_id", Integer(), ForeignKey('users.user_id'), nullable=True) + _reasons = Column( + 'reason', MutationList.as_mutable( + JsonType('list', dialect_map=dict(mysql=UnicodeText(16384))))) + + mandatory = Column("mandatory", Boolean(), nullable=False, default=False) + user = relationship('User') + pull_request = relationship('PullRequest') + + rule_data = Column( + 'rule_data_json', + JsonType(dialect_map=dict(mysql=UnicodeText(16384)))) + + def rule_user_group_data(self): + """ + Returns the voting user group rule data for this reviewer + """ + + if self.rule_data and 'vote_rule' in self.rule_data: + user_group_data = {} + if 'rule_user_group_entry_id' in self.rule_data: + # means a group with voting rules ! 
+ user_group_data['id'] = self.rule_data['rule_user_group_entry_id'] + user_group_data['name'] = self.rule_data['rule_name'] + user_group_data['vote_rule'] = self.rule_data['vote_rule'] + + return user_group_data + + def __unicode__(self): + return u"<%s('id:%s')>" % (self.__class__.__name__, + self.pull_requests_reviewers_id) + + +class Notification(Base, BaseModel): + __tablename__ = 'notifications' + __table_args__ = ( + Index('notification_type_idx', 'type'), + base_table_args, + ) + + TYPE_CHANGESET_COMMENT = u'cs_comment' + TYPE_MESSAGE = u'message' + TYPE_MENTION = u'mention' + TYPE_REGISTRATION = u'registration' + TYPE_PULL_REQUEST = u'pull_request' + TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' + TYPE_PULL_REQUEST_UPDATE = u'pull_request_update' + + notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) + subject = Column('subject', Unicode(512), nullable=True) + body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) + created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + type_ = Column('type', Unicode(255)) + + created_by_user = relationship('User') + notifications_to_users = relationship('UserNotification', lazy='joined', + cascade="all, delete-orphan") + + @property + def recipients(self): + return [x.user for x in UserNotification.query()\ + .filter(UserNotification.notification == self)\ + .order_by(UserNotification.user_id.asc()).all()] + + @classmethod + def create(cls, created_by, subject, body, recipients, type_=None): + if type_ is None: + type_ = Notification.TYPE_MESSAGE + + notification = cls() + notification.created_by_user = created_by + notification.subject = subject + notification.body = body + notification.type_ = type_ + notification.created_on = datetime.datetime.now() + + # For each recipient link the created 
notification to his account + for u in recipients: + assoc = UserNotification() + assoc.user_id = u.user_id + assoc.notification = notification + + # if created_by is inside recipients mark his notification + # as read + if u.user_id == created_by.user_id: + assoc.read = True + Session().add(assoc) + + Session().add(notification) + + return notification + + +class UserNotification(Base, BaseModel): + __tablename__ = 'user_to_notification' + __table_args__ = ( + UniqueConstraint('user_id', 'notification_id'), + base_table_args + ) + + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) + notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) + read = Column('read', Boolean, default=False) + sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) + + user = relationship('User', lazy="joined") + notification = relationship('Notification', lazy="joined", + order_by=lambda: Notification.created_on.desc(),) + + def mark_as_read(self): + self.read = True + Session().add(self) + + +class UserNotice(Base, BaseModel): + __tablename__ = 'user_notices' + __table_args__ = ( + base_table_args + ) + + NOTIFICATION_TYPE_MESSAGE = 'message' + NOTIFICATION_TYPE_NOTICE = 'notice' + + NOTIFICATION_LEVEL_INFO = 'info' + NOTIFICATION_LEVEL_WARNING = 'warning' + NOTIFICATION_LEVEL_ERROR = 'error' + + user_notice_id = Column('gist_id', Integer(), primary_key=True) + + notice_subject = Column('notice_subject', Unicode(512), nullable=True) + notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) + + notice_read = Column('notice_read', Boolean, default=False) + + notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO) + notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE) + + notice_created_by = Column('notice_created_by', Integer(), 
ForeignKey('users.user_id'), nullable=True) + notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + user_id = Column('user_id', Integer(), ForeignKey('users.user_id')) + user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id') + + @classmethod + def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False): + + if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR, + cls.NOTIFICATION_LEVEL_WARNING, + cls.NOTIFICATION_LEVEL_INFO]: + return + + from rhodecode.model.user import UserModel + user = UserModel().get_user(user) + + new_notice = UserNotice() + if not allow_duplicate: + existing_msg = UserNotice().query() \ + .filter(UserNotice.user == user) \ + .filter(UserNotice.notice_body == body) \ + .filter(UserNotice.notice_read == false()) \ + .scalar() + if existing_msg: + log.warning('Ignoring duplicate notice for user %s', user) + return + + new_notice.user = user + new_notice.notice_subject = subject + new_notice.notice_body = body + new_notice.notification_level = notice_level + Session().add(new_notice) + Session().commit() + + +class Gist(Base, BaseModel): + __tablename__ = 'gists' + __table_args__ = ( + Index('g_gist_access_id_idx', 'gist_access_id'), + Index('g_created_on_idx', 'created_on'), + base_table_args + ) + + GIST_PUBLIC = u'public' + GIST_PRIVATE = u'private' + DEFAULT_FILENAME = u'gistfile1.txt' + + ACL_LEVEL_PUBLIC = u'acl_public' + ACL_LEVEL_PRIVATE = u'acl_private' + + gist_id = Column('gist_id', Integer(), primary_key=True) + gist_access_id = Column('gist_access_id', Unicode(250)) + gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True) + gist_expires = Column('gist_expires', Float(53), nullable=False) + gist_type = Column('gist_type', Unicode(128), nullable=False) 
+ created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + acl_level = Column('acl_level', Unicode(128), nullable=True) + + owner = relationship('User') + + def __repr__(self): + return '' % (self.gist_type, self.gist_access_id) + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.gist_description) + + @classmethod + def get_or_404(cls, id_): + from pyramid.httpexceptions import HTTPNotFound + + res = cls.query().filter(cls.gist_access_id == id_).scalar() + if not res: + raise HTTPNotFound() + return res + + @classmethod + def get_by_access_id(cls, gist_access_id): + return cls.query().filter(cls.gist_access_id == gist_access_id).scalar() + + def gist_url(self): + from rhodecode.model.gist import GistModel + return GistModel().get_url(self) + + @classmethod + def base_path(cls): + """ + Returns base path when all gists are stored + + :param cls: + """ + from rhodecode.model.gist import GIST_STORE_LOC + q = Session().query(RhodeCodeUi)\ + .filter(RhodeCodeUi.ui_key == URL_SEP) + q = q.options(FromCache("sql_cache_short", "repository_repo_path")) + return os.path.join(q.one().ui_value, GIST_STORE_LOC) + + def get_api_data(self): + """ + Common function for generating gist related data for API + """ + gist = self + data = { + 'gist_id': gist.gist_id, + 'type': gist.gist_type, + 'access_id': gist.gist_access_id, + 'description': gist.gist_description, + 'url': gist.gist_url(), + 'expires': gist.gist_expires, + 'created_on': gist.created_on, + 'modified_at': gist.modified_at, + 'content': None, + 'acl_level': gist.acl_level, + } + return data + + def __json__(self): + data = dict( + ) + data.update(self.get_api_data()) + return data + # SCM functions + + def scm_instance(self, **kwargs): + """ + Get an instance of VCS Repository + + :param kwargs: + 
""" + from rhodecode.model.gist import GistModel + full_repo_path = os.path.join(self.base_path(), self.gist_access_id) + return get_vcs_instance( + repo_path=safe_str(full_repo_path), create=False, + _vcs_alias=GistModel.vcs_backend) + + +class ExternalIdentity(Base, BaseModel): + __tablename__ = 'external_identities' + __table_args__ = ( + Index('local_user_id_idx', 'local_user_id'), + Index('external_id_idx', 'external_id'), + base_table_args + ) + + external_id = Column('external_id', Unicode(255), default=u'', primary_key=True) + external_username = Column('external_username', Unicode(1024), default=u'') + local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) + provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True) + access_token = Column('access_token', String(1024), default=u'') + alt_token = Column('alt_token', String(1024), default=u'') + token_secret = Column('token_secret', String(1024), default=u'') + + @classmethod + def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None): + """ + Returns ExternalIdentity instance based on search params + + :param external_id: + :param provider_name: + :return: ExternalIdentity + """ + query = cls.query() + query = query.filter(cls.external_id == external_id) + query = query.filter(cls.provider_name == provider_name) + if local_user_id: + query = query.filter(cls.local_user_id == local_user_id) + return query.first() + + @classmethod + def user_by_external_id_and_provider(cls, external_id, provider_name): + """ + Returns User instance based on search params + + :param external_id: + :param provider_name: + :return: User + """ + query = User.query() + query = query.filter(cls.external_id == external_id) + query = query.filter(cls.provider_name == provider_name) + query = query.filter(User.user_id == cls.local_user_id) + return query.first() + + @classmethod + def by_local_user_id(cls, local_user_id): + """ + Returns 
all tokens for user + + :param local_user_id: + :return: ExternalIdentity + """ + query = cls.query() + query = query.filter(cls.local_user_id == local_user_id) + return query + + @classmethod + def load_provider_plugin(cls, plugin_id): + from rhodecode.authentication.base import loadplugin + _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id) + auth_plugin = loadplugin(_plugin_id) + return auth_plugin + + +class Integration(Base, BaseModel): + __tablename__ = 'integrations' + __table_args__ = ( + base_table_args + ) + + integration_id = Column('integration_id', Integer(), primary_key=True) + integration_type = Column('integration_type', String(255)) + enabled = Column('enabled', Boolean(), nullable=False) + name = Column('name', String(255), nullable=False) + child_repos_only = Column('child_repos_only', Boolean(), nullable=False, + default=False) + + settings = Column( + 'settings_json', MutationObj.as_mutable( + JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) + repo_id = Column( + 'repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + repo_group_id = Column( + 'repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + @property + def scope(self): + if self.repo: + return repr(self.repo) + if self.repo_group: + if self.child_repos_only: + return repr(self.repo_group) + ' (child repos only)' + else: + return repr(self.repo_group) + ' (recursive)' + if self.child_repos_only: + return 'root_repos' + return 'global' + + def __repr__(self): + return '' % (self.integration_type, self.scope) + + +class RepoReviewRuleUser(Base, BaseModel): + __tablename__ = 'repo_review_rules_users' + __table_args__ = ( + base_table_args + ) + + repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True) + repo_review_rule_id = 
Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id')) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False) + mandatory = Column("mandatory", Boolean(), nullable=False, default=False) + user = relationship('User') + + def rule_data(self): + return { + 'mandatory': self.mandatory + } + + +class RepoReviewRuleUserGroup(Base, BaseModel): + __tablename__ = 'repo_review_rules_users_groups' + __table_args__ = ( + base_table_args + ) + + VOTE_RULE_ALL = -1 + + repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True) + repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id')) + users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False) + mandatory = Column("mandatory", Boolean(), nullable=False, default=False) + vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL) + users_group = relationship('UserGroup') + + def rule_data(self): + return { + 'mandatory': self.mandatory, + 'vote_rule': self.vote_rule + } + + @property + def vote_rule_label(self): + if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL: + return 'all must vote' + else: + return 'min. 
vote {}'.format(self.vote_rule) + + +class RepoReviewRule(Base, BaseModel): + __tablename__ = 'repo_review_rules' + __table_args__ = ( + base_table_args + ) + + repo_review_rule_id = Column( + 'repo_review_rule_id', Integer(), primary_key=True) + repo_id = Column( + "repo_id", Integer(), ForeignKey('repositories.repo_id')) + repo = relationship('Repository', backref='review_rules') + + review_rule_name = Column('review_rule_name', String(255)) + _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob + _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob + _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob + + use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False) + forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False) + forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False) + forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False) + + rule_users = relationship('RepoReviewRuleUser') + rule_user_groups = relationship('RepoReviewRuleUserGroup') + + def _validate_pattern(self, value): + re.compile('^' + glob2re(value) + '$') + + @hybrid_property + def source_branch_pattern(self): + return self._branch_pattern or '*' + + @source_branch_pattern.setter + def source_branch_pattern(self, value): + self._validate_pattern(value) + self._branch_pattern = value or '*' + + @hybrid_property + def target_branch_pattern(self): + return self._target_branch_pattern or '*' + + @target_branch_pattern.setter + def target_branch_pattern(self, value): + self._validate_pattern(value) + self._target_branch_pattern = value or '*' + + @hybrid_property + def file_pattern(self): + return 
self._file_pattern or '*' + + @file_pattern.setter + def file_pattern(self, value): + self._validate_pattern(value) + self._file_pattern = value or '*' + + def matches(self, source_branch, target_branch, files_changed): + """ + Check if this review rule matches a branch/files in a pull request + + :param source_branch: source branch name for the commit + :param target_branch: target branch name for the commit + :param files_changed: list of file paths changed in the pull request + """ + + source_branch = source_branch or '' + target_branch = target_branch or '' + files_changed = files_changed or [] + + branch_matches = True + if source_branch or target_branch: + if self.source_branch_pattern == '*': + source_branch_match = True + else: + if self.source_branch_pattern.startswith('re:'): + source_pattern = self.source_branch_pattern[3:] + else: + source_pattern = '^' + glob2re(self.source_branch_pattern) + '$' + source_branch_regex = re.compile(source_pattern) + source_branch_match = bool(source_branch_regex.search(source_branch)) + if self.target_branch_pattern == '*': + target_branch_match = True + else: + if self.target_branch_pattern.startswith('re:'): + target_pattern = self.target_branch_pattern[3:] + else: + target_pattern = '^' + glob2re(self.target_branch_pattern) + '$' + target_branch_regex = re.compile(target_pattern) + target_branch_match = bool(target_branch_regex.search(target_branch)) + + branch_matches = source_branch_match and target_branch_match + + files_matches = True + if self.file_pattern != '*': + files_matches = False + if self.file_pattern.startswith('re:'): + file_pattern = self.file_pattern[3:] + else: + file_pattern = glob2re(self.file_pattern) + file_regex = re.compile(file_pattern) + for filename in files_changed: + if file_regex.search(filename): + files_matches = True + break + + return branch_matches and files_matches + + @property + def review_users(self): + """ Returns the users which this rule applies to """ + + users = 
collections.OrderedDict() + + for rule_user in self.rule_users: + if rule_user.user.active: + if rule_user.user not in users: + users[rule_user.user.username] = { + 'user': rule_user.user, + 'source': 'user', + 'source_data': {}, + 'data': rule_user.rule_data() + } + + for rule_user_group in self.rule_user_groups: + source_data = { + 'user_group_id': rule_user_group.users_group.users_group_id, + 'name': rule_user_group.users_group.users_group_name, + 'members': len(rule_user_group.users_group.members) + } + for member in rule_user_group.users_group.members: + if member.user.active: + key = member.user.username + if key in users: + # skip this member as we have him already + # this prevents from override the "first" matched + # users with duplicates in multiple groups + continue + + users[key] = { + 'user': member.user, + 'source': 'user_group', + 'source_data': source_data, + 'data': rule_user_group.rule_data() + } + + return users + + def user_group_vote_rule(self, user_id): + + rules = [] + if not self.rule_user_groups: + return rules + + for user_group in self.rule_user_groups: + user_group_members = [x.user_id for x in user_group.users_group.members] + if user_id in user_group_members: + rules.append(user_group) + return rules + + def __repr__(self): + return '' % ( + self.repo_review_rule_id, self.repo) + + +class ScheduleEntry(Base, BaseModel): + __tablename__ = 'schedule_entries' + __table_args__ = ( + UniqueConstraint('schedule_name', name='s_schedule_name_idx'), + UniqueConstraint('task_uid', name='s_task_uid_idx'), + base_table_args, + ) + + schedule_types = ['crontab', 'timedelta', 'integer'] + schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True) + + schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None) + schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None) + schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, 
unique=None, default=True) + + _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None) + schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT())))) + + schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None) + schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0) + + # task + task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None) + task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None) + task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT())))) + task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT())))) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None) + + @hybrid_property + def schedule_type(self): + return self._schedule_type + + @schedule_type.setter + def schedule_type(self, val): + if val not in self.schedule_types: + raise ValueError('Value must be on of `{}` and got `{}`'.format( + val, self.schedule_type)) + + self._schedule_type = val + + @classmethod + def get_uid(cls, obj): + args = obj.task_args + kwargs = obj.task_kwargs + if isinstance(args, JsonRaw): + try: + args = json.loads(args) + except ValueError: + args = tuple() + + if isinstance(kwargs, JsonRaw): + try: + kwargs = json.loads(kwargs) + except ValueError: + kwargs = dict() + + dot_notation = obj.task_dot_notation + val = '.'.join(map(safe_str, [ + sorted(dot_notation), args, sorted(kwargs.items())])) + return hashlib.sha1(val).hexdigest() + + @classmethod + def 
get_by_schedule_name(cls, schedule_name): + return cls.query().filter(cls.schedule_name == schedule_name).scalar() + + @classmethod + def get_by_schedule_id(cls, schedule_id): + return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar() + + @property + def task(self): + return self.task_dot_notation + + @property + def schedule(self): + from rhodecode.lib.celerylib.utils import raw_2_schedule + schedule = raw_2_schedule(self.schedule_definition, self.schedule_type) + return schedule + + @property + def args(self): + try: + return list(self.task_args or []) + except ValueError: + return list() + + @property + def kwargs(self): + try: + return dict(self.task_kwargs or {}) + except ValueError: + return dict() + + def _as_raw(self, val): + if hasattr(val, 'de_coerce'): + val = val.de_coerce() + if val: + val = json.dumps(val) + + return val + + @property + def schedule_definition_raw(self): + return self._as_raw(self.schedule_definition) + + @property + def args_raw(self): + return self._as_raw(self.task_args) + + @property + def kwargs_raw(self): + return self._as_raw(self.task_kwargs) + + def __repr__(self): + return ''.format( + self.schedule_entry_id, self.schedule_name) + + +@event.listens_for(ScheduleEntry, 'before_update') +def update_task_uid(mapper, connection, target): + target.task_uid = ScheduleEntry.get_uid(target) + + +@event.listens_for(ScheduleEntry, 'before_insert') +def set_task_uid(mapper, connection, target): + target.task_uid = ScheduleEntry.get_uid(target) + + +class _BaseBranchPerms(BaseModel): + @classmethod + def compute_hash(cls, value): + return sha1_safe(value) + + @hybrid_property + def branch_pattern(self): + return self._branch_pattern or '*' + + @hybrid_property + def branch_hash(self): + return self._branch_hash + + def _validate_glob(self, value): + re.compile('^' + glob2re(value) + '$') + + @branch_pattern.setter + def branch_pattern(self, value): + self._validate_glob(value) + self._branch_pattern = value or '*' + # set 
the Hash when setting the branch pattern + self._branch_hash = self.compute_hash(self._branch_pattern) + + def matches(self, branch): + """ + Check if this the branch matches entry + + :param branch: branch name for the commit + """ + + branch = branch or '' + + branch_matches = True + if branch: + branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$') + branch_matches = bool(branch_regex.search(branch)) + + return branch_matches + + +class UserToRepoBranchPermission(Base, _BaseBranchPerms): + __tablename__ = 'user_to_repo_branch_permissions' + __table_args__ = ( + base_table_args + ) + + branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) + + repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + repo = relationship('Repository', backref='user_branch_perms') + + permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + permission = relationship('Permission') + + rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None) + user_repo_to_perm = relationship('UserRepoToPerm') + + rule_order = Column('rule_order', Integer(), nullable=False) + _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob + _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql')) + + def __unicode__(self): + return u' %r)>' % ( + self.user_repo_to_perm, self.branch_pattern) + + +class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms): + __tablename__ = 'user_group_to_repo_branch_permissions' + __table_args__ = ( + base_table_args + ) + + branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) + + repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, 
unique=None, default=None) + repo = relationship('Repository', backref='user_group_branch_perms') + + permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + permission = relationship('Permission') + + rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None) + user_group_repo_to_perm = relationship('UserGroupRepoToPerm') + + rule_order = Column('rule_order', Integer(), nullable=False) + _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob + _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql')) + + def __unicode__(self): + return u' %r)>' % ( + self.user_group_repo_to_perm, self.branch_pattern) + + +class UserBookmark(Base, BaseModel): + __tablename__ = 'user_bookmarks' + __table_args__ = ( + UniqueConstraint('user_id', 'bookmark_repo_id'), + UniqueConstraint('user_id', 'bookmark_repo_group_id'), + UniqueConstraint('user_id', 'bookmark_position'), + base_table_args + ) + + user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + position = Column("bookmark_position", Integer(), nullable=False) + title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None) + redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None) + created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None) + bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), 
ForeignKey("groups.group_id"), nullable=True, unique=None, default=None) + + user = relationship("User") + + repository = relationship("Repository") + repository_group = relationship("RepoGroup") + + @classmethod + def get_by_position_for_user(cls, position, user_id): + return cls.query() \ + .filter(UserBookmark.user_id == user_id) \ + .filter(UserBookmark.position == position).scalar() + + @classmethod + def get_bookmarks_for_user(cls, user_id, cache=True): + bookmarks = cls.query() \ + .filter(UserBookmark.user_id == user_id) \ + .options(joinedload(UserBookmark.repository)) \ + .options(joinedload(UserBookmark.repository_group)) \ + .order_by(UserBookmark.position.asc()) + + if cache: + bookmarks = bookmarks.options( + FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id)) + ) + + return bookmarks.all() + + def __unicode__(self): + return u'' % (self.position, self.redirect_url) + + +class FileStore(Base, BaseModel): + __tablename__ = 'file_store' + __table_args__ = ( + base_table_args + ) + + file_store_id = Column('file_store_id', Integer(), primary_key=True) + file_uid = Column('file_uid', String(1024), nullable=False) + file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True) + file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True) + file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False) + + # sha256 hash + file_hash = Column('file_hash', String(512), nullable=False) + file_size = Column('file_size', BigInteger(), nullable=False) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True) + accessed_count = Column('accessed_count', Integer(), default=0) + + enabled = Column('enabled', Boolean(), nullable=False, default=True) + + # if 
repo/repo_group reference is set, check for permissions + check_acl = Column('check_acl', Boolean(), nullable=False, default=True) + + # hidden defines an attachment that should be hidden from showing in artifact listing + hidden = Column('hidden', Boolean(), nullable=False, default=False) + + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) + upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id') + + file_metadata = relationship('FileStoreMetadata', lazy='joined') + + # scope limited to user, which requester have access to + scope_user_id = Column( + 'scope_user_id', Integer(), ForeignKey('users.user_id'), + nullable=True, unique=None, default=None) + user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id') + + # scope limited to user group, which requester have access to + scope_user_group_id = Column( + 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'), + nullable=True, unique=None, default=None) + user_group = relationship('UserGroup', lazy='joined') + + # scope limited to repo, which requester have access to + scope_repo_id = Column( + 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + # scope limited to repo group, which requester have access to + scope_repo_group_id = Column( + 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + @classmethod + def get_by_store_uid(cls, file_store_uid): + return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar() + + @classmethod + def create(cls, file_uid, filename, file_hash, file_size, file_display_name='', + file_description='', enabled=True, hidden=False, check_acl=True, + user_id=None, scope_user_id=None, scope_repo_id=None, 
scope_repo_group_id=None): + + store_entry = FileStore() + store_entry.file_uid = file_uid + store_entry.file_display_name = file_display_name + store_entry.file_org_name = filename + store_entry.file_size = file_size + store_entry.file_hash = file_hash + store_entry.file_description = file_description + + store_entry.check_acl = check_acl + store_entry.enabled = enabled + store_entry.hidden = hidden + + store_entry.user_id = user_id + store_entry.scope_user_id = scope_user_id + store_entry.scope_repo_id = scope_repo_id + store_entry.scope_repo_group_id = scope_repo_group_id + + return store_entry + + @classmethod + def store_metadata(cls, file_store_id, args, commit=True): + file_store = FileStore.get(file_store_id) + if file_store is None: + return + + for section, key, value, value_type in args: + has_key = FileStoreMetadata().query() \ + .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \ + .filter(FileStoreMetadata.file_store_meta_section == section) \ + .filter(FileStoreMetadata.file_store_meta_key == key) \ + .scalar() + if has_key: + msg = 'key `{}` already defined under section `{}` for this file.'\ + .format(key, section) + raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key) + + # NOTE(marcink): raises ArtifactMetadataBadValueType + FileStoreMetadata.valid_value_type(value_type) + + meta_entry = FileStoreMetadata() + meta_entry.file_store = file_store + meta_entry.file_store_meta_section = section + meta_entry.file_store_meta_key = key + meta_entry.file_store_meta_value_type = value_type + meta_entry.file_store_meta_value = value + + Session().add(meta_entry) + + try: + if commit: + Session().commit() + except IntegrityError: + Session().rollback() + raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.') + + @classmethod + def bump_access_counter(cls, file_uid, commit=True): + FileStore().query()\ + .filter(FileStore.file_uid == file_uid)\ + .update({FileStore.accessed_count: 
(FileStore.accessed_count + 1), + FileStore.accessed_on: datetime.datetime.now()}) + if commit: + Session().commit() + + def __json__(self): + data = { + 'filename': self.file_display_name, + 'filename_org': self.file_org_name, + 'file_uid': self.file_uid, + 'description': self.file_description, + 'hidden': self.hidden, + 'size': self.file_size, + 'created_on': self.created_on, + 'uploaded_by': self.upload_user.get_api_data(details='basic'), + 'downloaded_times': self.accessed_count, + 'sha256': self.file_hash, + 'metadata': self.file_metadata, + } + + return data + + def __repr__(self): + return ''.format(self.file_store_id) + + +class FileStoreMetadata(Base, BaseModel): + __tablename__ = 'file_store_metadata' + __table_args__ = ( + UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'), + Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255), + Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255), + base_table_args + ) + SETTINGS_TYPES = { + 'str': safe_str, + 'int': safe_int, + 'unicode': safe_unicode, + 'bool': str2bool, + 'list': functools.partial(aslist, sep=',') + } + + file_store_meta_id = Column( + "file_store_meta_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + _file_store_meta_section = Column( + "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_section_hash = Column( + "file_store_meta_section_hash", String(255), + nullable=True, unique=None, default=None) + _file_store_meta_key = Column( + "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_key_hash = Column( + "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None) + _file_store_meta_value = Column( + "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 
'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_value_type = Column( + "file_store_meta_value_type", String(255), nullable=True, unique=None, + default='unicode') + + file_store_id = Column( + 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'), + nullable=True, unique=None, default=None) + + file_store = relationship('FileStore', lazy='joined') + + @classmethod + def valid_value_type(cls, value): + if value.split('.')[0] not in cls.SETTINGS_TYPES: + raise ArtifactMetadataBadValueType( + 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value)) + + @hybrid_property + def file_store_meta_section(self): + return self._file_store_meta_section + + @file_store_meta_section.setter + def file_store_meta_section(self, value): + self._file_store_meta_section = value + self._file_store_meta_section_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_key(self): + return self._file_store_meta_key + + @file_store_meta_key.setter + def file_store_meta_key(self, value): + self._file_store_meta_key = value + self._file_store_meta_key_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_value(self): + val = self._file_store_meta_value + + if self._file_store_meta_value_type: + # e.g unicode.encrypted == unicode + _type = self._file_store_meta_value_type.split('.')[0] + # decode the encrypted value if it's encrypted field type + if '.encrypted' in self._file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_result_value(val, None)) + # do final type conversion + converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode'] + val = converter(val) + + return val + + @file_store_meta_value.setter + def file_store_meta_value(self, val): + val = safe_unicode(val) + # encode the encrypted value + if '.encrypted' in self.file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + 
self._file_store_meta_value = val + + @hybrid_property + def file_store_meta_value_type(self): + return self._file_store_meta_value_type + + @file_store_meta_value_type.setter + def file_store_meta_value_type(self, val): + # e.g unicode.encrypted + self.valid_value_type(val) + self._file_store_meta_value_type = val + + def __json__(self): + data = { + 'artifact': self.file_store.file_uid, + 'section': self.file_store_meta_section, + 'key': self.file_store_meta_key, + 'value': self.file_store_meta_value, + } + + return data + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section, + self.file_store_meta_key, self.file_store_meta_value) + + +class DbMigrateVersion(Base, BaseModel): + __tablename__ = 'db_migrate_version' + __table_args__ = ( + base_table_args, + ) + + repository_id = Column('repository_id', String(250), primary_key=True) + repository_path = Column('repository_path', Text) + version = Column('version', Integer) + + @classmethod + def set_version(cls, version): + """ + Helper for forcing a different version, usually for debugging purposes via ishell. 
+ """ + ver = DbMigrateVersion.query().first() + ver.version = version + Session().commit() + + +class DbSession(Base, BaseModel): + __tablename__ = 'db_session' + __table_args__ = ( + base_table_args, + ) + + def __repr__(self): + return ''.format(self.id) + + id = Column('id', Integer()) + namespace = Column('namespace', String(255), primary_key=True) + accessed = Column('accessed', DateTime, nullable=False) + created = Column('created', DateTime, nullable=False) + data = Column('data', PickleType, nullable=False) diff --git a/rhodecode/lib/dbmigrate/versions/108_version_4_19_1.py b/rhodecode/lib/dbmigrate/versions/108_version_4_19_1.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/108_version_4_19_1.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +import logging +from sqlalchemy import * + +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import BigInteger + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption + + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. 
+ Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_19_0_2 as db + + init_model_encryption(db) + db.ChangesetCommentHistory().__table__.create() + + +def downgrade(migrate_engine): + meta = MetaData() + meta.bind = migrate_engine + + +def fixups(models, _SESSION): + pass diff --git a/rhodecode/lib/exc_tracking.py b/rhodecode/lib/exc_tracking.py --- a/rhodecode/lib/exc_tracking.py +++ b/rhodecode/lib/exc_tracking.py @@ -143,8 +143,7 @@ def send_exc_email(request, exc_id, exc_ 'exc_traceback': read_exception(exc_id, prefix=None), } - (subject, headers, email_body, - email_body_plaintext) = EmailNotificationModel().render_email( + (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( EmailNotificationModel.TYPE_EMAIL_EXCEPTION, **email_kwargs) run_task(tasks.send_email, recipients, subject, diff --git a/rhodecode/lib/exceptions.py b/rhodecode/lib/exceptions.py --- a/rhodecode/lib/exceptions.py +++ b/rhodecode/lib/exceptions.py @@ -177,3 +177,7 @@ class ArtifactMetadataDuplicate(ValueErr class ArtifactMetadataBadValueType(ValueError): pass + + +class CommentVersionMismatch(ValueError): + pass diff --git a/rhodecode/lib/helpers.py b/rhodecode/lib/helpers.py --- a/rhodecode/lib/helpers.py +++ b/rhodecode/lib/helpers.py @@ -24,6 +24,7 @@ Helper functions Consists of functions to typically be used within templates, but also available to Controllers. This module is available to both as 'h'. 
""" +import base64 import os import random @@ -52,7 +53,7 @@ from pygments.lexers import ( get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) from pyramid.threadlocal import get_current_request - +from tempita import looper from webhelpers2.html import literal, HTML, escape from webhelpers2.html._autolink import _auto_link_urls from webhelpers2.html.tools import ( @@ -85,10 +86,11 @@ from rhodecode.lib.utils2 import ( from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit +from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS from rhodecode.lib.index.search_utils import get_matching_line_offsets from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT from rhodecode.model.changeset_status import ChangesetStatusModel -from rhodecode.model.db import Permission, User, Repository +from rhodecode.model.db import Permission, User, Repository, UserApiKeys from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.settings import IssueTrackerSettingsModel @@ -783,13 +785,24 @@ flash = Flash() # SCM FILTERS available via h. 
#============================================================================== from rhodecode.lib.vcs.utils import author_name, author_email -from rhodecode.lib.utils2 import credentials_filter, age, age_from_seconds +from rhodecode.lib.utils2 import age, age_from_seconds from rhodecode.model.db import User, ChangesetStatus -capitalize = lambda x: x.capitalize() + email = author_email -short_id = lambda x: x[:12] -hide_credentials = lambda x: ''.join(credentials_filter(x)) + + +def capitalize(raw_text): + return raw_text.capitalize() + + +def short_id(long_id): + return long_id[:12] + + +def hide_credentials(url): + from rhodecode.lib.utils2 import credentials_filter + return credentials_filter(url) import pytz @@ -948,7 +961,7 @@ def link_to_user(author, length=0, **kwa if length: display_person = shorter(display_person, length) - if user: + if user and user.username != user.DEFAULT_USER: return link_to( escape(display_person), route_path('user_profile', username=user.username), @@ -1341,7 +1354,7 @@ class InitialsGravatar(object): def generate_svg(self, svg_type=None): img_data = self.get_img_data(svg_type) - return "data:image/svg+xml;base64,%s" % img_data.encode('base64') + return "data:image/svg+xml;base64,%s" % base64.b64encode(img_data) def initials_gravatar(email_address, first_name, last_name, size=30): diff --git a/rhodecode/lib/hooks_base.py b/rhodecode/lib/hooks_base.py --- a/rhodecode/lib/hooks_base.py +++ b/rhodecode/lib/hooks_base.py @@ -400,7 +400,7 @@ pre_create_user = ExtensionCallback( 'admin', 'created_by')) -log_create_pull_request = ExtensionCallback( +create_pull_request = ExtensionCallback( hook_name='CREATE_PULL_REQUEST', kwargs_keys=( 'server_url', 'config', 'scm', 'username', 'ip', 'action', @@ -409,7 +409,7 @@ log_create_pull_request = ExtensionCallb 'mergeable', 'source', 'target', 'author', 'reviewers')) -log_merge_pull_request = ExtensionCallback( +merge_pull_request = ExtensionCallback( hook_name='MERGE_PULL_REQUEST', kwargs_keys=( 
'server_url', 'config', 'scm', 'username', 'ip', 'action', @@ -418,7 +418,7 @@ log_merge_pull_request = ExtensionCallba 'mergeable', 'source', 'target', 'author', 'reviewers')) -log_close_pull_request = ExtensionCallback( +close_pull_request = ExtensionCallback( hook_name='CLOSE_PULL_REQUEST', kwargs_keys=( 'server_url', 'config', 'scm', 'username', 'ip', 'action', @@ -427,7 +427,7 @@ log_close_pull_request = ExtensionCallba 'mergeable', 'source', 'target', 'author', 'reviewers')) -log_review_pull_request = ExtensionCallback( +review_pull_request = ExtensionCallback( hook_name='REVIEW_PULL_REQUEST', kwargs_keys=( 'server_url', 'config', 'scm', 'username', 'ip', 'action', @@ -436,7 +436,7 @@ log_review_pull_request = ExtensionCallb 'mergeable', 'source', 'target', 'author', 'reviewers')) -log_comment_pull_request = ExtensionCallback( +comment_pull_request = ExtensionCallback( hook_name='COMMENT_PULL_REQUEST', kwargs_keys=( 'server_url', 'config', 'scm', 'username', 'ip', 'action', @@ -445,7 +445,16 @@ log_comment_pull_request = ExtensionCall 'mergeable', 'source', 'target', 'author', 'reviewers')) -log_update_pull_request = ExtensionCallback( +comment_edit_pull_request = ExtensionCallback( + hook_name='COMMENT_EDIT_PULL_REQUEST', + kwargs_keys=( + 'server_url', 'config', 'scm', 'username', 'ip', 'action', + 'repository', 'pull_request_id', 'url', 'title', 'description', + 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', + 'mergeable', 'source', 'target', 'author', 'reviewers')) + + +update_pull_request = ExtensionCallback( hook_name='UPDATE_PULL_REQUEST', kwargs_keys=( 'server_url', 'config', 'scm', 'username', 'ip', 'action', @@ -454,7 +463,7 @@ log_update_pull_request = ExtensionCallb 'mergeable', 'source', 'target', 'author', 'reviewers')) -log_create_user = ExtensionCallback( +create_user = ExtensionCallback( hook_name='CREATE_USER_HOOK', kwargs_keys=( 'username', 'full_name_or_username', 'full_contact', 'user_id', @@ -465,7 +474,7 
@@ log_create_user = ExtensionCallback( 'inherit_default_permissions', 'created_by', 'created_on')) -log_delete_user = ExtensionCallback( +delete_user = ExtensionCallback( hook_name='DELETE_USER_HOOK', kwargs_keys=( 'username', 'full_name_or_username', 'full_contact', 'user_id', @@ -476,7 +485,7 @@ log_delete_user = ExtensionCallback( 'inherit_default_permissions', 'deleted_by')) -log_create_repository = ExtensionCallback( +create_repository = ExtensionCallback( hook_name='CREATE_REPO_HOOK', kwargs_keys=( 'repo_name', 'repo_type', 'description', 'private', 'created_on', @@ -484,7 +493,7 @@ log_create_repository = ExtensionCallbac 'clone_uri', 'fork_id', 'group_id', 'created_by')) -log_delete_repository = ExtensionCallback( +delete_repository = ExtensionCallback( hook_name='DELETE_REPO_HOOK', kwargs_keys=( 'repo_name', 'repo_type', 'description', 'private', 'created_on', @@ -492,7 +501,7 @@ log_delete_repository = ExtensionCallbac 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) -log_comment_commit_repository = ExtensionCallback( +comment_commit_repository = ExtensionCallback( hook_name='COMMENT_COMMIT_REPO_HOOK', kwargs_keys=( 'repo_name', 'repo_type', 'description', 'private', 'created_on', @@ -500,8 +509,16 @@ log_comment_commit_repository = Extensio 'clone_uri', 'fork_id', 'group_id', 'repository', 'created_by', 'comment', 'commit')) +comment_edit_commit_repository = ExtensionCallback( + hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK', + kwargs_keys=( + 'repo_name', 'repo_type', 'description', 'private', 'created_on', + 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', + 'clone_uri', 'fork_id', 'group_id', + 'repository', 'created_by', 'comment', 'commit')) -log_create_repository_group = ExtensionCallback( + +create_repository_group = ExtensionCallback( hook_name='CREATE_REPO_GROUP_HOOK', kwargs_keys=( 'group_name', 'group_parent_id', 'group_description', diff --git a/rhodecode/lib/hooks_utils.py b/rhodecode/lib/hooks_utils.py --- 
a/rhodecode/lib/hooks_utils.py +++ b/rhodecode/lib/hooks_utils.py @@ -94,7 +94,34 @@ def trigger_comment_commit_hooks(usernam extras.commit = commit.serialize() extras.comment = comment.get_api_data() extras.created_by = username - hooks_base.log_comment_commit_repository(**extras) + hooks_base.comment_commit_repository(**extras) + + +def trigger_comment_commit_edit_hooks(username, repo_name, repo_type, repo, data=None): + """ + Triggers when a comment is edited on a commit + + :param username: username who edits the comment + :param repo_name: name of target repo + :param repo_type: the type of SCM target repo + :param repo: the repo object we trigger the event for + :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj} + """ + if not _supports_repo_type(repo_type): + return + + extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit') + + comment = data['comment'] + commit = data['commit'] + + events.trigger(events.RepoCommitCommentEditEvent(repo, commit, comment)) + extras.update(repo.get_dict()) + + extras.commit = commit.serialize() + extras.comment = comment.get_api_data() + extras.created_by = username + hooks_base.comment_edit_commit_repository(**extras) def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): @@ -113,7 +140,7 @@ def trigger_create_pull_request_hook(use extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request') events.trigger(events.PullRequestCreateEvent(pull_request)) extras.update(pull_request.get_api_data(with_merge_state=False)) - hooks_base.log_create_pull_request(**extras) + hooks_base.create_pull_request(**extras) def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): @@ -132,7 +159,7 @@ def trigger_merge_pull_request_hook(user extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request') 
events.trigger(events.PullRequestMergeEvent(pull_request)) extras.update(pull_request.get_api_data()) - hooks_base.log_merge_pull_request(**extras) + hooks_base.merge_pull_request(**extras) def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): @@ -151,7 +178,7 @@ def trigger_close_pull_request_hook(user extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request') events.trigger(events.PullRequestCloseEvent(pull_request)) extras.update(pull_request.get_api_data()) - hooks_base.log_close_pull_request(**extras) + hooks_base.close_pull_request(**extras) def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): @@ -171,7 +198,7 @@ def trigger_review_pull_request_hook(use status = data.get('status') events.trigger(events.PullRequestReviewEvent(pull_request, status)) extras.update(pull_request.get_api_data()) - hooks_base.log_review_pull_request(**extras) + hooks_base.review_pull_request(**extras) def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): @@ -193,7 +220,29 @@ def trigger_comment_pull_request_hook(us events.trigger(events.PullRequestCommentEvent(pull_request, comment)) extras.update(pull_request.get_api_data()) extras.comment = comment.get_api_data() - hooks_base.log_comment_pull_request(**extras) + hooks_base.comment_pull_request(**extras) + + +def trigger_comment_pull_request_edit_hook(username, repo_name, repo_type, pull_request, data=None): + """ + Triggers when a comment was edited on a pull request + + :param username: username who made the edit + :param repo_name: name of target repo + :param repo_type: the type of SCM target repo + :param pull_request: the pull request that comment was made on + :param data: extra data for specific events e.g {'comment': comment_obj} + """ + if not _supports_repo_type(repo_type): + return + + extras = _get_vcs_operation_context(username, repo_name, repo_type, 
'comment_pull_request') + + comment = data['comment'] + events.trigger(events.PullRequestCommentEditEvent(pull_request, comment)) + extras.update(pull_request.get_api_data()) + extras.comment = comment.get_api_data() + hooks_base.comment_edit_pull_request(**extras) def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): @@ -212,4 +261,4 @@ def trigger_update_pull_request_hook(use extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request') events.trigger(events.PullRequestUpdateEvent(pull_request)) extras.update(pull_request.get_api_data()) - hooks_base.log_update_pull_request(**extras) + hooks_base.update_pull_request(**extras) diff --git a/rhodecode/lib/html_filters.py b/rhodecode/lib/html_filters.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/html_filters.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2020-2020 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +## base64 filter e.g ${ example | base64,n } +def base64(text): + import base64 + from rhodecode.lib.helpers import safe_str + return base64.encodestring(safe_str(text)) diff --git a/rhodecode/lib/rc_cache/__init__.py b/rhodecode/lib/rc_cache/__init__.py --- a/rhodecode/lib/rc_cache/__init__.py +++ b/rhodecode/lib/rc_cache/__init__.py @@ -48,6 +48,7 @@ from .utils import ( FILE_TREE_CACHE_VER = 'v4' +LICENSE_CACHE_VER = 'v2' def configure_dogpile_cache(settings): diff --git a/rhodecode/lib/rc_cache/backends.py b/rhodecode/lib/rc_cache/backends.py --- a/rhodecode/lib/rc_cache/backends.py +++ b/rhodecode/lib/rc_cache/backends.py @@ -159,7 +159,14 @@ class FileNamespaceBackend(PickleSeriali def __init__(self, arguments): arguments['lock_factory'] = CustomLockFactory - super(FileNamespaceBackend, self).__init__(arguments) + db_file = arguments.get('filename') + + log.debug('initialing %s DB in %s', self.__class__.__name__, db_file) + try: + super(FileNamespaceBackend, self).__init__(arguments) + except Exception: + log.error('Failed to initialize db at: %s', db_file) + raise def __repr__(self): return '{} `{}`'.format(self.__class__, self.filename) diff --git a/rhodecode/lib/utils.py b/rhodecode/lib/utils.py --- a/rhodecode/lib/utils.py +++ b/rhodecode/lib/utils.py @@ -30,6 +30,7 @@ import os import re import sys import shutil +import socket import tempfile import traceback import tarfile @@ -782,3 +783,18 @@ def generate_platform_uuid(): except Exception as e: log.error('Failed to generate host uuid: %s', e) return 'UNDEFINED' + + +def send_test_email(recipients, email_body='TEST EMAIL'): + """ + Simple code for generating test emails. 
+ Usage:: + + from rhodecode.lib import utils + utils.send_test_email() + """ + from rhodecode.lib.celerylib import tasks, run_task + + email_body = email_body_plaintext = email_body + subject = 'SUBJECT FROM: {}'.format(socket.gethostname()) + tasks.send_email(recipients, subject, email_body_plaintext, email_body) diff --git a/rhodecode/lib/vcs/backends/hg/repository.py b/rhodecode/lib/vcs/backends/hg/repository.py --- a/rhodecode/lib/vcs/backends/hg/repository.py +++ b/rhodecode/lib/vcs/backends/hg/repository.py @@ -628,34 +628,42 @@ class MercurialRepository(BaseRepository push_branches=push_branches) def _local_merge(self, target_ref, merge_message, user_name, user_email, - source_ref, use_rebase=False, dry_run=False): + source_ref, use_rebase=False, close_commit_id=None, dry_run=False): """ Merge the given source_revision into the checked out revision. Returns the commit id of the merge and a boolean indicating if the commit needs to be pushed. """ - self._update(target_ref.commit_id, clean=True) + source_ref_commit_id = source_ref.commit_id + target_ref_commit_id = target_ref.commit_id - ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id) + # update our workdir to target ref, for proper merge + self._update(target_ref_commit_id, clean=True) + + ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id) is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) - if ancestor == source_ref.commit_id: - # Nothing to do, the changes were already integrated - return target_ref.commit_id, False + if close_commit_id: + # NOTE(marcink): if we get the close commit, this is our new source + # which will include the close commit itself. 
+ source_ref_commit_id = close_commit_id - elif ancestor == target_ref.commit_id and is_the_same_branch: + if ancestor == source_ref_commit_id: + # Nothing to do, the changes were already integrated + return target_ref_commit_id, False + + elif ancestor == target_ref_commit_id and is_the_same_branch: # In this case we should force a commit message - return source_ref.commit_id, True + return source_ref_commit_id, True unresolved = None if use_rebase: try: - bookmark_name = 'rcbook%s%s' % (source_ref.commit_id, - target_ref.commit_id) + bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id) self.bookmark(bookmark_name, revision=source_ref.commit_id) self._remote.rebase( - source=source_ref.commit_id, dest=target_ref.commit_id) + source=source_ref_commit_id, dest=target_ref_commit_id) self._remote.invalidate_vcs_cache() self._update(bookmark_name, clean=True) return self._identify(), True @@ -678,7 +686,7 @@ class MercurialRepository(BaseRepository raise else: try: - self._remote.merge(source_ref.commit_id) + self._remote.merge(source_ref_commit_id) self._remote.invalidate_vcs_cache() self._remote.commit( message=safe_str(merge_message), @@ -820,10 +828,12 @@ class MercurialRepository(BaseRepository needs_push = False if merge_possible: + try: merge_commit_id, needs_push = shadow_repo._local_merge( target_ref, merge_message, merger_name, merger_email, - source_ref, use_rebase=use_rebase, dry_run=dry_run) + source_ref, use_rebase=use_rebase, + close_commit_id=close_commit_id, dry_run=dry_run) merge_possible = True # read the state of the close action, if it diff --git a/rhodecode/lib/vcs/conf/settings.py b/rhodecode/lib/vcs/conf/settings.py --- a/rhodecode/lib/vcs/conf/settings.py +++ b/rhodecode/lib/vcs/conf/settings.py @@ -41,7 +41,7 @@ BACKENDS = { ARCHIVE_SPECS = [ - ('tbz2', 'application/x-bzip2', 'tbz2'), + ('tbz2', 'application/x-bzip2', '.tbz2'), ('tbz2', 'application/x-bzip2', '.tar.bz2'), ('tgz', 'application/x-gzip', '.tgz'), diff --git 
a/rhodecode/model/comment.py b/rhodecode/model/comment.py --- a/rhodecode/model/comment.py +++ b/rhodecode/model/comment.py @@ -21,6 +21,7 @@ """ comments model for RhodeCode """ +import datetime import logging import traceback @@ -32,10 +33,17 @@ from sqlalchemy.sql.functions import coa from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils from rhodecode.lib import audit_logger -from rhodecode.lib.utils2 import extract_mentioned_users, safe_str +from rhodecode.lib.exceptions import CommentVersionMismatch +from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int from rhodecode.model import BaseModel from rhodecode.model.db import ( - ChangesetComment, User, Notification, PullRequest, AttributeDict) + ChangesetComment, + User, + Notification, + PullRequest, + AttributeDict, + ChangesetCommentHistory, +) from rhodecode.model.notification import NotificationModel from rhodecode.model.meta import Session from rhodecode.model.settings import VcsSettingsModel @@ -362,13 +370,18 @@ class CommentsModel(BaseModel): repo.repo_name, h.route_url('repo_summary', repo_name=repo.repo_name)) + commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, + commit_id=commit_id) + # commit specifics kwargs.update({ 'commit': commit_obj, 'commit_message': commit_obj.message, 'commit_target_repo_url': target_repo_url, 'commit_comment_url': commit_comment_url, - 'commit_comment_reply_url': commit_comment_reply_url + 'commit_comment_reply_url': commit_comment_reply_url, + 'commit_url': commit_url, + 'thread_ids': [commit_url, commit_comment_url], }) elif pull_request_obj: @@ -413,15 +426,14 @@ class CommentsModel(BaseModel): 'pr_comment_url': pr_comment_url, 'pr_comment_reply_url': pr_comment_reply_url, 'pr_closing': closing_pr, + 'thread_ids': [pr_url, pr_comment_url], }) recipients += [self._get_user(u) for u in (extra_recipients or [])] if send_email: # pre-generate the subject for notification itself - (subject, - _h, _e, # we don't care 
about those - body_plaintext) = EmailNotificationModel().render_email( + (subject, _e, body_plaintext) = EmailNotificationModel().render_email( notification_type, **kwargs) mention_recipients = set( @@ -479,6 +491,60 @@ class CommentsModel(BaseModel): return comment + def edit(self, comment_id, text, auth_user, version): + """ + Change existing comment for commit or pull request. + + :param comment_id: + :param text: + :param auth_user: current authenticated user calling this method + :param version: last comment version + """ + if not text: + log.warning('Missing text for comment, skipping...') + return + + comment = ChangesetComment.get(comment_id) + old_comment_text = comment.text + comment.text = text + comment.modified_at = datetime.datetime.now() + version = safe_int(version) + + # NOTE(marcink): this returns initial comment + edits, so v2 from ui + # would return 3 here + comment_version = ChangesetCommentHistory.get_version(comment_id) + + if isinstance(version, (int, long)) and (comment_version - version) != 1: + log.warning( + 'Version mismatch comment_version {} submitted {}, skipping'.format( + comment_version-1, # -1 since note above + version + ) + ) + raise CommentVersionMismatch() + + comment_history = ChangesetCommentHistory() + comment_history.comment_id = comment_id + comment_history.version = comment_version + comment_history.created_by_user_id = auth_user.user_id + comment_history.text = old_comment_text + # TODO add email notification + Session().add(comment_history) + Session().add(comment) + Session().flush() + + if comment.pull_request: + action = 'repo.pull_request.comment.edit' + else: + action = 'repo.commit.comment.edit' + + comment_data = comment.get_api_data() + comment_data['old_comment_text'] = old_comment_text + self._log_audit_action( + action, {'data': comment_data}, auth_user, comment) + + return comment_history + def delete(self, comment, auth_user): """ Deletes given comment @@ -712,6 +778,7 @@ class CommentsModel(BaseModel): 
.filter(ChangesetComment.line_no == None)\ .filter(ChangesetComment.f_path == None)\ .filter(ChangesetComment.pull_request == pull_request) + return comments @staticmethod @@ -726,8 +793,7 @@ class CommentsModel(BaseModel): if action == 'create': trigger_hook = hooks_utils.trigger_comment_commit_hooks elif action == 'edit': - # TODO(dan): when this is supported we trigger edit hook too - return + trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks else: return diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -103,7 +103,12 @@ def display_user_sort(obj): if obj.username == User.DEFAULT_USER: return '#####' prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') - return prefix + obj.username + extra_sort_num = '1' # default + + # NOTE(dan): inactive duplicates goes last + if getattr(obj, 'duplicate_perm', None): + extra_sort_num = '9' + return prefix + extra_sort_num + obj.username def display_user_group_sort(obj): @@ -1128,14 +1133,16 @@ class UserApiKeys(Base, BaseModel): # ApiKey role ROLE_ALL = 'token_role_all' - ROLE_HTTP = 'token_role_http' ROLE_VCS = 'token_role_vcs' ROLE_API = 'token_role_api' + ROLE_HTTP = 'token_role_http' ROLE_FEED = 'token_role_feed' ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download' + # The last one is ignored in the list as we only + # use it for one action, and cannot be created by users ROLE_PASSWORD_RESET = 'token_password_reset' - ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD] + ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD] user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) @@ -1200,6 +1207,22 @@ class UserApiKeys(Base, BaseModel): cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'), }.get(role, 
role) + @classmethod + def _get_role_description(cls, role): + return { + cls.ROLE_ALL: _('Token for all actions.'), + cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without ' + 'login using `api_access_controllers_whitelist` functionality.'), + cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. ' + 'Requires auth_token authentication plugin to be active.
' + 'Such Token should be used then instead of a password to ' + 'interact with a repository, and additionally can be ' + 'limited to single repository using repo scope.'), + cls.ROLE_API: _('Token limited to api calls.'), + cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'), + cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'), + }.get(role, role) + @property def role_humanized(self): return self._get_role_name(self.role) @@ -3755,6 +3778,7 @@ class ChangesetComment(Base, BaseModel): status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined') pull_request = relationship('PullRequest', lazy='joined') pull_request_version = relationship('PullRequestVersion') + history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='joined', order_by='ChangesetCommentHistory.version') @classmethod def get_users(cls, revision=None, pull_request_id=None): @@ -3777,7 +3801,7 @@ class ChangesetComment(Base, BaseModel): def get_index_from_version(cls, pr_version, versions): num_versions = [x.pull_request_version_id for x in versions] try: - return num_versions.index(pr_version) +1 + return num_versions.index(pr_version) + 1 except (IndexError, ValueError): return @@ -3805,6 +3829,11 @@ class ChangesetComment(Base, BaseModel): return self.pull_request_version_id < version @property + def commit_id(self): + """New style naming to stop using .revision""" + return self.revision + + @property def resolved(self): return self.resolved_by[0] if self.resolved_by else None @@ -3816,6 +3845,13 @@ class ChangesetComment(Base, BaseModel): def is_inline(self): return self.line_no and self.f_path + @property + def last_version(self): + version = 0 + if self.history: + version = self.history[-1].version + return version + def get_index_version(self, versions): return self.get_index_from_version( self.pull_request_version_id, versions) @@ -3828,6 +3864,7 @@ class ChangesetComment(Base, BaseModel): def get_api_data(self): comment = 
self + data = { 'comment_id': comment.comment_id, 'comment_type': comment.comment_type, @@ -3840,6 +3877,7 @@ class ChangesetComment(Base, BaseModel): 'comment_resolved_by': self.resolved, 'comment_commit_id': comment.revision, 'comment_pull_request_id': comment.pull_request_id, + 'comment_last_version': self.last_version } return data @@ -3849,6 +3887,36 @@ class ChangesetComment(Base, BaseModel): return data +class ChangesetCommentHistory(Base, BaseModel): + __tablename__ = 'changeset_comments_history' + __table_args__ = ( + Index('cch_comment_id_idx', 'comment_id'), + base_table_args, + ) + + comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True) + comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False) + version = Column("version", Integer(), nullable=False, default=0) + created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False) + text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + deleted = Column('deleted', Boolean(), default=False) + + author = relationship('User', lazy='joined') + comment = relationship('ChangesetComment', cascade="all, delete") + + @classmethod + def get_version(cls, comment_id): + q = Session().query(ChangesetCommentHistory).filter( + ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc()) + if q.count() == 0: + return 1 + elif q.count() >= q[0].version: + return q.count() + 1 + else: + return q[0].version + 1 + + class ChangesetStatus(Base, BaseModel): __tablename__ = 'changeset_statuses' __table_args__ = ( diff --git a/rhodecode/model/notification.py b/rhodecode/model/notification.py --- a/rhodecode/model/notification.py +++ b/rhodecode/model/notification.py @@ -131,15 +131,17 @@ class 
NotificationModel(BaseModel): # inject current recipient email_kwargs['recipient'] = recipient email_kwargs['mention'] = recipient in mention_recipients - (subject, headers, email_body, - email_body_plaintext) = EmailNotificationModel().render_email( + (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( notification_type, **email_kwargs) - log.debug( - 'Creating notification email task for user:`%s`', recipient) + extra_headers = None + if 'thread_ids' in email_kwargs: + extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')} + + log.debug('Creating notification email task for user:`%s`', recipient) task = run_task( tasks.send_email, recipient.email, subject, - email_body_plaintext, email_body) + email_body_plaintext, email_body, extra_headers=extra_headers) log.debug('Created email task: %s', task) return notification @@ -293,6 +295,27 @@ class NotificationModel(BaseModel): } +# Templates for Titles, that could be overwritten by rcextensions +# Title of email for pull-request update +EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' +# Title of email for request for pull request review +EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' + +# Title of email for general comment on pull request +EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' +# Title of email for general comment which includes status change on pull request +EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' +# Title of email for inline comment on a file in pull request +EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' + +# Title of email for general comment on commit +EMAIL_COMMENT_SUBJECT_TEMPLATE = '' +# Title of email for general comment which includes status change on commit +EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' +# Title of email for inline comment on a file in commit +EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' + + class EmailNotificationModel(BaseModel): TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT TYPE_REGISTRATION = Notification.TYPE_REGISTRATION @@ -333,7 +356,7 @@ 
class EmailNotificationModel(BaseModel): } premailer_instance = premailer.Premailer( - cssutils_logging_level=logging.WARNING, + cssutils_logging_level=logging.ERROR, cssutils_logging_handler=logging.getLogger().handlers[0] if logging.getLogger().handlers else None, ) @@ -342,8 +365,7 @@ class EmailNotificationModel(BaseModel): """ Example usage:: - (subject, headers, email_body, - email_body_plaintext) = EmailNotificationModel().render_email( + (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( EmailNotificationModel.TYPE_TEST, **email_kwargs) """ @@ -387,12 +409,6 @@ class EmailNotificationModel(BaseModel): subject = email_template.render('subject', **_kwargs) try: - headers = email_template.render('headers', **_kwargs) - except AttributeError: - # it's not defined in template, ok we can skip it - headers = '' - - try: body_plaintext = email_template.render('body_plaintext', **_kwargs) except AttributeError: # it's not defined in template, ok we can skip it @@ -408,4 +424,4 @@ class EmailNotificationModel(BaseModel): log.exception('Failed to parse body with premailer') pass - return subject, headers, body, body_plaintext + return subject, body, body_plaintext diff --git a/rhodecode/model/permission.py b/rhodecode/model/permission.py --- a/rhodecode/model/permission.py +++ b/rhodecode/model/permission.py @@ -577,7 +577,8 @@ class PermissionModel(BaseModel): user_group_write_permissions[p.users_group_id] = p return user_group_write_permissions - def trigger_permission_flush(self, affected_user_ids): + def trigger_permission_flush(self, affected_user_ids=None): + affected_user_ids or User.get_all_user_ids() events.trigger(events.UserPermissionsChange(affected_user_ids)) def flush_user_permission_caches(self, changes, affected_user_ids=None): diff --git a/rhodecode/model/pull_request.py b/rhodecode/model/pull_request.py --- a/rhodecode/model/pull_request.py +++ b/rhodecode/model/pull_request.py @@ -703,6 +703,8 @@ class 
PullRequestModel(BaseModel): trigger_hook = hooks_utils.trigger_update_pull_request_hook elif action == 'comment': trigger_hook = hooks_utils.trigger_comment_pull_request_hook + elif action == 'comment_edit': + trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook else: return @@ -1342,12 +1344,11 @@ class PullRequestModel(BaseModel): 'pull_request_source_repo_url': pr_source_repo_url, 'pull_request_url': pr_url, + 'thread_ids': [pr_url], } # pre-generate the subject for notification itself - (subject, - _h, _e, # we don't care about those - body_plaintext) = EmailNotificationModel().render_email( + (subject, _e, body_plaintext) = EmailNotificationModel().render_email( notification_type, **kwargs) # create notification objects, and emails @@ -1412,11 +1413,10 @@ class PullRequestModel(BaseModel): 'added_files': file_changes.added, 'modified_files': file_changes.modified, 'removed_files': file_changes.removed, + 'thread_ids': [pr_url], } - (subject, - _h, _e, # we don't care about those - body_plaintext) = EmailNotificationModel().render_email( + (subject, _e, body_plaintext) = EmailNotificationModel().render_email( EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs) # create notification objects, and emails @@ -2053,9 +2053,9 @@ class MergeCheck(object): repo_type = pull_request.target_repo.repo_type close_msg = '' if repo_type == 'hg': - close_msg = _('Source branch will be closed after merge.') + close_msg = _('Source branch will be closed before the merge.') elif repo_type == 'git': - close_msg = _('Source branch will be deleted after merge.') + close_msg = _('Source branch will be deleted after the merge.') merge_details['close_branch'] = dict( details={}, diff --git a/rhodecode/model/repo.py b/rhodecode/model/repo.py --- a/rhodecode/model/repo.py +++ b/rhodecode/model/repo.py @@ -33,7 +33,7 @@ from rhodecode import events from rhodecode.lib.auth import HasUserGroupPermissionAny from rhodecode.lib.caching_query import FromCache from 
rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError -from rhodecode.lib.hooks_base import log_delete_repository +from rhodecode.lib import hooks_base from rhodecode.lib.user_log_filter import user_log_filter from rhodecode.lib.utils import make_db_config from rhodecode.lib.utils2 import ( @@ -767,7 +767,7 @@ class RepoModel(BaseModel): 'deleted_by': cur_user, 'deleted_on': time.time(), }) - log_delete_repository(**old_repo_dict) + hooks_base.delete_repository(**old_repo_dict) events.trigger(events.RepoDeleteEvent(repo)) except Exception: log.error(traceback.format_exc()) diff --git a/rhodecode/model/repo_group.py b/rhodecode/model/repo_group.py --- a/rhodecode/model/repo_group.py +++ b/rhodecode/model/repo_group.py @@ -308,13 +308,13 @@ class RepoGroupModel(BaseModel): self._create_group(new_repo_group.group_name) # trigger the post hook - from rhodecode.lib.hooks_base import log_create_repository_group + from rhodecode.lib import hooks_base repo_group = RepoGroup.get_by_group_name(group_name) # update repo group commit caches initially repo_group.update_commit_cache() - log_create_repository_group( + hooks_base.create_repository_group( created_by=user.username, **repo_group.get_dict()) # Trigger create event. 
diff --git a/rhodecode/model/user.py b/rhodecode/model/user.py --- a/rhodecode/model/user.py +++ b/rhodecode/model/user.py @@ -262,8 +262,7 @@ class UserModel(BaseModel): from rhodecode.lib.auth import ( get_crypt_password, check_password) - from rhodecode.lib.hooks_base import ( - log_create_user, check_allowed_create_user) + from rhodecode.lib import hooks_base def _password_change(new_user, password): old_password = new_user.password or '' @@ -327,7 +326,7 @@ class UserModel(BaseModel): if new_active_user and strict_creation_check: # raises UserCreationError if it's not allowed for any reason to # create new active user, this also executes pre-create hooks - check_allowed_create_user(user_data, cur_user, strict_check=True) + hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True) events.trigger(events.UserPreCreate(user_data)) new_user = User() edit = False @@ -390,7 +389,7 @@ class UserModel(BaseModel): kwargs = new_user.get_dict() # backward compat, require api_keys present kwargs['api_keys'] = kwargs['auth_tokens'] - log_create_user(created_by=cur_user, **kwargs) + hooks_base.create_user(created_by=cur_user, **kwargs) events.trigger(events.UserPostCreate(user_data)) return new_user except (DatabaseError,): @@ -423,9 +422,7 @@ class UserModel(BaseModel): } notification_type = EmailNotificationModel.TYPE_REGISTRATION # pre-generate the subject for notification itself - (subject, - _h, _e, # we don't care about those - body_plaintext) = EmailNotificationModel().render_email( + (subject, _e, body_plaintext) = EmailNotificationModel().render_email( notification_type, **kwargs) # create notification objects, and emails @@ -569,7 +566,7 @@ class UserModel(BaseModel): def delete(self, user, cur_user=None, handle_repos=None, handle_repo_groups=None, handle_user_groups=None, handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None): - from rhodecode.lib.hooks_base import log_delete_user + from rhodecode.lib import hooks_base if not 
cur_user: cur_user = getattr(get_current_rhodecode_user(), 'username', None) @@ -638,7 +635,7 @@ class UserModel(BaseModel): self.sa.expire(user) self.sa.delete(user) - log_delete_user(deleted_by=cur_user, **user_data) + hooks_base.delete_user(deleted_by=cur_user, **user_data) except Exception: log.error(traceback.format_exc()) raise @@ -660,8 +657,7 @@ class UserModel(BaseModel): 'first_admin_email': User.get_first_super_admin().email } - (subject, headers, email_body, - email_body_plaintext) = EmailNotificationModel().render_email( + (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs) recipients = [user_email] @@ -719,8 +715,7 @@ class UserModel(BaseModel): 'first_admin_email': User.get_first_super_admin().email } - (subject, headers, email_body, - email_body_plaintext) = EmailNotificationModel().render_email( + (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, **email_kwargs) diff --git a/rhodecode/model/validation_schema/schemas/repo_group_schema.py b/rhodecode/model/validation_schema/schemas/repo_group_schema.py --- a/rhodecode/model/validation_schema/schemas/repo_group_schema.py +++ b/rhodecode/model/validation_schema/schemas/repo_group_schema.py @@ -53,7 +53,8 @@ def deferred_can_write_to_group_validato # permissions denied we expose as not existing, to prevent # resource discovery 'permission_denied_parent_group': - _(u"Parent repository group `{}` does not exist"), + _(u"You do not have the permissions to store " + u"repository groups inside repository group `{}`"), 'permission_denied_root': _(u"You do not have the permission to store " u"repository groups in the root location.") @@ -100,9 +101,15 @@ def deferred_can_write_to_group_validato # we want to allow this... 
forbidden = not (group_admin or (group_write and create_on_write and 0)) + old_name = old_values.get('group_name') + if old_name and old_name == old_values.get('submitted_repo_group_name'): + # we're editing a repository group, we didn't change the name + # we skip the check for write into parent group now + # this allows changing settings for this repo group + return + if parent_group and forbidden: - msg = messages['permission_denied_parent_group'].format( - parent_group_name) + msg = messages['permission_denied_parent_group'].format(parent_group_name) raise colander.Invalid(node, msg) return can_write_group_validator @@ -248,6 +255,9 @@ class RepoGroupSchema(colander.Schema): validated_name = appstruct['repo_group_name'] # second pass to validate permissions to repo_group + if 'old_values' in self.bindings: + # save current repo name for name change checks + self.bindings['old_values']['submitted_repo_group_name'] = validated_name second = RepoGroupAccessSchema().bind(**self.bindings) appstruct_second = second.deserialize({'repo_group': validated_name}) # save result @@ -286,6 +296,9 @@ class RepoGroupSettingsSchema(RepoGroupS validated_name = separator.join([group.group_name, validated_name]) # second pass to validate permissions to repo_group + if 'old_values' in self.bindings: + # save current repo name for name change checks + self.bindings['old_values']['submitted_repo_group_name'] = validated_name second = RepoGroupAccessSchema().bind(**self.bindings) appstruct_second = second.deserialize({'repo_group': validated_name}) # save result diff --git a/rhodecode/model/validation_schema/schemas/repo_schema.py b/rhodecode/model/validation_schema/schemas/repo_schema.py --- a/rhodecode/model/validation_schema/schemas/repo_schema.py +++ b/rhodecode/model/validation_schema/schemas/repo_schema.py @@ -141,17 +141,23 @@ def deferred_can_write_to_group_validato is_root_location = value is types.RootLocation # NOT initialized validators, we must call them - 
can_create_repos_at_root = HasPermissionAny( - 'hg.admin', 'hg.create.repository') + can_create_repos_at_root = HasPermissionAny('hg.admin', 'hg.create.repository') # if values is root location, we simply need to check if we can write # to root location ! if is_root_location: + if can_create_repos_at_root(user=request_user): # we can create repo group inside tool-level. No more checks # are required return else: + old_name = old_values.get('repo_name') + if old_name and old_name == old_values.get('submitted_repo_name'): + # since we didn't change the name, we can skip validation and + # allow current users without store-in-root permissions to update + return + # "fake" node name as repo_name, otherwise we oddly report # the error as if it was coming form repo_group # however repo_group is empty when using root location. @@ -372,6 +378,9 @@ class RepoSchema(colander.MappingSchema) validated_name = appstruct['repo_name'] # second pass to validate permissions to repo_group + if 'old_values' in self.bindings: + # save current repo name for name change checks + self.bindings['old_values']['submitted_repo_name'] = validated_name second = RepoGroupAccessSchema().bind(**self.bindings) appstruct_second = second.deserialize({'repo_group': validated_name}) # save result @@ -429,6 +438,9 @@ class RepoSettingsSchema(RepoSchema): validated_name = separator.join([group.group_name, validated_name]) # second pass to validate permissions to repo_group + if 'old_values' in self.bindings: + # save current repo name for name change checks + self.bindings['old_values']['submitted_repo_name'] = validated_name second = RepoGroupAccessSchema().bind(**self.bindings) appstruct_second = second.deserialize({'repo_group': validated_name}) # save result diff --git a/rhodecode/public/css/buttons.less b/rhodecode/public/css/buttons.less --- a/rhodecode/public/css/buttons.less +++ b/rhodecode/public/css/buttons.less @@ -259,21 +259,34 @@ input[type="button"] { &:not(.open) 
.btn-action-switcher-container { display: none; } + + .btn-more-option { + margin-left: -1px; + padding-left: 2px; + padding-right: 2px; + border-left: 1px solid @grey3; + } } -.btn-action-switcher-container{ +.btn-action-switcher-container { position: absolute; - top: 30px; - left: -82px; + top: 100%; + + &.left-align { + left: 0; + } + &.right-align { + right: 0; + } + } .btn-action-switcher { display: block; position: relative; z-index: 300; - min-width: 240px; - max-width: 500px; + max-width: 600px; margin-top: 4px; margin-bottom: 24px; font-size: 14px; @@ -283,6 +296,7 @@ input[type="button"] { border: 1px solid @grey4; border-radius: 3px; box-shadow: @dropdown-shadow; + overflow: auto; li { display: block; diff --git a/rhodecode/public/css/code-block.less b/rhodecode/public/css/code-block.less --- a/rhodecode/public/css/code-block.less +++ b/rhodecode/public/css/code-block.less @@ -998,6 +998,21 @@ input.filediff-collapse-state { /**** END COMMENTS ****/ + + .nav-chunk { + position: absolute; + right: 20px; + margin-top: -17px; + } + + .nav-chunk.selected { + visibility: visible !important; + } + + #diff_nav { + color: @grey3; + } + } @@ -1063,6 +1078,10 @@ input.filediff-collapse-state { background: @color5; color: white; } + &[op="comments"] { /* comments on file */ + background: @grey4; + color: white; + } } } diff --git a/rhodecode/public/css/comments.less b/rhodecode/public/css/comments.less --- a/rhodecode/public/css/comments.less +++ b/rhodecode/public/css/comments.less @@ -65,7 +65,7 @@ tr.inline-comments div { float: left; padding: 0.4em 0.4em; - margin: 3px 5px 0px -10px; + margin: 2px 4px 0px 0px; display: inline-block; min-height: 0; @@ -76,12 +76,13 @@ tr.inline-comments div { font-family: @text-italic; font-style: italic; background: #fff none; - color: @grey4; + color: @grey3; border: 1px solid @grey4; white-space: nowrap; text-transform: uppercase; - min-width: 40px; + min-width: 50px; + border-radius: 4px; &.todo { color: @color5; @@ -253,12 
+254,10 @@ tr.inline-comments div { } .pr-version { - float: left; - margin: 0px 4px; + display: inline-block; } .pr-version-inline { - float: left; - margin: 0px 4px; + display: inline-block; } .pr-version-num { font-size: 10px; @@ -447,6 +446,13 @@ form.comment-form { } } +.comment-version-select { + margin: 0px; + border-radius: inherit; + border-color: @grey6; + height: 20px; +} + .comment-type { margin: 0px; border-radius: inherit; diff --git a/rhodecode/public/css/main.less b/rhodecode/public/css/main.less --- a/rhodecode/public/css/main.less +++ b/rhodecode/public/css/main.less @@ -97,6 +97,11 @@ input + .action-link, .action-link.first border-left: none; } +.link-disabled { + color: @grey4; + cursor: default; +} + .action-link.last{ margin-right: @padding; padding-right: @padding; diff --git a/rhodecode/public/css/select2.less b/rhodecode/public/css/select2.less --- a/rhodecode/public/css/select2.less +++ b/rhodecode/public/css/select2.less @@ -148,6 +148,38 @@ select.select2{height:28px;visibility:hi margin: 0; } + +.drop-menu-comment-history { + .drop-menu-core; + border: none; + padding: 0 6px 0 0; + width: auto; + min-width: 0; + margin: 0; + position: relative; + display: inline-block; + line-height: 1em; + z-index: 2; + cursor: pointer; + + a { + display:block; + padding: 0; + position: relative; + + &:after { + position: absolute; + content: "\00A0\25BE"; + right: -0.80em; + line-height: 1em; + top: -0.20em; + width: 1em; + font-size: 16px; + } + } + +} + .field-sm .drop-menu { padding: 1px 0 0 0; a { diff --git a/rhodecode/public/css/tables.less b/rhodecode/public/css/tables.less --- a/rhodecode/public/css/tables.less +++ b/rhodecode/public/css/tables.less @@ -33,6 +33,12 @@ table.dataTable { .rc-user { white-space: nowrap; } + .user-perm-duplicate { + color: @grey4; + a { + color: @grey4; + } + } } .td-email { diff --git a/rhodecode/public/css/tags.less b/rhodecode/public/css/tags.less --- a/rhodecode/public/css/tags.less +++ 
b/rhodecode/public/css/tags.less @@ -37,6 +37,10 @@ &:hover { border-color: @grey4; } + + &.authortag { + padding: 2px; + } } .tag0 { .border ( @border-thickness-tags, @grey4 ); color:@grey4; } diff --git a/rhodecode/public/js/rhodecode/routes.js b/rhodecode/public/js/rhodecode/routes.js --- a/rhodecode/public/js/rhodecode/routes.js +++ b/rhodecode/public/js/rhodecode/routes.js @@ -185,8 +185,10 @@ function registerRCRoutes() { pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); + pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_history_id)s/history_view', ['repo_name', 'commit_id', 'comment_history_id']); pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']); pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); + pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']); pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); @@ -242,6 +244,7 @@ function registerRCRoutes() { pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); 
pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); + pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']); pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); diff --git a/rhodecode/public/js/src/components/rhodecode-toast/rhodecode-toast.less b/rhodecode/public/js/src/components/rhodecode-toast/rhodecode-toast.less --- a/rhodecode/public/js/src/components/rhodecode-toast/rhodecode-toast.less +++ b/rhodecode/public/js/src/components/rhodecode-toast/rhodecode-toast.less @@ -9,6 +9,7 @@ margin: 0; float: right; cursor: pointer; + padding: 8px 0 8px 8px; } .toast-message-holder{ diff --git a/rhodecode/public/js/src/plugins/jquery.scrollstop.js b/rhodecode/public/js/src/plugins/jquery.scrollstop.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/src/plugins/jquery.scrollstop.js @@ -0,0 +1,91 @@ +// jQuery Scrollstop Plugin v1.2.0 +// https://github.com/ssorallen/jquery-scrollstop + +(function (factory) { + // UMD[2] wrapper for jQuery plugins to work in AMD or in CommonJS. + // + // [2] https://github.com/umdjs/umd + + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. 
+ define(['jquery'], factory); + } else if (typeof exports === 'object') { + // Node/CommonJS + module.exports = factory(require('jquery')); + } else { + // Browser globals + factory(jQuery); + } +}(function ($) { + // $.event.dispatch was undocumented and was deprecated in jQuery 1.7[1]. It + // was replaced by $.event.handle in jQuery 1.9. + // + // Use the first of the available functions to support jQuery <1.8. + // + // [1] https://github.com/jquery/jquery-migrate/blob/master/src/event.js#L25 + var dispatch = $.event.dispatch || $.event.handle; + + var special = $.event.special, + uid1 = 'D' + (+new Date()), + uid2 = 'D' + (+new Date() + 1); + + special.scrollstart = { + setup: function(data) { + var _data = $.extend({ + latency: special.scrollstop.latency + }, data); + + var timer, + handler = function(evt) { + var _self = this, + _args = arguments; + + if (timer) { + clearTimeout(timer); + } else { + evt.type = 'scrollstart'; + dispatch.apply(_self, _args); + } + + timer = setTimeout(function() { + timer = null; + }, _data.latency); + }; + + $(this).bind('scroll', handler).data(uid1, handler); + }, + teardown: function() { + $(this).unbind('scroll', $(this).data(uid1)); + } + }; + + special.scrollstop = { + latency: 250, + setup: function(data) { + var _data = $.extend({ + latency: special.scrollstop.latency + }, data); + + var timer, + handler = function(evt) { + var _self = this, + _args = arguments; + + if (timer) { + clearTimeout(timer); + } + + timer = setTimeout(function() { + timer = null; + evt.type = 'scrollstop'; + dispatch.apply(_self, _args); + }, _data.latency); + }; + + $(this).bind('scroll', handler).data(uid2, handler); + }, + teardown: function() { + $(this).unbind('scroll', $(this).data(uid2)); + } + }; +})); diff --git a/rhodecode/public/js/src/plugins/jquery.within-viewport.js b/rhodecode/public/js/src/plugins/jquery.within-viewport.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/src/plugins/jquery.within-viewport.js @@ 
-0,0 +1,171 @@ +/** + * Within Viewport jQuery Plugin + * + * @description Companion plugin for withinviewport.js - determines whether an element is completely within the browser viewport + * @author Craig Patik, http://patik.com/ + * @version 2.1.2 + * @date 2019-08-16 + */ +(function ($) { + /** + * $.withinviewport() + * @description jQuery method + * @param {Object} [settings] optional settings + * @return {Collection} Contains all elements that were within the viewport + */ + $.fn.withinviewport = function (settings) { + var opts; + var elems; + + if (typeof settings === 'string') { + settings = { + sides: settings + }; + } + + opts = $.extend({}, settings, { + sides: 'all' + }); + elems = []; + + this.each(function () { + if (withinviewport(this, opts)) { + elems.push(this); + } + }); + + return $(elems); + }; + + // Main custom selector + $.extend($.expr[':'], { + 'within-viewport': function (element) { + return withinviewport(element, 'all'); + } + }); + + /** + * Optional enhancements and shortcuts + * + * @description Uncomment or comment these pieces as they apply to your project and coding preferences + */ + + // Shorthand jQuery methods + + $.fn.withinviewporttop = function (settings) { + var opts; + var elems; + + if (typeof settings === 'string') { + settings = { + sides: settings + }; + } + + opts = $.extend({}, settings, { + sides: 'top' + }); + elems = []; + + this.each(function () { + if (withinviewport(this, opts)) { + elems.push(this); + } + }); + + return $(elems); + }; + + $.fn.withinviewportright = function (settings) { + var opts; + var elems; + + if (typeof settings === 'string') { + settings = { + sides: settings + }; + } + + opts = $.extend({}, settings, { + sides: 'right' + }); + elems = []; + + this.each(function () { + if (withinviewport(this, opts)) { + elems.push(this); + } + }); + + return $(elems); + }; + + $.fn.withinviewportbottom = function (settings) { + var opts; + var elems; + + if (typeof settings === 'string') { + settings 
= { + sides: settings + }; + } + + opts = $.extend({}, settings, { + sides: 'bottom' + }); + elems = []; + + this.each(function () { + if (withinviewport(this, opts)) { + elems.push(this); + } + }); + + return $(elems); + }; + + $.fn.withinviewportleft = function (settings) { + var opts; + var elems; + + if (typeof settings === 'string') { + settings = { + sides: settings + }; + } + + opts = $.extend({}, settings, { + sides: 'left' + }); + elems = []; + + this.each(function () { + if (withinviewport(this, opts)) { + elems.push(this); + } + }); + + return $(elems); + }; + + // Custom jQuery selectors + $.extend($.expr[':'], { + 'within-viewport-top': function (element) { + return withinviewport(element, 'top'); + }, + 'within-viewport-right': function (element) { + return withinviewport(element, 'right'); + }, + 'within-viewport-bottom': function (element) { + return withinviewport(element, 'bottom'); + }, + 'within-viewport-left': function (element) { + return withinviewport(element, 'left'); + } + // Example custom selector: + //, + // 'within-viewport-top-left-45': function (element) { + // return withinviewport(element, {sides:'top left', top: 45, left: 45}); + // } + }); +}(jQuery)); \ No newline at end of file diff --git a/rhodecode/public/js/src/plugins/within_viewport.js b/rhodecode/public/js/src/plugins/within_viewport.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/src/plugins/within_viewport.js @@ -0,0 +1,235 @@ +/** + * Within Viewport + * + * @description Determines whether an element is completely within the browser viewport + * @author Craig Patik, http://patik.com/ + * @version 2.1.2 + * @date 2019-08-16 + */ +(function (root, name, factory) { + // AMD + if (typeof define === 'function' && define.amd) { + define([], factory); + } + // Node and CommonJS-like environments + else if (typeof module !== 'undefined' && typeof exports === 'object') { + module.exports = factory(); + } + // Browser global + else { + root[name] = factory(); + } 
+}(this, 'withinviewport', function () { + var canUseWindowDimensions = typeof window !== 'undefined' && window.innerHeight !== undefined; // IE 8 and lower fail this + + /** + * Determines whether an element is within the viewport + * @param {Object} elem DOM Element (required) + * @param {Object} options Optional settings + * @return {Boolean} Whether the element was completely within the viewport + */ + var withinviewport = function withinviewport(elem, options) { + var result = false; + var metadata = {}; + var config = {}; + var settings; + var isWithin; + var isContainerTheWindow; + var elemBoundingRect; + var containerBoundingRect; + var containerScrollTop; + var containerScrollLeft; + var scrollBarWidths = [0, 0]; + var sideNamesPattern; + var sides; + var side; + var i; + + // If invoked by the jQuery plugin, get the actual DOM element + if (typeof jQuery !== 'undefined' && elem instanceof jQuery) { + elem = elem.get(0); + } + + if (typeof elem !== 'object' || elem.nodeType !== 1) { + throw new Error('First argument must be an element'); + } + + // Look for inline settings on the element + if (elem.getAttribute('data-withinviewport-settings') && window.JSON) { + metadata = JSON.parse(elem.getAttribute('data-withinviewport-settings')); + } + + // Settings argument may be a simple string (`top`, `right`, etc) + if (typeof options === 'string') { + settings = { + sides: options + }; + } else { + settings = options || {}; + } + + // Build configuration from defaults and user-provided settings and metadata + config.container = settings.container || metadata.container || withinviewport.defaults.container || window; + config.sides = settings.sides || metadata.sides || withinviewport.defaults.sides || 'all'; + config.top = settings.top || metadata.top || withinviewport.defaults.top || 0; + config.right = settings.right || metadata.right || withinviewport.defaults.right || 0; + config.bottom = settings.bottom || metadata.bottom || withinviewport.defaults.bottom || 
0; + config.left = settings.left || metadata.left || withinviewport.defaults.left || 0; + + // Extract the DOM node from a jQuery collection + if (typeof jQuery !== 'undefined' && config.container instanceof jQuery) { + config.container = config.container.get(0); + } + + // Use the window as the container if the user specified the body or a non-element + if (config.container === document.body || config.container.nodeType !== 1) { + config.container = window; + } + + isContainerTheWindow = (config.container === window); + + // Element testing methods + isWithin = { + // Element is below the top edge of the viewport + top: function _isWithin_top() { + if (isContainerTheWindow) { + return (elemBoundingRect.top >= config.top); + } else { + return (elemBoundingRect.top >= containerScrollTop - (containerScrollTop - containerBoundingRect.top) + config.top); + } + }, + + // Element is to the left of the right edge of the viewport + right: function _isWithin_right() { + // Note that `elemBoundingRect.right` is the distance from the *left* of the viewport to the element's far right edge + + if (isContainerTheWindow) { + return (elemBoundingRect.right <= (containerBoundingRect.right + containerScrollLeft) - config.right); + } else { + return (elemBoundingRect.right <= containerBoundingRect.right - scrollBarWidths[0] - config.right); + } + }, + + // Element is above the bottom edge of the viewport + bottom: function _isWithin_bottom() { + var containerHeight = 0; + + if (isContainerTheWindow) { + if (canUseWindowDimensions) { + containerHeight = config.container.innerHeight; + } else if (document && document.documentElement) { + containerHeight = document.documentElement.clientHeight; + } + } else { + containerHeight = containerBoundingRect.bottom; + } + + // Note that `elemBoundingRect.bottom` is the distance from the *top* of the viewport to the element's bottom edge + return (elemBoundingRect.bottom <= containerHeight - scrollBarWidths[1] - config.bottom); + }, + + // 
Element is to the right of the left edge of the viewport + left: function _isWithin_left() { + if (isContainerTheWindow) { + return (elemBoundingRect.left >= config.left); + } else { + return (elemBoundingRect.left >= containerScrollLeft - (containerScrollLeft - containerBoundingRect.left) + config.left); + } + }, + + // Element is within all four boundaries + all: function _isWithin_all() { + // Test each boundary in order of efficiency and likeliness to be false. This way we can avoid running all four functions on most elements. + // 1. Top: Quickest to calculate + most likely to be false + // 2. Bottom: Note quite as quick to calculate, but also very likely to be false + // 3-4. Left and right are both equally unlikely to be false since most sites only scroll vertically, but left is faster to calculate + return (isWithin.top() && isWithin.bottom() && isWithin.left() && isWithin.right()); + } + }; + + // Get the element's bounding rectangle with respect to the viewport + elemBoundingRect = elem.getBoundingClientRect(); + + // Get viewport dimensions and offsets + if (isContainerTheWindow) { + containerBoundingRect = document.documentElement.getBoundingClientRect(); + containerScrollTop = document.body.scrollTop; + containerScrollLeft = window.scrollX || document.body.scrollLeft; + } else { + containerBoundingRect = config.container.getBoundingClientRect(); + containerScrollTop = config.container.scrollTop; + containerScrollLeft = config.container.scrollLeft; + } + + // Don't count the space consumed by scrollbars + if (containerScrollLeft) { + scrollBarWidths[0] = 18; + } + + if (containerScrollTop) { + scrollBarWidths[1] = 16; + } + + // Test the element against each side of the viewport that was requested + sideNamesPattern = /^top$|^right$|^bottom$|^left$|^all$/; + + // Loop through all of the sides + sides = config.sides.split(' '); + i = sides.length; + + while (i--) { + side = sides[i].toLowerCase(); + + if (sideNamesPattern.test(side)) { + if 
(isWithin[side]()) { + result = true; + } else { + result = false; + + // Quit as soon as the first failure is found + break; + } + } + } + + return result; + }; + + // Default settings + withinviewport.prototype.defaults = { + container: typeof document !== 'undefined' ? document.body : {}, + sides: 'all', + top: 0, + right: 0, + bottom: 0, + left: 0 + }; + + withinviewport.defaults = withinviewport.prototype.defaults; + + /** + * Optional enhancements and shortcuts + * + * @description Uncomment or comment these pieces as they apply to your project and coding preferences + */ + + // Shortcut methods for each side of the viewport + // Example: `withinviewport.top(elem)` is the same as `withinviewport(elem, 'top')` + withinviewport.prototype.top = function _withinviewport_top(element) { + return withinviewport(element, 'top'); + }; + + withinviewport.prototype.right = function _withinviewport_right(element) { + return withinviewport(element, 'right'); + }; + + withinviewport.prototype.bottom = function _withinviewport_bottom(element) { + return withinviewport(element, 'bottom'); + }; + + withinviewport.prototype.left = function _withinviewport_left(element) { + return withinviewport(element, 'left'); + }; + + return withinviewport; +})); \ No newline at end of file diff --git a/rhodecode/public/js/src/rhodecode/comments.js b/rhodecode/public/js/src/rhodecode/comments.js --- a/rhodecode/public/js/src/rhodecode/comments.js +++ b/rhodecode/public/js/src/rhodecode/comments.js @@ -80,9 +80,10 @@ var _submitAjaxPOST = function(url, post })(function() { "use strict"; - function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId) { + function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id) { + if (!(this instanceof CommentForm)) { - return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId); + return 
new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id); } // bind the element instance to our Form @@ -126,10 +127,20 @@ var _submitAjaxPOST = function(url, post this.submitButton = $(this.submitForm).find('input[type="submit"]'); this.submitButtonText = this.submitButton.val(); + this.previewUrl = pyroutes.url('repo_commit_comment_preview', {'repo_name': templateContext.repo_name, 'commit_id': templateContext.commit_data.commit_id}); + if (edit){ + this.submitButtonText = _gettext('Updated Comment'); + $(this.commentType).prop('disabled', true); + $(this.commentType).addClass('disabled'); + var editInfo = + ''; + $(editInfo).insertBefore($(this.editButton).parent()); + } + if (resolvesCommentId){ this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId); this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId); @@ -153,17 +164,27 @@ var _submitAjaxPOST = function(url, post // based on commitId, or pullRequestId decide where do we submit // out data if (this.commitId){ - this.submitUrl = pyroutes.url('repo_commit_comment_create', + var pyurl = 'repo_commit_comment_create'; + if(edit){ + pyurl = 'repo_commit_comment_edit'; + } + this.submitUrl = pyroutes.url(pyurl, {'repo_name': templateContext.repo_name, - 'commit_id': this.commitId}); + 'commit_id': this.commitId, + 'comment_id': comment_id}); this.selfUrl = pyroutes.url('repo_commit', {'repo_name': templateContext.repo_name, 'commit_id': this.commitId}); } else if (this.pullRequestId) { - this.submitUrl = pyroutes.url('pullrequest_comment_create', + var pyurl = 'pullrequest_comment_create'; + if(edit){ + pyurl = 'pullrequest_comment_edit'; + } + this.submitUrl = pyroutes.url(pyurl, {'repo_name': templateContext.repo_name, - 'pull_request_id': this.pullRequestId}); + 'pull_request_id': this.pullRequestId, + 'comment_id': comment_id}); this.selfUrl = pyroutes.url('pullrequest_show', {'repo_name': 
templateContext.repo_name, 'pull_request_id': this.pullRequestId}); @@ -277,7 +298,7 @@ var _submitAjaxPOST = function(url, post this.globalSubmitSuccessCallback = function(){ // default behaviour is to call GLOBAL hook, if it's registered. if (window.commentFormGlobalSubmitSuccessCallback !== undefined){ - commentFormGlobalSubmitSuccessCallback() + commentFormGlobalSubmitSuccessCallback(); } }; @@ -475,18 +496,97 @@ var _submitAjaxPOST = function(url, post return CommentForm; }); +/* selector for comment versions */ +var initVersionSelector = function(selector, initialData) { + + var formatResult = function(result, container, query, escapeMarkup) { + + return renderTemplate('commentVersion', { + show_disabled: true, + version: result.comment_version, + user_name: result.comment_author_username, + gravatar_url: result.comment_author_gravatar, + size: 16, + timeago_component: result.comment_created_on, + }) + }; + + $(selector).select2({ + placeholder: "Edited", + containerCssClass: "drop-menu-comment-history", + dropdownCssClass: "drop-menu-dropdown", + dropdownAutoWidth: true, + minimumResultsForSearch: -1, + data: initialData, + formatResult: formatResult, + }); + + $(selector).on('select2-selecting', function (e) { + // hide the mast as we later do preventDefault() + $("#select2-drop-mask").click(); + e.preventDefault(); + e.choice.action(); + }); + + $(selector).on("select2-open", function() { + timeagoActivate(); + }); +}; + /* comments controller */ var CommentsController = function() { var mainComment = '#text'; var self = this; - this.cancelComment = function(node) { + this.cancelComment = function (node) { var $node = $(node); - var $td = $node.closest('td'); + var edit = $(this).attr('edit'); + if (edit) { + var $general_comments = null; + var $inline_comments = $node.closest('div.inline-comments'); + if (!$inline_comments.length) { + $general_comments = $('#comments'); + var $comment = $general_comments.parent().find('div.comment:hidden'); + // show 
hidden general comment form + $('#cb-comment-general-form-placeholder').show(); + } else { + var $comment = $inline_comments.find('div.comment:hidden'); + } + $comment.show(); + } $node.closest('.comment-inline-form').remove(); return false; }; + this.showVersion = function (comment_id, comment_history_id) { + + var historyViewUrl = pyroutes.url( + 'repo_commit_comment_history_view', + { + 'repo_name': templateContext.repo_name, + 'commit_id': comment_id, + 'comment_history_id': comment_history_id, + } + ); + successRenderCommit = function (data) { + SwalNoAnimation.fire({ + html: data, + title: '', + }); + }; + failRenderCommit = function () { + SwalNoAnimation.fire({ + html: 'Error while loading comment history', + title: '', + }); + }; + _submitAjaxPOST( + historyViewUrl, {'csrf_token': CSRF_TOKEN}, + successRenderCommit, + failRenderCommit + ); + }; + this.getLineNumber = function(node) { var $node = $(node); var lineNo = $node.closest('td').attr('data-line-no'); @@ -638,12 +738,12 @@ var CommentsController = function() { $node.closest('tr').toggleClass('hide-line-comments'); }; - this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId){ + this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId, edit, comment_id){ var pullRequestId = templateContext.pull_request_data.pull_request_id; var commitId = templateContext.commit_data.commit_id; var commentForm = new CommentForm( - formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId); + formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId, edit, comment_id); var cm = commentForm.getCmInstance(); if (resolvesCommentId){ @@ -780,18 +880,234 @@ var CommentsController = function() { var _form = $($form[0]); var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo']; + var edit = false; + var comment_id = null; var commentForm 
= this.createCommentForm( - _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId); + _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId, edit, comment_id); commentForm.initStatusChangeSelector(); return commentForm; }; + this.editComment = function(node) { + var $node = $(node); + var $comment = $(node).closest('.comment'); + var comment_id = $comment.attr('data-comment-id'); + var $form = null + + var $comments = $node.closest('div.inline-comments'); + var $general_comments = null; + var lineno = null; + + if($comments.length){ + // inline comments setup + $form = $comments.find('.comment-inline-form'); + lineno = self.getLineNumber(node) + } + else{ + // general comments setup + $comments = $('#comments'); + $form = $comments.find('.comment-inline-form'); + lineno = $comment[0].id + $('#cb-comment-general-form-placeholder').hide(); + } + + this.edit = true; + + if (!$form.length) { + + var $filediff = $node.closest('.filediff'); + $filediff.removeClass('hide-comments'); + var f_path = $filediff.attr('data-f-path'); + + // create a new HTML from template + + var tmpl = $('#cb-comment-inline-form-template').html(); + tmpl = tmpl.format(escapeHtml(f_path), lineno); + $form = $(tmpl); + $comment.after($form) + + var _form = $($form[0]).find('form'); + var autocompleteActions = ['as_note',]; + var commentForm = this.createCommentForm( + _form, lineno, '', autocompleteActions, resolvesCommentId, + this.edit, comment_id); + var old_comment_text_binary = $comment.attr('data-comment-text'); + var old_comment_text = b64DecodeUnicode(old_comment_text_binary); + commentForm.cm.setValue(old_comment_text); + $comment.hide(); + + $.Topic('/ui/plugins/code/comment_form_built').prepareOrPublish({ + form: _form, + parent: $comments, + lineno: lineno, + f_path: f_path} + ); + + // set a CUSTOM submit handler for inline comments. 
+ commentForm.setHandleFormSubmit(function(o) { + var text = commentForm.cm.getValue(); + var commentType = commentForm.getCommentType(); + + if (text === "") { + return; + } + + if (old_comment_text == text) { + SwalNoAnimation.fire({ + title: 'Unable to edit comment', + html: _gettext('Comment body was not changed.'), + }); + return; + } + var excludeCancelBtn = false; + var submitEvent = true; + commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent); + commentForm.cm.setOption("readOnly", true); + + // Read last version known + var versionSelector = $('#comment_versions_{0}'.format(comment_id)); + var version = versionSelector.data('lastVersion'); + + if (!version) { + version = 0; + } + + var postData = { + 'text': text, + 'f_path': f_path, + 'line': lineno, + 'comment_type': commentType, + 'version': version, + 'csrf_token': CSRF_TOKEN + }; + + var submitSuccessCallback = function(json_data) { + $form.remove(); + $comment.show(); + var postData = { + 'text': text, + 'renderer': $comment.attr('data-comment-renderer'), + 'csrf_token': CSRF_TOKEN + }; + + /* Inject new edited version selector */ + var updateCommentVersionDropDown = function () { + var versionSelectId = '#comment_versions_'+comment_id; + var preLoadVersionData = [ + { + id: json_data['comment_version'], + text: "v{0}".format(json_data['comment_version']), + action: function () { + Rhodecode.comments.showVersion( + json_data['comment_id'], + json_data['comment_history_id'] + ) + }, + comment_version: json_data['comment_version'], + comment_author_username: json_data['comment_author_username'], + comment_author_gravatar: json_data['comment_author_gravatar'], + comment_created_on: json_data['comment_created_on'], + }, + ] + + + if ($(versionSelectId).data('select2')) { + var oldData = $(versionSelectId).data('select2').opts.data.results; + $(versionSelectId).select2("destroy"); + preLoadVersionData = oldData.concat(preLoadVersionData) + } + + initVersionSelector(versionSelectId, 
{results: preLoadVersionData}); + + $comment.attr('data-comment-text', utf8ToB64(text)); + + var versionSelector = $('#comment_versions_'+comment_id); + + // set lastVersion so we know our last edit version + versionSelector.data('lastVersion', json_data['comment_version']) + versionSelector.parent().show(); + } + updateCommentVersionDropDown(); + + // by default we reset state of comment preserving the text + var failRenderCommit = function(jqXHR, textStatus, errorThrown) { + var prefix = "Error while editing this comment.\n" + var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix); + ajaxErrorSwal(message); + }; + + var successRenderCommit = function(o){ + $comment.show(); + $comment[0].lastElementChild.innerHTML = o; + }; + + var previewUrl = pyroutes.url( + 'repo_commit_comment_preview', + {'repo_name': templateContext.repo_name, + 'commit_id': templateContext.commit_data.commit_id}); + + _submitAjaxPOST( + previewUrl, postData, successRenderCommit, + failRenderCommit + ); + + try { + var html = json_data.rendered_text; + var lineno = json_data.line_no; + var target_id = json_data.target_id; + + $comments.find('.cb-comment-add-button').before(html); + + // run global callback on submit + commentForm.globalSubmitSuccessCallback(); + + } catch (e) { + console.error(e); + } + + // re trigger the linkification of next/prev navigation + linkifyComments($('.inline-comment-injected')); + timeagoActivate(); + tooltipActivate(); + + if (window.updateSticky !== undefined) { + // potentially our comments change the active window size, so we + // notify sticky elements + updateSticky() + } + + commentForm.setActionButtonsDisabled(false); + + }; + var submitFailCallback = function(jqXHR, textStatus, errorThrown) { + var prefix = "Error while editing comment.\n" + var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix); + if (jqXHR.status == 409){ + message = 'This comment was probably changed somewhere else. 
Please reload the content of this comment.' + ajaxErrorSwal(message, 'Comment version mismatch.'); + } else { + ajaxErrorSwal(message); + } + + commentForm.resetCommentFormState(text) + }; + commentForm.submitAjaxPOST( + commentForm.submitUrl, postData, + submitSuccessCallback, + submitFailCallback); + }); + } + + $form.addClass('comment-inline-form-open'); + }; + this.createComment = function(node, resolutionComment) { var resolvesCommentId = resolutionComment || null; var $node = $(node); var $td = $node.closest('td'); var $form = $td.find('.comment-inline-form'); + this.edit = false; if (!$form.length) { @@ -816,8 +1132,9 @@ var CommentsController = function() { var placeholderText = _gettext('Leave a comment on line {0}.').format(lineno); var _form = $($form[0]).find('form'); var autocompleteActions = ['as_note', 'as_todo']; + var comment_id=null; var commentForm = this.createCommentForm( - _form, lineno, placeholderText, autocompleteActions, resolvesCommentId); + _form, lineno, placeholderText, autocompleteActions, resolvesCommentId, this.edit, comment_id); $.Topic('/ui/plugins/code/comment_form_built').prepareOrPublish({ form: _form, diff --git a/rhodecode/public/js/src/rhodecode/pullrequests.js b/rhodecode/public/js/src/rhodecode/pullrequests.js --- a/rhodecode/public/js/src/rhodecode/pullrequests.js +++ b/rhodecode/public/js/src/rhodecode/pullrequests.js @@ -70,7 +70,7 @@ replacing '-' and '_' into spaces * @param limit * @returns {*[]} */ -var getTitleAndDescription = function(sourceRef, elements, limit) { +var getTitleAndDescription = function(sourceRefType, sourceRef, elements, limit) { var title = ''; var desc = ''; @@ -85,7 +85,9 @@ var getTitleAndDescription = function(so } else { // use reference name - title = sourceRef.replace(/-/g, ' ').replace(/_/g, ' ').capitalizeFirstLetter(); + var normalizedRef = sourceRef.replace(/-/g, ' ').replace(/_/g, ' ').capitalizeFirstLetter() + var refType = sourceRefType; + title = 'Changes from {0}: 
{1}'.format(refType, normalizedRef); } return [title, desc] diff --git a/rhodecode/public/js/src/rhodecode/utils/ajax.js b/rhodecode/public/js/src/rhodecode/utils/ajax.js --- a/rhodecode/public/js/src/rhodecode/utils/ajax.js +++ b/rhodecode/public/js/src/rhodecode/utils/ajax.js @@ -130,10 +130,13 @@ function formatErrorMessage(jqXHR, textS } } -function ajaxErrorSwal(message) { +function ajaxErrorSwal(message, title) { + + var title = (typeof title !== 'undefined') ? title : _gettext('Ajax Request Error'); + SwalNoAnimation.fire({ icon: 'error', - title: _gettext('Ajax Request Error'), + title: title, html: '{0}'.format(message), showClass: { popup: 'swal2-noanimation', diff --git a/rhodecode/public/js/src/rhodecode/utils/string.js b/rhodecode/public/js/src/rhodecode/utils/string.js --- a/rhodecode/public/js/src/rhodecode/utils/string.js +++ b/rhodecode/public/js/src/rhodecode/utils/string.js @@ -182,3 +182,13 @@ var htmlEnDeCode = (function() { htmlDecode: htmlDecode }; })(); + +function b64DecodeUnicode(str) { + return decodeURIComponent(atob(str).split('').map(function (c) { + return '%' + ('00' + c.charCodeAt(0).toString(16)).slice(-2); + }).join('')); +} + +function utf8ToB64( str ) { + return window.btoa(unescape(encodeURIComponent( str ))); +} \ No newline at end of file diff --git a/rhodecode/templates/admin/my_account/my_account_auth_tokens.mako b/rhodecode/templates/admin/my_account/my_account_auth_tokens.mako --- a/rhodecode/templates/admin/my_account/my_account_auth_tokens.mako +++ b/rhodecode/templates/admin/my_account/my_account_auth_tokens.mako @@ -11,9 +11,15 @@

- ${_('Authentication tokens can be used to interact with the API, or VCS-over-http. ' - 'Each token can have a role. Token with a role can be used only in given context, ' - 'e.g. VCS tokens can be used together with the authtoken auth plugin for git/hg/svn operations only.')} + ${_('Available roles')}: +

    + % for role in h.UserApiKeys.ROLES: +
  • + ${h.UserApiKeys._get_role_name(role)} + ${h.UserApiKeys._get_role_description(role) |n} +
  • + % endfor +

@@ -36,7 +42,7 @@ @@ -116,10 +121,15 @@ - ${_('permission for other logged in users')} % endif % else: - ${h.link_to_user(_user.username)} - %if getattr(_user, 'duplicate_perm', None): - (${_('inactive duplicate')}) - %endif + % if getattr(_user, 'duplicate_perm', None): + + ${h.link_to_user(_user.username)} + (${_('inactive duplicate')}) + + + % else: + ${h.link_to_user(_user.username)} + % endif % endif (${_('delegated admin')}) diff --git a/rhodecode/templates/admin/repos/repo_add_base.mako b/rhodecode/templates/admin/repos/repo_add_base.mako --- a/rhodecode/templates/admin/repos/repo_add_base.mako +++ b/rhodecode/templates/admin/repos/repo_add_base.mako @@ -46,7 +46,7 @@ ${h.select('repo_group',request.GET.get('parent_group'),c.repo_groups,class_="medium")} % if c.personal_repo_group: - ${_('Select my personal group (%(repo_group_name)s)') % {'repo_group_name': c.personal_repo_group.group_name}} + ${_('Select my personal group ({})').format(c.personal_repo_group.group_name)} % endif ${_('Optionally select a group to put this repository into.')} diff --git a/rhodecode/templates/admin/repos/repo_edit_advanced.mako b/rhodecode/templates/admin/repos/repo_edit_advanced.mako --- a/rhodecode/templates/admin/repos/repo_edit_advanced.mako +++ b/rhodecode/templates/admin/repos/repo_edit_advanced.mako @@ -167,11 +167,16 @@
+ % if c.rhodecode_db_repo.archived: + This repository is already archived. Only super-admin users can un-archive this repository. + % else: + % endif +
diff --git a/rhodecode/templates/admin/repos/repo_edit_permissions.mako b/rhodecode/templates/admin/repos/repo_edit_permissions.mako --- a/rhodecode/templates/admin/repos/repo_edit_permissions.mako +++ b/rhodecode/templates/admin/repos/repo_edit_permissions.mako @@ -94,10 +94,16 @@ - ${_('permission for other logged in users')} % endif % else: - ${h.link_to_user(_user.username)} - %if getattr(_user, 'duplicate_perm', None): - (${_('inactive duplicate')}) - %endif + % if getattr(_user, 'duplicate_perm', None): + + ${h.link_to_user(_user.username)} + (${_('inactive duplicate')}) + + + % else: + ${h.link_to_user(_user.username)} + % endif + %if getattr(_user, 'branch_rules', None): % if used_by_n_rules == 1: (${_('used by {} branch rule, requires write+ permissions').format(used_by_n_rules)}) diff --git a/rhodecode/templates/admin/user_groups/user_group_edit_perms.mako b/rhodecode/templates/admin/user_groups/user_group_edit_perms.mako --- a/rhodecode/templates/admin/user_groups/user_group_edit_perms.mako +++ b/rhodecode/templates/admin/user_groups/user_group_edit_perms.mako @@ -74,10 +74,15 @@ - ${_('permission for other logged in users')} % endif % else: - ${h.link_to_user(_user.username)} - %if getattr(_user, 'duplicate_perm', None): - (${_('inactive duplicate')}) - %endif + % if getattr(_user, 'duplicate_perm', None): + + ${h.link_to_user(_user.username)} + (${_('inactive duplicate')}) + + + % else: + ${h.link_to_user(_user.username)} + % endif % endif @@ -122,10 +127,15 @@ - ${_('permission for other logged in users')} % endif % else: - ${h.link_to_user(_user.username)} - %if getattr(_user, 'duplicate_perm', None): - (${_('inactive duplicate')}) - %endif + % if getattr(_user, 'duplicate_perm', None): + + ${h.link_to_user(_user.username)} + (${_('inactive duplicate')}) + + + % else: + ${h.link_to_user(_user.username)} + % endif % endif (${_('delegated admin')}) diff --git a/rhodecode/templates/admin/users/user_edit.mako 
b/rhodecode/templates/admin/users/user_edit.mako --- a/rhodecode/templates/admin/users/user_edit.mako +++ b/rhodecode/templates/admin/users/user_edit.mako @@ -27,8 +27,8 @@ <%def name="main()">
${auth_token.description} - ${auth_token.role_humanized} + ${auth_token.role_humanized} ${auth_token.scope_humanized} diff --git a/rhodecode/templates/admin/repo_groups/repo_group_add.mako b/rhodecode/templates/admin/repo_groups/repo_group_add.mako --- a/rhodecode/templates/admin/repo_groups/repo_group_add.mako +++ b/rhodecode/templates/admin/repo_groups/repo_group_add.mako @@ -44,7 +44,12 @@
- ${h.select('group_parent_id',request.GET.get('parent_group'),c.repo_groups,class_="medium")} + ${h.select('group_parent_id', request.GET.get('parent_group'),c.repo_groups,class_="medium")} + % if c.personal_repo_group: + + ${_('Select my personal group ({})').format(c.personal_repo_group.group_name)} + + % endif
@@ -106,6 +111,12 @@ setCopyPermsOption(e.val) }); $('#group_name').focus(); + + $('#select_my_group').on('click', function(e){ + e.preventDefault(); + $("#group_parent_id").val($(this).data('personalGroupId')).trigger("change"); + }) + }) diff --git a/rhodecode/templates/admin/repo_groups/repo_group_edit_permissions.mako b/rhodecode/templates/admin/repo_groups/repo_group_edit_permissions.mako --- a/rhodecode/templates/admin/repo_groups/repo_group_edit_permissions.mako +++ b/rhodecode/templates/admin/repo_groups/repo_group_edit_permissions.mako @@ -68,10 +68,15 @@ - ${_('permission for other logged in users')} % endif % else: - ${h.link_to_user(_user.username)} - %if getattr(_user, 'duplicate_perm', None): - (${_('inactive duplicate')}) - %endif + % if getattr(_user, 'duplicate_perm', None): + + ${h.link_to_user(_user.username)} + (${_('inactive duplicate')}) + + + % else: + ${h.link_to_user(_user.username)} + % endif % endif
@@ -41,7 +47,7 @@ + ${render_hunk_lines(filediff, c.user_session_attrs["diffmode"], hunk, use_comments=use_comments, inline_comments=inline_comments, active_pattern_entries=active_pattern_entries)} % endfor @@ -654,21 +658,28 @@ def get_comments_for(diff_type, comments %> <%def name="render_hunk_lines_sideside(filediff, hunk, use_comments=False, inline_comments=None, active_pattern_entries=None)"> - %for i, line in enumerate(hunk.sideside): + + <% chunk_count = 1 %> + %for loop_obj, item in h.looper(hunk.sideside): <% + line = item + i = loop_obj.index + prev_line = loop_obj.previous old_line_anchor, new_line_anchor = None, None if line.original.lineno: old_line_anchor = diff_line_anchor(filediff.raw_id, hunk.source_file_path, line.original.lineno, 'o') if line.modified.lineno: new_line_anchor = diff_line_anchor(filediff.raw_id, hunk.target_file_path, line.modified.lineno, 'n') + + line_action = line.modified.action or line.original.action + prev_line_action = prev_line and (prev_line.modified.action or prev_line.original.action) %> %endfor @@ -776,9 +793,9 @@ def get_comments_for(diff_type, comments % if comments: <% has_outdated = any([x.outdated for x in comments]) %> % if has_outdated: - + % else: - + % endif % endif @@ -838,7 +855,7 @@ def get_comments_for(diff_type, comments -<%def name="render_diffset_menu(diffset, range_diff_on=None)"> +<%def name="render_diffset_menu(diffset, range_diff_on=None, commit=None, pull_request_menu=None)"> <% diffset_container_id = h.md5(diffset.target_ref) %>
@@ -899,12 +916,33 @@ def get_comments_for(diff_type, comments
-
+
Context file:
+
+ + %if commit: + + ${h.show_id(commit)} + + %elif pull_request_menu and pull_request_menu.get('pull_request'): + + !${pull_request_menu['pull_request'].pull_request_id} + + %endif + % if commit or pull_request_menu: + Loading diff...: + + + + + + + % endif +
@@ -1027,10 +1065,86 @@ def get_comments_for(diff_type, comments e.preventDefault(); }); + diffNavText = 'diff navigation:' + + getCurrentChunk = function () { + + var chunksAll = $('.nav-chunk').filter(function () { + return $(this).parents('.filediff').prev().get(0).checked !== true + }) + var chunkSelected = $('.nav-chunk.selected'); + var initial = false; + + if (chunkSelected.length === 0) { + // no initial chunk selected, we pick first + chunkSelected = $(chunksAll.get(0)); + var initial = true; + } + + return { + 'all': chunksAll, + 'selected': chunkSelected, + 'initial': initial, + } + } + + animateDiffNavText = function () { + var $diffNav = $('#diff_nav') + + var callback = function () { + $diffNav.animate({'opacity': 1.00}, 200) + }; + $diffNav.animate({'opacity': 0.15}, 200, callback); + } + + scrollToChunk = function (moveBy) { + var chunk = getCurrentChunk(); + var all = chunk.all + var selected = chunk.selected + + var curPos = all.index(selected); + var newPos = curPos; + if (!chunk.initial) { + var newPos = curPos + moveBy; + } + + var curElem = all.get(newPos); + + if (curElem === undefined) { + // end or back + $('#diff_nav').html('no next diff element:') + animateDiffNavText() + return + } else if (newPos < 0) { + $('#diff_nav').html('no previous diff element:') + animateDiffNavText() + return + } else { + $('#diff_nav').html(diffNavText) + } + + curElem = $(curElem) + var offset = 100; + $(window).scrollTop(curElem.position().top - offset); + + //clear selection + all.removeClass('selected') + curElem.addClass('selected') + } + + scrollToPrevChunk = function () { + scrollToChunk(-1) + } + scrollToNextChunk = function () { + scrollToChunk(1) + } + % endif diff --git a/rhodecode/templates/debug_style/code-block.html b/rhodecode/templates/debug_style/code-block.html --- a/rhodecode/templates/debug_style/code-block.html +++ b/rhodecode/templates/debug_style/code-block.html @@ -940,7 +940,7 @@ with multiple lines

Commenting on line o80.
- Comments parsed using RST syntax with @mention support. + Comments parsed using RST syntax with @mention support.
diff --git a/rhodecode/templates/debug_style/collapsable-content.html b/rhodecode/templates/debug_style/collapsable-content.html --- a/rhodecode/templates/debug_style/collapsable-content.html +++ b/rhodecode/templates/debug_style/collapsable-content.html @@ -735,7 +735,7 @@ Commenting on line {1}.
- Comments parsed using RST syntax with @mention support. + Comments parsed using RST syntax with @mention support.
@@ -786,7 +786,7 @@ Create a comment on this Pull Request.
- Comments parsed using RST syntax with @mention support. + Comments parsed using RST syntax with @mention support.
diff --git a/rhodecode/templates/debug_style/email.mako b/rhodecode/templates/debug_style/email.mako --- a/rhodecode/templates/debug_style/email.mako +++ b/rhodecode/templates/debug_style/email.mako @@ -8,11 +8,6 @@ SUBJECT:
${c.subject}
-HEADERS: -
-${c.headers}
-
- PLAINTEXT:
 ${c.email_body_plaintext|n}
diff --git a/rhodecode/templates/ejs_templates/templates.html b/rhodecode/templates/ejs_templates/templates.html
--- a/rhodecode/templates/ejs_templates/templates.html
+++ b/rhodecode/templates/ejs_templates/templates.html
@@ -130,6 +130,34 @@ var CG = new ColorGenerator();
 
 
 
+
+
+
 
 
 
${auth_token.description} - ${auth_token.role_humanized} + ${auth_token.role_humanized} ${auth_token.scope_humanized} diff --git a/rhodecode/templates/base/base.mako b/rhodecode/templates/base/base.mako --- a/rhodecode/templates/base/base.mako +++ b/rhodecode/templates/base/base.mako @@ -1,11 +1,7 @@ ## -*- coding: utf-8 -*- <%! - ## base64 filter e.g ${ example | base64 } - def base64(text): - import base64 - from rhodecode.lib.helpers import safe_str - return base64.encodestring(safe_str(text)) + from rhodecode.lib import html_filters %> <%inherit file="root.mako"/> @@ -247,7 +243,9 @@
${self.gravatar(email, size, tooltip=tooltip, tooltip_alt=contact, user=rc_user)} - ${h.link_to_user(rc_user or contact)} + + ${h.link_to_user(rc_user or contact)} +
@@ -396,7 +394,7 @@ - %if h.HasRepoPermissionAll('repository.admin')(c.repo_name): + %if not c.rhodecode_db_repo.archived and h.HasRepoPermissionAll('repository.admin')(c.repo_name):
  • %endif @@ -510,7 +508,7 @@ ## create action
  • - +
  • -
    <% line_old_comments = None %> %if line.original.get_comment_args: @@ -677,12 +688,11 @@ def get_comments_for(diff_type, comments %if line_old_comments: <% has_outdated = any([x.outdated for x in line_old_comments]) %> % if has_outdated: - + % else: - + % endif %endif -
    %endif %if line_new_comments: + <% has_outdated = any([x.outdated for x in line_new_comments]) %> % if has_outdated: - + % else: - + % endif %endif @@ -747,6 +758,12 @@ def get_comments_for(diff_type, comments %if use_comments and line.modified.lineno and line_new_comments: ${inline_comments_container(line_new_comments, active_pattern_entries=active_pattern_entries)} %endif + % if line_action in ['+', '-'] and prev_line_action not in ['+', '-']: + + <% chunk_count +=1 %> + % endif