@@ -0,0 +1,53 @@
+# RhodeCode VCSServer provides access to different vcs backends via network.
+# Copyright (C) 2014-2023 RhodeCode GmbH
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+import hashlib
+from vcsserver.lib.str_utils import safe_bytes, safe_str
+
+
+def md5(s):
+    return hashlib.md5(s).hexdigest()
+
+
+def md5_safe(s, return_type=''):
+
+    val = md5(safe_bytes(s))
+    if return_type == 'str':
+        val = safe_str(val)
+    return val
+
+
+def sha1(s):
+    return hashlib.sha1(s).hexdigest()
+
+
+def sha1_safe(s, return_type=''):
+    val = sha1(safe_bytes(s))
+    if return_type == 'str':
+        val = safe_str(val)
+    return val
+
+
+def sha256(s):
+    return hashlib.sha256(s).hexdigest()
+
+
+def sha256_safe(s, return_type=''):
+    val = sha256(safe_bytes(s))
+    if return_type == 'str':
+        val = safe_str(val)
+    return val
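A quick usage sketch of the new helpers; the import path is an assumption (the diff does not show the new file's name) and simply mirrors the vcsserver.lib.str_utils import above. The *_safe variants coerce input through safe_bytes() before hashing, and return_type='str' returns the hex digest as a native string.

# assumed import path -- adjust to wherever this module actually lands
from vcsserver.lib.hash_utils import md5_safe, sha1_safe, sha256_safe

cache_key = md5_safe('repo-name:commit-id', return_type='str')  # hex digest as str
raw_digest = sha1_safe(b'already-bytes payload')                # bytes input works too
strong_key = sha256_safe('larger payload', return_type='str')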
@@ -1,12 +1,49 @@
+.DEFAULT_GOAL := help
+
+# Pretty print values cf. https://misc.flogisoft.com/bash/tip_colors_and_formatting
+RESET := \033[0m # Reset all formatting
+GREEN := \033[0;32m # Resets before setting 16b colour (32 -- green)
+YELLOW := \033[0;33m
+ORANGE := \033[0;38;5;208m # Reset then set 256b colour (208 -- orange)
+PEACH := \033[0;38;5;216m
+
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Help usage builder ------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+# use '# >>> Build commands' to create section
+# use '# target: target description' to create help for target
+.PHONY: help
+help:
+    @echo "Usage:"
+    @cat $(MAKEFILE_LIST) | grep -E '^# >>>|^# [A-Za-z0-9_.-]+:' | sed -E 's/^# //' | awk ' \
+        BEGIN { \
+            green="\033[32m"; \
+            yellow="\033[33m"; \
+            reset="\033[0m"; \
+            section=""; \
+        } \
+        /^>>>/ { \
+            section=substr($$0, 5); \
+            printf "\n" green ">>> %s" reset "\n", section; \
+            next; \
+        } \
+        /^([A-Za-z0-9_.-]+):/ { \
+            target=$$1; \
+            gsub(/:$$/, "", target); \
+            description=substr($$0, index($$0, ":") + 2); \
+            if (description == "") { description="-"; } \
+            printf " - " yellow "%-35s" reset " %s\n", target, description; \
+        } \
+    '
+
 # required for pushd to work..
 SHELL = /bin/bash
 
-
-# set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
-OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
+# >>> Tests commands
 
 .PHONY: clean
-#
+# clean: Cleanup compiled and cache py files
 clean:
     make test-clean
    find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
@@ -14,14 +51,14 @@ clean:
 
 
 .PHONY: test
-#
+# test: run test-clean and tests
 test:
     make test-clean
-    make test-only
+    unset RC_SQLALCHEMY_DB1_URL && unset RC_DB_URL && make test-only
 
 
 .PHONY: test-clean
-#
+# test-clean: run test-clean and tests
 test-clean:
     rm -rf coverage.xml htmlcov junit.xml pylint.log result
     find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
@@ -29,33 +66,19 @@ test-clean:
 
 
 .PHONY: test-only
-#
+# test-only: Run tests only without cleanup
 test-only:
     PYTHONHASHSEED=random \
     py.test -x -vv -r xw -p no:sugar \
     --cov-report=term-missing --cov-report=html \
     --cov=vcsserver vcsserver
 
-
-.PHONY: ruff-check
-## run a ruff analysis
-ruff-check:
-    ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
+# >>> Dev commands
 
-.PHONY: pip-packages
-## Show outdated packages
-pip-packages:
-    python ${OUTDATED_PACKAGES}
-
-
-.PHONY: build
-## Build sdist/egg
-build:
-    python -m build
 
 
 .PHONY: dev-sh
-#
+# dev-sh: make dev-sh
 dev-sh:
     sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list"
     sudo apt-get update
@@ -68,14 +91,14 @@ dev-sh:
 
 
 .PHONY: dev-cleanup
-#
+# dev-cleanup: Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
 dev-cleanup:
     pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
     rm -rf /tmp/*
 
 
 .PHONY: dev-env
-#
+# dev-env: make dev-env based on the requirements files and install develop of packages
 ## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
 dev-env:
     sudo -u root chown rhodecode:rhodecode /home/rhodecode/.cache/pip/
@@ -86,7 +109,7 @@ dev-env:
 
 
 .PHONY: sh
-#
+# sh: shortcut for make dev-sh dev-env
 sh:
     make dev-env
     make dev-sh
@@ -96,49 +119,12 @@ sh:
 workers?=1
 
 .PHONY: dev-srv
-#
+# dev-srv: run gunicorn web server with reloader, use workers=N to set multiworker mode, workers=N allows changes of workers
 dev-srv:
     gunicorn --paste=.dev/dev.ini --bind=0.0.0.0:10010 --config=.dev/gunicorn_config.py --reload --workers=$(workers)
 
-
-# Default command on calling make
-.DEFAULT_GOAL := show-help
+.PHONY: ruff-check
+# ruff-check: run a ruff analysis
+ruff-check:
+    ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
 
-.PHONY: show-help
-show-help:
-    @echo "$$(tput bold)Available rules:$$(tput sgr0)"
-    @echo
-    @sed -n -e "/^## / { \
-        h; \
-        s/.*//; \
-        :doc" \
-        -e "H; \
-        n; \
-        s/^## //; \
-        t doc" \
-        -e "s/:.*//; \
-        G; \
-        s/\\n## /---/; \
-        s/\\n/ /g; \
-        p; \
-    }" ${MAKEFILE_LIST} \
-    | LC_ALL='C' sort --ignore-case \
-    | awk -F '---' \
-        -v ncol=$$(tput cols) \
-        -v indent=19 \
-        -v col_on="$$(tput setaf 6)" \
-        -v col_off="$$(tput sgr0)" \
-    '{ \
-        printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
-        n = split($$2, words, " "); \
-        line_length = ncol - indent; \
-        for (i = 1; i <= n; i++) { \
-            line_length -= length(words[i]) + 1; \
-            if (line_length <= 0) { \
-                line_length = ncol - indent - length(words[i]) - 1; \
-                printf "\n%*s ", -indent, " "; \
-            } \
-            printf "%s ", words[i]; \
-        } \
-        printf "\n"; \
-    }'
@@ -13,6 +13,7 @@ import traceback
 import random
 import socket
 import dataclasses
+import json
 from gunicorn.glogging import Logger
 
 
@@ -37,17 +38,41 @@ accesslog = '-'
 worker_tmp_dir = None
 tmp_upload_dir = None
 
-# use re-use port logic
-
+# use re-use port logic to let linux internals load-balance the requests better.
+reuse_port = True
 
 # Custom log format
 #access_log_format = (
 # '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
 
 # loki format for easier parsing in grafana
-access_log_format = (
+loki_access_log_format = (
     'time="%(t)s" pid=%(p)s level="INFO" type="[GNCRN]" ip="%(h)-15s" rqt="%(L)s" response_code="%(s)s" response_bytes="%(b)-6s" uri="%(m)s:%(U)s %(q)s" user=":%(u)s" user_agent="%(a)s"')
 
+# JSON format
+json_access_log_format = json.dumps({
+    'time': r'%(t)s',
+    'pid': r'%(p)s',
+    'level': 'INFO',
+    'ip': r'%(h)s',
+    'request_time': r'%(L)s',
+    'remote_address': r'%(h)s',
+    'user_name': r'%(u)s',
+    'status': r'%(s)s',
+    'method': r'%(m)s',
+    'url_path': r'%(U)s',
+    'query_string': r'%(q)s',
+    'protocol': r'%(H)s',
+    'response_length': r'%(B)s',
+    'referer': r'%(f)s',
+    'user_agent': r'%(a)s',
+
+})
+
+access_log_format = loki_access_log_format
+if os.environ.get('RC_LOGGING_FORMATTER') == 'json':
+    access_log_format = json_access_log_format
+
 # self adjust workers based on CPU count, to use maximum of CPU and not overquota the resources
 # workers = get_workers()
 
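A rough illustration, not part of the change: with RC_LOGGING_FORMATTER=json set, each access-log line Gunicorn emits is itself a JSON document once the %(...)s placeholders are filled in, so a log collector can parse it directly instead of regex-matching the loki-style key="value" format. The sample line below is made up.

import json

sample = ('{"time": "[10/Oct/2024:12:00:00 +0000]", "pid": "321", "level": "INFO", '
          '"ip": "127.0.0.1", "status": "200", "method": "GET", "url_path": "/_ping", '
          '"user_agent": "curl/8.5.0"}')
record = json.loads(sample)
print(record['status'], record['url_path'])  # -> 200 /_ping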
@@ -11,8 +11,8 @@ celery==5.3.6
 click==8.1.3
 click-repl==0.2.0
 click==8.1.3
-prompt
-wcwidth==0.2.
+prompt_toolkit==3.0.47
+wcwidth==0.2.13
 six==1.16.0
 kombu==5.3.5
 amqp==5.2.0
@@ -29,22 +29,22 @@ dogpile.cache==1.3.3
 pbr==5.11.1
 dulwich==0.21.6
 urllib3==1.26.14
-fsspec==2024.
-gunicorn==2
-packaging==24.
+fsspec==2024.9.0
+gunicorn==23.0.0
+packaging==24.1
 hg-evolve==11.1.3
 importlib-metadata==6.0.0
 zipp==3.15.0
 mercurial==6.7.4
 more-itertools==9.1.0
 msgpack==1.0.8
-orjson==3.10.
+orjson==3.10.7
 psutil==5.9.8
 py==1.11.0
 pygit2==1.13.3
 cffi==1.16.0
 pycparser==2.21
-pygments==2.1
+pygments==2.18.0
 pyparsing==3.1.1
 pyramid==2.0.2
 hupper==1.12
@@ -56,11 +56,11 @@ pyramid==2.0.2
 venusian==3.0.0
 webob==1.8.7
 zope.deprecation==5.0.0
-zope.interface==6.
-redis==5.
+zope.interface==6.4.post2
+redis==5.1.0
 async-timeout==4.0.3
 repoze.lru==0.7
-s3fs==2024.
+s3fs==2024.9.0
 aiobotocore==2.13.0
 aiohttp==3.9.5
 aiosignal==1.3.1
@@ -87,12 +87,12 @@ s3fs==2024.6.0
 yarl==1.9.4
 idna==3.4
 multidict==6.0.5
-fsspec==2024.
+fsspec==2024.9.0
 scandir==1.10.0
 setproctitle==1.3.3
 subvertpy==0.11.0
 waitress==3.0.0
-wcwidth==0.2.
+wcwidth==0.2.13
 
 
 ## test related requirements
@@ -4,38 +4,38 @@ pytest-cov==4.1.0
 coverage==7.4.3
 pytest==8.1.1
 iniconfig==2.0.0
-packaging==24.
+packaging==24.1
 pluggy==1.4.0
 pytest-env==1.1.3
 pytest==8.1.1
 iniconfig==2.0.0
-packaging==24.
+packaging==24.1
 pluggy==1.4.0
 pytest-profiling==1.7.0
 gprof2dot==2022.7.29
 pytest==8.1.1
 iniconfig==2.0.0
-packaging==24.
+packaging==24.1
 pluggy==1.4.0
 six==1.16.0
 pytest-rerunfailures==13.0
-packaging==24.
+packaging==24.1
 pytest==8.1.1
 iniconfig==2.0.0
-packaging==24.
+packaging==24.1
 pluggy==1.4.0
 pytest-runner==6.0.1
 pytest-sugar==1.0.0
-packaging==24.
+packaging==24.1
 pytest==8.1.1
 iniconfig==2.0.0
-packaging==24.
+packaging==24.1
 pluggy==1.4.0
 termcolor==2.4.0
 pytest-timeout==2.3.1
 pytest==8.1.1
 iniconfig==2.0.0
-packaging==24.
+packaging==24.1
 pluggy==1.4.0
 webtest==3.0.0
 beautifulsoup4==4.12.3
@@ -53,6 +53,12 @@ def ArchiveException(org_exc=None):
     return _make_exception_wrapper
 
 
+def ClientNotSupportedException(org_exc=None):
+    def _make_exception_wrapper(*args):
+        return _make_exception('client_not_supported', org_exc, *args)
+    return _make_exception_wrapper
+
+
 def LookupException(org_exc=None):
     def _make_exception_wrapper(*args):
         return _make_exception('lookup', org_exc, *args)
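A minimal usage sketch of the new factory; the caller-side names are illustrative and the import path is an assumption that matches how callers reference exceptions elsewhere in this change. Like the other wrappers in this module, it returns a callable that builds an exception tagged with the 'client_not_supported' kind, optionally chaining the original low-level error.

from vcsserver import exceptions  # assumed import path

def reject_client(client_info, org_exc=None):
    # the double call mirrors the wrapper pattern used throughout this module
    raise exceptions.ClientNotSupportedException(org_exc)(
        f'VCS client is not supported: {client_info}')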
@@ -18,6 +18,7 @@
 import re
 import logging
 
+from gunicorn.http.errors import NoMoreData
 from pyramid.config import Configurator
 from pyramid.response import Response, FileIter
 from pyramid.httpexceptions import (
@@ -166,9 +167,14 @@ def lfs_objects_oid_upload(request):
             # read in chunks as stream comes in from Gunicorn
             # this is a specific Gunicorn support function.
             # might work differently on waitress
-            chunk = body.read(blksize)
+            try:
+                chunk = body.read(blksize)
+            except NoMoreData:
+                chunk = None
+
             if not chunk:
                 break
+
             f.write(chunk)
 
     return {'upload': 'ok'}
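The same read pattern as a stand-alone sketch (the helper name is hypothetical): Gunicorn's body reader raises NoMoreData when the incoming stream ends before the expected data arrives, and treating that as a normal end-of-stream marker lets the upload loop finish cleanly instead of bubbling the error up.

from gunicorn.http.errors import NoMoreData

def iter_body(body, blksize=64 * 1024):
    """Yield chunks from a wsgi.input-style stream until it is exhausted."""
    while True:
        try:
            chunk = body.read(blksize)
        except NoMoreData:
            chunk = None  # treat an early end of the stream as EOF
        if not chunk:
            break
        yield chunk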
@@ -167,6 +167,8 @@ def _handle_exception(result):
 
     if exception_class == 'HTTPLockedRC':
         raise exceptions.RepositoryLockedException()(*result['exception_args'])
+    elif exception_class == 'ClientNotSupportedError':
+        raise exceptions.ClientNotSupportedException()(*result['exception_args'])
     elif exception_class == 'HTTPBranchProtected':
         raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
     elif exception_class == 'RepositoryError':
@@ -58,7 +58,7 @@ class S3Shard(BaseShard):
         # ensure folder in bucket exists
         destination = self.bucket
         if not self.fs.exists(destination):
-            self.fs.mkdir(destination
+            self.fs.mkdir(destination)
 
         writer = self._get_writer(full_path, mode)
 
@@ -37,9 +37,9 @@ from dogpile.cache.backends import redis
 from dogpile.cache.backends.file import FileLock
 from dogpile.cache.util import memoized_property
 
-from
-from
-from
+from ...lib.memory_lru_dict import LRUDict, LRUDictDebug
+from ...lib.str_utils import safe_bytes, safe_str
+from ...lib.type_utils import str2bool
 
 _default_max_size = 1024
 
@@ -166,6 +166,13 @@ class FileNamespaceBackend(PickleSeriali
     def get_store(self):
         return self.filename
 
+    def cleanup_store(self):
+        for ext in ("db", "dat", "pag", "dir"):
+            final_filename = self.filename + os.extsep + ext
+            if os.path.exists(final_filename):
+                os.remove(final_filename)
+                log.warning('Removed dbm file %s', final_filename)
+
 
 class BaseRedisBackend(redis_backend.RedisBackend):
     key_prefix = ''
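Why several extensions are checked: depending on the dbm implementation backing the file store, the data may live in a single file or in several companion files. A small illustration of the filenames cleanup_store() considers (the path below is made up):

import os

filename = '/tmp/rc_cache/cache_repo'  # illustrative store path
candidates = [filename + os.extsep + ext for ext in ("db", "dat", "pag", "dir")]
print(candidates)
# ['/tmp/rc_cache/cache_repo.db', '/tmp/rc_cache/cache_repo.dat',
#  '/tmp/rc_cache/cache_repo.pag', '/tmp/rc_cache/cache_repo.dir']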
@@ -257,7 +264,7 @@ class RedisMsgPackBackend(MsgPackSeriali
 
 
 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
-    from
+    from ...lib._vendor import redis_lock
 
     class _RedisLockWrapper:
         """LockWrapper for redis_lock"""
@@ -25,9 +25,9 @@ import decorator
 from dogpile.cache import CacheRegion
 
 
-from
-from
-from
+from ...lib.hash_utils import sha1
+from ...lib.str_utils import safe_bytes
+from ...lib.type_utils import str2bool  # noqa :required by imports from .utils
 
 from . import region_meta
 
@@ -64,7 +64,7 @@ class RequestWrapperTween:
 
     def __call__(self, request):
         start = time.time()
-        log.debug('Starting request
+        log.debug('Starting request processing')
         response = None
 
         try:
@@ -88,7 +88,7 @@ class RequestWrapperTween:
             total = time.time() - start
 
             log.info(
-                '
+                'Finished request processing: reqq[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
                 count, ip, request.environ.get('REQUEST_METHOD'),
                 _view_path, total, ua, _ver_,
                 extra={"time": total, "ver": _ver_, "code": resp_code,