From 75432303788df25278d97db4a1d3a65ad014355f Mon Sep 17 00:00:00 2001 From: Miksir Date: Wed, 27 May 2020 19:17:22 +0300 Subject: [PATCH 001/398] Gitlab OAuth authentication --- src/auth/auth_base.py | 3 + src/auth/auth_gitlab.py | 276 +++++++++++++++++++++++++ src/auth/tornado_auth.py | 4 +- src/model/server_conf.py | 3 + web-src/public/login.html | 7 + web-src/src/assets/css/index.css | 37 ++++ web-src/src/assets/gitlab-icon-rgb.png | Bin 0 -> 1182 bytes web-src/src/login/login.js | 32 +++ 8 files changed, 361 insertions(+), 1 deletion(-) create mode 100644 src/auth/auth_gitlab.py create mode 100644 web-src/src/assets/gitlab-icon-rgb.png diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index a5ddefb1..e5e2553f 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -16,6 +16,9 @@ def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] + def is_active(self, user): + return True + class AuthRejectedError(Exception): """Credentials, provided by user, were rejected by the authentication mechanism (user is unknown to the server)""" diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py new file mode 100644 index 00000000..939fa2b0 --- /dev/null +++ b/src/auth/auth_gitlab.py @@ -0,0 +1,276 @@ +import json +import logging +import os +import time +import urllib.parse as urllib_parse + +import tornado.auth +import tornado.ioloop +from tornado.auth import OAuth2Mixin +from tornado import gen, httpclient, escape + +from auth import auth_base +from auth.auth_base import AuthFailureError, AuthBadRequestException +from model import model_helper + +from typing import List, Any, Dict, cast, Iterable, Union, Optional + +LOGGER = logging.getLogger('script_server.GitlabAuthorizer') + + +class GitlabOAuth2Mixin(OAuth2Mixin): + _OAUTH_AUTHORIZE_URL = '%s/oauth/authorize' + _OAUTH_ACCESS_TOKEN_URL = '%s/oauth/token' + _OAUTH_GITLAB_USERINFO = '%s/api/v4/user' + _OAUTH_GITLAB_GROUPS = '%s/api/v4/groups' + _GITLAB_PREFIX 
= 'https://gitlab.com' + + async def oauth2_request(self, url: str, access_token: str = None, post_args: Dict[str, Any] = None, + **args: Any) -> Any: + try: + return await super().oauth2_request(url, access_token, post_args, **args) + except tornado.httpclient.HTTPClientError as e: + LOGGER.error("HTTP error " + str(e.message)) + return None + + async def get_authenticated_user( + self, + redirect_uri: str, + client_id: str, + client_secret: str, + code: str, + extra_fields: Dict[str, Any] = None, + ) -> Optional[Dict[str, Any]]: + http = self.get_auth_http_client() + args = { + "redirect_uri": redirect_uri, + "code": code, + "client_id": client_id, + "client_secret": client_secret, + "grant_type": "authorization_code", + } + + fields = {"id", "username", "name", "email", "state"} + if extra_fields: + fields.update(extra_fields) + + body = urllib_parse.urlencode(args) + http_client = httpclient.AsyncHTTPClient() + response = await http_client.fetch( + self._OAUTH_ACCESS_TOKEN_URL % self._GITLAB_PREFIX, + method='POST', + headers={'Content-Type': 'application/x-www-form-urlencoded'}, + body=body, + raise_error=False) + + response_values = {} + if response.body: + response_values = escape.json_decode(response.body) + + if response.error: + if response_values.get('error_description'): + error_text = response_values.get('error_description') + elif response_values.get('error'): + error_text = response_values.get('error') + else: + error_text = str(response.error) + + error_message = 'Failed to load access_token: ' + error_text + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + access_token = response_values.get('access_token') + + if not access_token: + message = 'No access token in response: ' + str(response.body) + LOGGER.error(message) + raise AuthFailureError(message) + + user = await self.oauth2_request( + self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, + access_token) + + if user is None: + error_message = 'Failed to load user info' + 
LOGGER.error(error_message) + raise AuthFailureError(error_message) + + fieldmap = response_values + for field in fields: + fieldmap[field] = user.get(field) + + return fieldmap + + +# noinspection PyProtectedMember +class GitlabOAuthAuthenticator(auth_base.Authenticator, GitlabOAuth2Mixin): + def __init__(self, params_dict): + super().__init__() + + LOGGER.debug("Init gitlab oauth provider with " + str(params_dict)) + + self.client_id = model_helper.read_obligatory(params_dict, 'client_id', ' for Gitlab OAuth') + + secret_value = model_helper.read_obligatory(params_dict, 'secret', ' for Gitlab OAuth') + self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) + + gitlabPrefix = params_dict.get('url') + if not model_helper.is_empty(gitlabPrefix): + self._GITLAB_PREFIX = gitlabPrefix + + self.states = {} + self.user_states = {} + self.gitlab_update = params_dict.get('ttl', 60) + self.gitlab_dump = params_dict.get('dump') + self.session_expire = int(params_dict.get('session_expire_min', 0)) * 60 + + if self.gitlab_dump and os.path.exists(self.gitlab_dump): + dumpFile = open(self.gitlab_dump, "r") + stateStr = dumpFile.read() + self.user_states = escape.json_decode(stateStr) + dumpFile.close() + LOGGER.info("Readed state from file %s: " % self.gitlab_dump + stateStr) + + self.gitlab_group_search = params_dict.get('group_search') + + self._client_visible_config['client_id'] = self.client_id + self._client_visible_config['oauth_url'] = self._OAUTH_AUTHORIZE_URL % self._GITLAB_PREFIX + self._client_visible_config['oauth_scope'] = 'api' + + def authenticate(self, request_handler): + code = request_handler.get_argument('code', False) + + if not code: + LOGGER.error('Code is not specified') + raise AuthBadRequestException('Missing authorization information. 
Please contact your administrator') + + return self.validate_user(code, request_handler) + + def is_active(self, user): + if self.user_states.get(user) is None: + LOGGER.info("User %s not found in state" % user) + return False + if self.user_states[user]['groups'] is None: + LOGGER.info("User %s state without groups" % user) + return False + now = time.time() + if self.session_expire and (self.user_states[user]['visit'] + self.session_expire) < now: + del self.user_states[user] + LOGGER.info("User %s session expired, logged out" % user) + self.dump_sessions_to_file() + return False + self.user_states[user]['visit'] = now + return True + + def get_groups(self, user, known_groups=None): + if self.user_states.get(user) is None: + return [] + now = time.time() + if (self.user_states[user]['updated'] + self.gitlab_update) < now and not self.user_states[user]['updating']: + self.user_states[user]['updating'] = True + tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + return self.user_states[user]['groups'] + + def clean_expired_sessions(self): + now = time.time() + if self.session_expire: + for userData in list(self.user_states.keys()): + if (self.user_states[userData]['visit'] + self.session_expire) < now: + LOGGER.debug("User %s session expired and removed" % userData) + del self.user_states[userData] + + def dump_sessions_to_file(self): + if self.gitlab_dump: + dumpFile = open(self.gitlab_dump, "w") + dumpFile.write(escape.json_encode(self.user_states)) + dumpFile.close() + LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) + + @gen.coroutine + def update_state(self, user): + group_list = yield self.read_groups(self.user_states[user]['access_token']) + if group_list is None: + LOGGER.error("Failed to refresh groups for %s" % user) + else: + LOGGER.info("Groups for %s refreshed: " % user + str(group_list)) + now = time.time() + self.user_states[user]['groups'] = group_list + self.user_states[user]['updating'] = False + 
self.user_states[user]['updated'] = now + self.user_states[user]['visit'] = now + self.clean_expired_sessions() + self.dump_sessions_to_file() + return + + @gen.coroutine + def read_groups(self, access_token): + args = { + 'access_token': access_token, + 'all_available': 'false', + 'per_page': 100, + } + if not self.gitlab_group_search is None: + args['search'] = self.gitlab_group_search + + group_list_future = self.oauth2_request( + self._OAUTH_GITLAB_GROUPS % self._GITLAB_PREFIX, + **args + ) + + group_list = yield group_list_future + + if group_list is None: + return None + + groups = [] + for group in group_list: + if group.get('full_path'): + groups.append(group['full_path']) + + return groups + + @gen.coroutine + def validate_user(self, code, request_handler): + user_response_future = self.get_authenticated_user( + get_path_for_redirect(request_handler), + self.client_id, + self.secret, + code + ) + user_response = yield user_response_future + + if user_response.get('email') is None: + error_message = 'No email field in user response. The response: ' + str(user_response) + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + user_groups = yield self.read_groups(user_response.get('access_token')) + if user_groups is None: + error_message = 'Cant read user groups' + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + LOGGER.info("User %s group list: " % user_response['email'] + str(user_groups)) + user_response['groups'] = user_groups + user_response['updated'] = time.time() + user_response['visit'] = time.time() + user_response['updating'] = False + self.user_states[user_response['email']] = user_response + self.clean_expired_sessions() + self.dump_sessions_to_file() + + return user_response['email'] + + +def get_path_for_redirect(request_handler): + referer = request_handler.request.headers.get('Referer') + if not referer: + LOGGER.error('No referer') + raise AuthFailureError('Missing request header. 
Please contact system administrator') + + parse_result = urllib_parse.urlparse(referer) + protocol = parse_result[0] + host = parse_result[1] + path = parse_result[2] + + return urllib_parse.urlunparse((protocol, host, path, '', '', '')) diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 2bca3d7e..5a695477 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -23,8 +23,10 @@ def is_authenticated(self, request_handler): return True username = self._get_current_user(request_handler) + if not username: + return False - return bool(username) + return self.authenticator.is_active(username) @staticmethod def _get_current_user(request_handler): diff --git a/src/model/server_conf.py b/src/model/server_conf.py index 325abe36..0a0d54a1 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -147,6 +147,9 @@ def create_authenticator(auth_object, temp_folder): elif auth_type == 'google_oauth': from auth.auth_google_oauth import GoogleOauthAuthenticator authenticator = GoogleOauthAuthenticator(auth_object) + elif auth_type == 'gitlab': + from auth.auth_gitlab import GitlabOAuthAuthenticator + authenticator = GitlabOAuthAuthenticator(auth_object) elif auth_type == 'htpasswd': from auth.auth_htpasswd import HtpasswdAuthenticator authenticator = HtpasswdAuthenticator(auth_object) diff --git a/web-src/public/login.html b/web-src/public/login.html index 04904bff..1c5df7dd 100644 --- a/web-src/public/login.html +++ b/web-src/public/login.html @@ -43,4 +43,11 @@ + + \ No newline at end of file diff --git a/web-src/src/assets/css/index.css b/web-src/src/assets/css/index.css index da2b1fc9..4d278d3e 100644 --- a/web-src/src/assets/css/index.css +++ b/web-src/src/assets/css/index.css @@ -156,3 +156,40 @@ input[type=checkbox]:not(.browser-default) + span { #login-google_oauth-button[disabled] { color: #B0B0B0; } + + +#login-panel .login-gitlab .login-info-label { + margin-top: 16px; +} + +#login-gitlab-button { + height: 40px; + width: 
188px; + padding-left: 34px; + margin: auto; + margin-top: 34px; + display: block; + + font-size: 14px; + font-weight: 500; + color: #757575; + + border-radius: 2px; + box-shadow: 0 1px 3px -1px #202020; + border: none; + + background-image: url('../gitlab-icon-rgb.png'); + background-color: white; + background-position-y: 50%; + background-position-x: -4px; + background-size: 48px; + background-repeat: no-repeat; +} + +#login-gitlab-button:active { + background-color: #EEE; +} + +#login-gitlab-button[disabled] { + color: #B0B0B0; +} diff --git a/web-src/src/assets/gitlab-icon-rgb.png b/web-src/src/assets/gitlab-icon-rgb.png new file mode 100644 index 0000000000000000000000000000000000000000..21a02db58782252d22ed6d2c913b4cdb3d32a76d GIT binary patch literal 1182 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGoY)RhkE(}p1m{?jr6DY!2;1O92 z)Nvhz8J#p{R{#asOFVsD*`KgViE6PNS+_`(fq_Na)5S5Q;?~<6r}L#(r#-X&J$oyo5-#wI{l%He z;?yPYZ{2^J4y0zCn`53`I&V`L`?fu1|L@(}^e5?Q(5D|0#qB5C?3*`rzW)02`%g2h zdSdm+HN!#j*^P<6Bl_=M;ySc-LK?%>Df3N~zKQK(I8bO%^n@X&kd>Mr5*gg z@-*Dvobwm`dXj-JQT&MU(Ryw6PS`IR+AZDecmU$`+gxLAX;Nl_}Pdhi>%@_B7)1}Ojx(7$_YQ8wZFz7 zRJ)k(Y7oQ57TtgC{&QWJayytTq+?_jbJk|qUu-jK{if5v(ZF)+ zYs#u-#vlBVx&CkTs*D?i8Jl?-KQ+8RV#MO1yx`%I>&wO47C+HtdvJe=VC~8$hcX#D z|Gv-8(Ts1CYCh%9y|7` z{a-1Nf3IHr$ls+eTfW~+T0O(mY@5~5BWG&s1g#r>JDzvn<+_jWWv@T;56*`2`Uuv9 zm)9>{-nQ3UF{kR_5k{N9Eqg__oLJ@Gv$6e;=AuPKCf^FKLj$e!!{2I8k?&RuxhzCL*KXBXK5_2Bs{!|sR(9gt~oJ#b4P;#Y3<|1D~t|ahe1G0`GNfJl>U||8x#Rr@KCNNQSg`dr>pYg0=E|Lke4X3(&0js;@@go9tLjVb zwS}htnN>^@__B0c_ox?LKHs@J;H$~>T|5j7YrY4sUt#}oFVdQ&MBb@09(5onE(I) literal 0 HcmV?d00001 diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index 20cd8c65..0d30118d 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -30,6 +30,8 @@ function onLoad() { var config = JSON.parse(configResponse); if (config['type'] === 'google_oauth') { setupGoogleOAuth(loginContainer, config); + } else if (config['type'] === 'gitlab') { + setupGitlabOAuth(loginContainer, 
config); } else { setupCredentials(loginContainer); } @@ -84,6 +86,36 @@ function setupGoogleOAuth(loginContainer, authConfig) { processCurrentOauthState(); } +function setupGitlabOAuth(loginContainer, authConfig) { + var credentialsTemplate = createTemplateElement('login-gitlab-template'); + loginContainer.appendChild(credentialsTemplate); + + var oauthLoginButton = document.getElementById('login-gitlab-button'); + oauthLoginButton.onclick = function () { + var token = guid(32); + + var localState = { + 'token': token, + 'urlFragment': window.location.hash + }; + localState[NEXT_URL_KEY] = getQueryParameter(NEXT_URL_KEY); + + saveState(localState); + + const queryArgs = { + 'redirect_uri': getUnparameterizedUrl(), + 'state': token, + 'client_id': authConfig['client_id'], + 'scope': authConfig['oauth_scope'], + 'response_type': OAUTH_RESPONSE_KEY + }; + const query = toQueryArgs(queryArgs); + window.location = authConfig['oauth_url'] + '?' + query; + }; + + processCurrentOauthState(); +} + function processCurrentOauthState() { var oauthState = restoreState(); From 4a02f493b6865567142e689cf7de7a38e91158d8 Mon Sep 17 00:00:00 2001 From: MiksIr Date: Thu, 4 Jun 2020 16:41:52 +0300 Subject: [PATCH 002/398] Gitlab Oauth tests --- src/auth/auth_gitlab.py | 9 +- src/tests/auth/test_auth_gitlab.py | 130 +++++++++++++++++++++++++++++ src/tests/server_conf_test.py | 27 ++++++ 3 files changed, 163 insertions(+), 3 deletions(-) create mode 100644 src/tests/auth/test_auth_gitlab.py diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 939fa2b0..12e44c48 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -129,7 +129,7 @@ def __init__(self, params_dict): stateStr = dumpFile.read() self.user_states = escape.json_decode(stateStr) dumpFile.close() - LOGGER.info("Readed state from file %s: " % self.gitlab_dump + stateStr) + LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) self.gitlab_group_search = 
params_dict.get('group_search') @@ -167,8 +167,7 @@ def get_groups(self, user, known_groups=None): return [] now = time.time() if (self.user_states[user]['updated'] + self.gitlab_update) < now and not self.user_states[user]['updating']: - self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + self.do_update_groups(user) return self.user_states[user]['groups'] def clean_expired_sessions(self): @@ -186,6 +185,10 @@ def dump_sessions_to_file(self): dumpFile.close() LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) + def do_update_groups(self, user): + self.user_states[user]['updating'] = True + tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + @gen.coroutine def update_state(self, user): group_list = yield self.read_groups(self.user_states[user]['access_token']) diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py new file mode 100644 index 00000000..fe59c5b9 --- /dev/null +++ b/src/tests/auth/test_auth_gitlab.py @@ -0,0 +1,130 @@ +import json +import os +import tempfile +import time +import unittest + +from tornado import escape + +from auth.auth_gitlab import GitlabOAuthAuthenticator +from model import server_conf +from tests import test_utils +from utils import file_utils +from unittest import TestCase +from unittest.mock import patch, Mock + +if __name__ == '__main__': + unittest.main() + +mock_time = Mock() +mock_time.return_value = 10000.01 +mock_dump_sessions_to_file = Mock() +mock_do_update_groups = Mock() + + +class TestAuthConfig(TestCase): + @patch('time.time', mock_time) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.dump_sessions_to_file', mock_dump_sessions_to_file) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) + def test_gitlab_oauth(self): + tmp = tempfile.mkstemp('.json', 'test_auth_gitlab-') + now = time.time() + state = { + "user@test.com": { + "groups": ["testgroup"], + 
"updating": False, + "updated": now-10, + "visit": now-10, + "id": 1, + "username": "test", + "name": "John", + "email": "user@test.com", + "state": "active" + }, + "nogroups@test.com": { + "groups": None, + "updating": False, + "updated": now-10, + "visit": now-10, + "id": 2, + "username": "nogroups", + "name": "John", + "email": "nogroups@test.com", + "state": "active" + } + } + + os.write(tmp[0], str.encode(escape.json_encode(state))) + os.fsync(tmp[0]) + + config = _from_json({ + 'auth': { + "type": "gitlab", + "url": "https://gitlab", + "client_id": "1234", + "secret": "abcd", + "group_search": "script-server", + "ttl": 80, + "dump": tmp[1], + "session_expire_min": 1 + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEqual('1234', config.authenticator.client_id) + self.assertEqual('abcd', config.authenticator.secret) + self.assertEqual('https://gitlab', config.authenticator._GITLAB_PREFIX) + self.assertEqual('script-server', config.authenticator.gitlab_group_search) + self.assertEqual(80, config.authenticator.gitlab_update) + self.assertEqual(tmp[1], config.authenticator.gitlab_dump) + self.assertEqual(60, config.authenticator.session_expire) + self.assertDictEqual(state, config.authenticator.user_states) + self.assertEqual(False, config.authenticator.is_active("unknown@test.com")) + self.assertEqual(False, config.authenticator.is_active("nogroups@test.com")) + self.assertEqual(True, config.authenticator.is_active("user@test.com")) + self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"]) + + # session expire test + saved_state = config.authenticator.user_states["user@test.com"].copy() + config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 + self.assertEqual(False, config.authenticator.is_active("user@test.com")) + self.assertEqual(True, mock_dump_sessions_to_file.called) + mock_dump_sessions_to_file.reset_mock() + 
self.assertIsNone(config.authenticator.user_states.get("user@test.com")) + config.authenticator.user_states["user@test.com"] = saved_state + + self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) + + # do not update because new + self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) + self.assertEqual(False, mock_do_update_groups.called) + mock_do_update_groups.reset_mock() + # update because old + config.authenticator.user_states["user@test.com"]["updated"] = time.time() - 81 + self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) + mock_do_update_groups.assert_called_with("user@test.com") + mock_do_update_groups.reset_mock() + # do not update because already updating + config.authenticator.user_states["user@test.com"]["updating"] = True + self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) + self.assertEqual(False, mock_do_update_groups.called) + config.authenticator.user_states["user@test.com"]["updating"] = False + + # test clean expire + saved_state = config.authenticator.user_states["user@test.com"].copy() + config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 + config.authenticator.clean_expired_sessions() + self.assertIsNone(config.authenticator.user_states.get("user@test.com")) + config.authenticator.user_states["user@test.com"] = saved_state + + os.close(tmp[0]) + os.unlink(tmp[1]) + + +def _from_json(content): + json_obj = json.dumps(content) + conf_path = os.path.join(test_utils.temp_folder, 'conf.json') + file_utils.write_file(conf_path, json_obj) + return server_conf.from_json(conf_path, test_utils.temp_folder) \ No newline at end of file diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index 566b68a2..9ea7a0db 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -2,6 +2,7 @@ import os import unittest +from auth.auth_gitlab import 
GitlabOAuthAuthenticator from auth.auth_google_oauth import GoogleOauthAuthenticator from auth.auth_htpasswd import HtpasswdAuthenticator from auth.auth_ldap import LdapAuthenticator @@ -203,6 +204,32 @@ def test_google_oauth_without_allowed_users(self): 'client_id': '1234', 'secret': 'abcd'}}) + def test_gitlab_oauth(self): + config = _from_json({ + 'auth': { + "type": "gitlab", + "url": "https://gitlab", + "client_id": "1234", + "secret": "abcd", + "group_search": "script-server", + "ttl": 60, + "dump": "/tmp/dump.json", + "session_expire_min": 60 + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEquals('1234', config.authenticator.client_id) + self.assertEquals('abcd', config.authenticator.secret) + self.assertEquals('https://gitlab', config.authenticator._GITLAB_PREFIX) + self.assertEquals('script-server', config.authenticator.gitlab_group_search) + self.assertEquals(60, config.authenticator.gitlab_update) + self.assertEquals("/tmp/dump.json", config.authenticator.gitlab_dump) + self.assertEquals(60*60, config.authenticator.session_expire) + + def test_ldap(self): config = _from_json({'auth': {'type': 'ldap', 'url': 'http://test-ldap.net', From b4202382a4e2744d89342733148bbc243ec2b60f Mon Sep 17 00:00:00 2001 From: MiksIr Date: Thu, 4 Jun 2020 23:39:21 +0300 Subject: [PATCH 003/398] + Store Gitlab keys to cookes so dump file now free from private information + group_support on/off, is off - read_user scope used --- src/auth/auth_base.py | 5 +- src/auth/auth_gitlab.py | 120 +++++++++++++++++------ src/auth/tornado_auth.py | 8 +- src/tests/auth/test_auth_gitlab.py | 148 +++++++++++++++++++++-------- src/tests/server_conf_test.py | 26 ++++- src/web/server.py | 18 ++-- 6 files changed, 246 insertions(+), 79 deletions(-) diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index e5e2553f..64a2e2ad 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -16,9 +16,12 @@ 
def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] - def is_active(self, user): + def is_active(self, user, request_handler): return True + def logout(self, user, request_handler): + return None + class AuthRejectedError(Exception): """Credentials, provided by user, were rejected by the authentication mechanism (user is unknown to the server)""" diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 12e44c48..7d00899d 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -39,7 +39,6 @@ async def get_authenticated_user( client_id: str, client_secret: str, code: str, - extra_fields: Dict[str, Any] = None, ) -> Optional[Dict[str, Any]]: http = self.get_auth_http_client() args = { @@ -50,10 +49,6 @@ async def get_authenticated_user( "grant_type": "authorization_code", } - fields = {"id", "username", "name", "email", "state"} - if extra_fields: - fields.update(extra_fields) - body = urllib_parse.urlencode(args) http_client = httpclient.AsyncHTTPClient() response = await http_client.fetch( @@ -86,22 +81,28 @@ async def get_authenticated_user( LOGGER.error(message) raise AuthFailureError(message) - user = await self.oauth2_request( - self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, - access_token) + user = await self.fetch_user(access_token) if user is None: error_message = 'Failed to load user info' LOGGER.error(error_message) raise AuthFailureError(error_message) - fieldmap = response_values - for field in fields: + return {**response_values, **user} + + async def fetch_user(self, access_token): + user = await self.oauth2_request( + self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, + access_token) + if user is None: + return None + + fieldmap = {} + for field in {"id", "username", "name", "email", "state"}: fieldmap[field] = user.get(field) return fieldmap - # noinspection PyProtectedMember class GitlabOAuthAuthenticator(auth_base.Authenticator, GitlabOAuth2Mixin): def __init__(self, 
params_dict): @@ -120,22 +121,29 @@ def __init__(self, params_dict): self.states = {} self.user_states = {} - self.gitlab_update = params_dict.get('ttl', 60) + self.gitlab_update = params_dict.get('ttl') self.gitlab_dump = params_dict.get('dump') + self.gitlab_group_support = params_dict.get('group_support', True) self.session_expire = int(params_dict.get('session_expire_min', 0)) * 60 + now = time.time() if self.gitlab_dump and os.path.exists(self.gitlab_dump): dumpFile = open(self.gitlab_dump, "r") stateStr = dumpFile.read() self.user_states = escape.json_decode(stateStr) dumpFile.close() + for userData in list(self.user_states.keys()): + # force to update user from gitlab + self.user_states[userData]['updating'] = False + if self.gitlab_update: + self.user_states[userData]['updated'] = now - self.gitlab_update - 1 LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) self.gitlab_group_search = params_dict.get('group_search') self._client_visible_config['client_id'] = self.client_id self._client_visible_config['oauth_url'] = self._OAUTH_AUTHORIZE_URL % self._GITLAB_PREFIX - self._client_visible_config['oauth_scope'] = 'api' + self._client_visible_config['oauth_scope'] = 'api' if self.gitlab_group_support else 'read_user' def authenticate(self, request_handler): code = request_handler.get_argument('code', False) @@ -146,30 +154,55 @@ def authenticate(self, request_handler): return self.validate_user(code, request_handler) - def is_active(self, user): + def is_active(self, user, request_handler): + access_token = request_handler.get_secure_cookie('token') + if access_token is None: + return False + access_token = access_token.decode("utf-8") + if self.user_states.get(user) is None: - LOGGER.info("User %s not found in state" % user) + LOGGER.debug("User %s not found in state" % user) return False - if self.user_states[user]['groups'] is None: - LOGGER.info("User %s state without groups" % user) + + if self.user_states[user]['state'] 
is None or self.user_states[user]['state'] != "active": + LOGGER.info("User %s state inactive: " % user + str(self.user_states[user])) + del self.user_states[user] + self.dump_sessions_to_file() return False + now = time.time() + # check session ttl if self.session_expire and (self.user_states[user]['visit'] + self.session_expire) < now: del self.user_states[user] LOGGER.info("User %s session expired, logged out" % user) self.dump_sessions_to_file() return False + self.user_states[user]['visit'] = now + + # check gitlab response ttl, also check for stale updating (ttl*2) + if self.gitlab_update is not None: + stale = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now + ttl_expired = (self.user_states[user]['updated'] + self.gitlab_update) < now + updating_now = self.user_states[user]['updating'] is True + if ttl_expired and (not updating_now or stale): + if self.gitlab_group_support: + self.do_update_groups(user, access_token) + else: + self.do_update_user(user, access_token) + return True def get_groups(self, user, known_groups=None): if self.user_states.get(user) is None: return [] - now = time.time() - if (self.user_states[user]['updated'] + self.gitlab_update) < now and not self.user_states[user]['updating']: - self.do_update_groups(user) + if self.user_states[user]['groups'] is None: + return [] return self.user_states[user]['groups'] + def logout(self, user, request_handler): + request_handler.clear_cookie('token') + def clean_expired_sessions(self): now = time.time() if self.session_expire: @@ -185,19 +218,41 @@ def dump_sessions_to_file(self): dumpFile.close() LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) - def do_update_groups(self, user): + def do_update_user(self, user, access_token): + self.user_states[user]['updating'] = True + tornado.ioloop.IOLoop.current().spawn_callback(self.update_user_state, user, access_token) + + def do_update_groups(self, user, access_token): self.user_states[user]['updating'] = True - 
tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + tornado.ioloop.IOLoop.current().spawn_callback(self.update_group_list, user, access_token) @gen.coroutine - def update_state(self, user): - group_list = yield self.read_groups(self.user_states[user]['access_token']) + def update_group_list(self, user, access_token): + group_list = yield self.read_groups(access_token) if group_list is None: LOGGER.error("Failed to refresh groups for %s" % user) + self.user_states[user]['state'] = "error" else: LOGGER.info("Groups for %s refreshed: " % user + str(group_list)) + self.user_states[user]['groups'] = group_list + now = time.time() + self.user_states[user]['updating'] = False + self.user_states[user]['updated'] = now + self.user_states[user]['visit'] = now + self.clean_expired_sessions() + self.dump_sessions_to_file() + return + + @gen.coroutine + def update_user_state(self, user, access_token): + user_state = yield self.fetch_user(access_token) + if user_state is None: + LOGGER.error("Failed to fetch user %s" % user) + self.user_states[user]['state'] = "error" + else: + LOGGER.info("User %s refreshed: " % user + str(user_state)) + self.user_states[user] = {**self.user_states[user], **user_state} now = time.time() - self.user_states[user]['groups'] = group_list self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now @@ -247,20 +302,25 @@ def validate_user(self, code, request_handler): LOGGER.error(error_message) raise AuthFailureError(error_message) - user_groups = yield self.read_groups(user_response.get('access_token')) - if user_groups is None: - error_message = 'Cant read user groups' - LOGGER.error(error_message) - raise AuthFailureError(error_message) + user_groups = [] + if self.gitlab_group_support: + user_groups = yield self.read_groups(user_response.get('access_token')) + if user_groups is None: + error_message = 'Cant read user groups' + LOGGER.error(error_message) + raise 
AuthFailureError(error_message) LOGGER.info("User %s group list: " % user_response['email'] + str(user_groups)) user_response['groups'] = user_groups user_response['updated'] = time.time() user_response['visit'] = time.time() user_response['updating'] = False + oauth_access_token = user_response.pop('access_token') + oauth_refresh_token = user_response.pop('refresh_token') # not used atm self.user_states[user_response['email']] = user_response self.clean_expired_sessions() self.dump_sessions_to_file() + request_handler.set_secure_cookie('token', oauth_access_token) return user_response['email'] diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 5a695477..b7d9e0ad 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -26,7 +26,11 @@ def is_authenticated(self, request_handler): if not username: return False - return self.authenticator.is_active(username) + active = self.authenticator.is_active(username, request_handler) + if not active: + self.logout(request_handler) + + return active @staticmethod def _get_current_user(request_handler): @@ -100,3 +104,5 @@ def logout(self, request_handler): LOGGER.info('Logging out ' + username) request_handler.clear_cookie('username') + + self.authenticator.logout(username, request_handler) diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index fe59c5b9..530ae841 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -1,3 +1,4 @@ +import copy import json import os import tempfile @@ -20,6 +21,8 @@ mock_time.return_value = 10000.01 mock_dump_sessions_to_file = Mock() mock_do_update_groups = Mock() +mock_do_update_user = Mock() +mock_request_handler = Mock(**{'get_secure_cookie.return_value': "12345".encode()}) class TestAuthConfig(TestCase): @@ -43,14 +46,14 @@ def test_gitlab_oauth(self): }, "nogroups@test.com": { "groups": None, - "updating": False, + "updating": True, "updated": now-10, "visit": now-10, "id": 2, 
"username": "nogroups", "name": "John", "email": "nogroups@test.com", - "state": "active" + "state": "blocked" } } @@ -66,62 +69,131 @@ def test_gitlab_oauth(self): "group_search": "script-server", "ttl": 80, "dump": tmp[1], - "session_expire_min": 1 + "session_expire_min": 10 }, 'access': { 'allowed_users': [] }}) self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEqual('1234', config.authenticator.client_id) - self.assertEqual('abcd', config.authenticator.secret) - self.assertEqual('https://gitlab', config.authenticator._GITLAB_PREFIX) - self.assertEqual('script-server', config.authenticator.gitlab_group_search) - self.assertEqual(80, config.authenticator.gitlab_update) self.assertEqual(tmp[1], config.authenticator.gitlab_dump) - self.assertEqual(60, config.authenticator.session_expire) - self.assertDictEqual(state, config.authenticator.user_states) - self.assertEqual(False, config.authenticator.is_active("unknown@test.com")) - self.assertEqual(False, config.authenticator.is_active("nogroups@test.com")) - self.assertEqual(True, config.authenticator.is_active("user@test.com")) - self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"]) + self.assertEqual("1234", config.authenticator._client_visible_config['client_id']) + self.assertEqual("https://gitlab/oauth/authorize", config.authenticator._client_visible_config['oauth_url']) + self.assertEqual("api", config.authenticator._client_visible_config['oauth_scope']) + + assert_state = state.copy() + for key in list(assert_state.keys()): + assert_state[key]['updating'] = False + assert_state[key]['updated'] = 10000.01 - 80 - 1 + self.assertDictEqual(assert_state, config.authenticator.user_states) + saved_state = copy.deepcopy(config.authenticator.user_states) + + self.assertEqual(False, config.authenticator.is_active("unknown@test.com", mock_request_handler)) + self.assertEqual(False, config.authenticator.is_active("nogroups@test.com", 
mock_request_handler)) + self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) + self.assertListEqual([], config.authenticator.get_groups("nogroups@test.com")) - # session expire test - saved_state = config.authenticator.user_states["user@test.com"].copy() - config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 - self.assertEqual(False, config.authenticator.is_active("user@test.com")) - self.assertEqual(True, mock_dump_sessions_to_file.called) - mock_dump_sessions_to_file.reset_mock() - self.assertIsNone(config.authenticator.user_states.get("user@test.com")) - config.authenticator.user_states["user@test.com"] = saved_state + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"], "visit updated") + self.assertEqual(True, mock_do_update_groups.called, "state just loaded, gitlab updating") + mock_do_update_groups.reset_mock() - self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) + config.authenticator.user_states["user@test.com"]["updating"] = True + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "do not call parallel updated") + mock_do_update_groups.reset_mock() - # do not update because new - self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) - self.assertEqual(False, mock_do_update_groups.called) + mock_time.return_value = 10000.01 + 80*2 + 1 # stale request + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, mock_do_update_groups.called, "parallel but stale") mock_do_update_groups.reset_mock() - # update because old - config.authenticator.user_states["user@test.com"]["updated"] = time.time() - 81 - self.assertListEqual(["testgroup"], 
config.authenticator.get_groups("user@test.com")) - mock_do_update_groups.assert_called_with("user@test.com") + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 + + config.authenticator.user_states["user@test.com"]['updated'] = now # gitlab info updated + config.authenticator.user_states["user@test.com"]['updating'] = False + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "do not update gitlab because ttl not expired") mock_do_update_groups.reset_mock() - # do not update because already updating - config.authenticator.user_states["user@test.com"]["updating"] = True - self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) - self.assertEqual(False, mock_do_update_groups.called) - config.authenticator.user_states["user@test.com"]["updating"] = False + + mock_time.return_value = 10000.01 + 81 + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, mock_do_update_groups.called, "ttl expired") + mock_do_update_groups.reset_mock() + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 + + # session expire test + mock_time.return_value = 10000.01 + 601 + self.assertEqual(False, config.authenticator.is_active("user@test.com", mock_request_handler), "shoud be expired") + self.assertEqual(True, mock_dump_sessions_to_file.called, "dump state to file") + mock_dump_sessions_to_file.reset_mock() + self.assertIsNone(config.authenticator.user_states.get("user@test.com"), "removed from state") + self.assertListEqual([], config.authenticator.get_groups("user@test.com")) + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 # test clean expire - saved_state = config.authenticator.user_states["user@test.com"].copy() - 
config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 + mock_time.return_value = 10000.01 + 601 config.authenticator.clean_expired_sessions() self.assertIsNone(config.authenticator.user_states.get("user@test.com")) - config.authenticator.user_states["user@test.com"] = saved_state + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 os.close(tmp[0]) os.unlink(tmp[1]) + @patch('time.time', mock_time) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_user', mock_do_update_user) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) + def test_gitlab_oauth_user_read_scope(self): + now = time.time() + + state = { + "user@test.com": { + "groups": ["testgroup"], + "updating": False, + "updated": 0, + "visit": now-10, + "id": 1, + "username": "test", + "name": "John", + "email": "user@test.com", + "state": "active" + } + } + + config = _from_json({ + 'auth': { + "type": "gitlab", + "url": "https://gitlab", + "client_id": "1234", + "secret": "abcd", + "group_search": "script-server", + "ttl": 80, + "session_expire_min": 1, + "group_support": False + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEqual("read_user", config.authenticator._client_visible_config['oauth_scope']) + config.authenticator.user_states = state + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "update==0, gitlab updating but not groups") + self.assertEqual(True, mock_do_update_user.called, "update==0, gitlab updating only user") + mock_do_update_groups.reset_mock() + mock_do_update_user.reset_mock() + + config.authenticator.gitlab_update = None + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "gitab 
update disabled") + self.assertEqual(False, mock_do_update_user.called, "gitab update disabled") + mock_do_update_groups.reset_mock() + mock_do_update_user.reset_mock() + def _from_json(content): json_obj = json.dumps(content) diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index 9ea7a0db..c412df5b 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -212,9 +212,10 @@ def test_gitlab_oauth(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 60, + "ttl": 120, "dump": "/tmp/dump.json", - "session_expire_min": 60 + "session_expire_min": 60, + "group_support": False }, 'access': { 'allowed_users': [] @@ -225,10 +226,29 @@ def test_gitlab_oauth(self): self.assertEquals('abcd', config.authenticator.secret) self.assertEquals('https://gitlab', config.authenticator._GITLAB_PREFIX) self.assertEquals('script-server', config.authenticator.gitlab_group_search) - self.assertEquals(60, config.authenticator.gitlab_update) + self.assertEquals(120, config.authenticator.gitlab_update) self.assertEquals("/tmp/dump.json", config.authenticator.gitlab_dump) self.assertEquals(60*60, config.authenticator.session_expire) + self.assertEquals(False, config.authenticator.gitlab_group_support) + def test_gitlab_oauth_default(self): + config = _from_json({ + 'auth': { + "type": "gitlab", + "client_id": "1234", + "secret": "abcd", + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEquals('https://gitlab.com', config.authenticator._GITLAB_PREFIX) + self.assertIsNone(config.authenticator.gitlab_group_search) + self.assertIsNone(config.authenticator.gitlab_update) + self.assertIsNone(config.authenticator.gitlab_dump) + self.assertIsNone(config.authenticator.session_expire) + self.assertEquals(True, config.authenticator.gitlab_group_support) def test_ldap(self): config = _from_json({'auth': {'type': 'ldap', diff --git 
a/src/web/server.py b/src/web/server.py index 7a694e32..9870c77e 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -105,6 +105,13 @@ def wrapper(self, *args, **kwargs): auth = self.application.auth authorizer = self.application.authorizer + login_url = self.get_login_url() + request_path = self.request.path + + login_resource = is_allowed_during_login(request_path, login_url, self) + if login_resource: + return func(self, *args, **kwargs) + authenticated = auth.is_authenticated(self) access_allowed = authenticated and authorizer.is_allowed_in_app(_identify_user(self)) @@ -118,11 +125,7 @@ def wrapper(self, *args, **kwargs): else: raise tornado.web.HTTPError(code, message) - login_url = self.get_login_url() - request_path = self.request.path - - login_resource = is_allowed_during_login(request_path, login_url, self) - if (authenticated and access_allowed) or login_resource: + if authenticated and access_allowed: return func(self, *args, **kwargs) if not isinstance(self, tornado.web.StaticFileHandler): @@ -171,7 +174,10 @@ def wrapper(self, *args, **kwargs): def has_admin_rights(request_handler): - user_id = _identify_user(request_handler) + try: + user_id = _identify_user(request_handler) + except Exception: + return False return request_handler.application.authorizer.is_admin(user_id) From 0ca627071b53786effb9b798a42eca75d964e47a Mon Sep 17 00:00:00 2001 From: MiksIr Date: Thu, 4 Jun 2020 23:58:44 +0300 Subject: [PATCH 004/398] Rename config keys: dump -> state_dump_file, ttl -> auth_info_ttl, session_expire_min -> session_expire_minutes --- src/auth/auth_gitlab.py | 6 +++--- src/tests/auth/test_auth_gitlab.py | 10 +++++----- src/tests/server_conf_test.py | 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 7d00899d..4fc916ae 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -121,10 +121,10 @@ def __init__(self, params_dict): self.states = {} self.user_states = 
{} - self.gitlab_update = params_dict.get('ttl') - self.gitlab_dump = params_dict.get('dump') + self.gitlab_update = params_dict.get('auth_info_ttl') + self.gitlab_dump = params_dict.get('state_dump_file') self.gitlab_group_support = params_dict.get('group_support', True) - self.session_expire = int(params_dict.get('session_expire_min', 0)) * 60 + self.session_expire = int(params_dict.get('session_expire_minutes', 0)) * 60 now = time.time() if self.gitlab_dump and os.path.exists(self.gitlab_dump): diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index 530ae841..969591b3 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -67,9 +67,9 @@ def test_gitlab_oauth(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 80, - "dump": tmp[1], - "session_expire_min": 10 + "auth_info_ttl": 80, + "state_dump_file": tmp[1], + "session_expire_minutes": 10 }, 'access': { 'allowed_users': [] @@ -170,8 +170,8 @@ def test_gitlab_oauth_user_read_scope(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 80, - "session_expire_min": 1, + "auth_info_ttl": 80, + "session_expire_minutes": 1, "group_support": False }, 'access': { diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index c412df5b..c26a7ef9 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -212,9 +212,9 @@ def test_gitlab_oauth(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 120, - "dump": "/tmp/dump.json", - "session_expire_min": 60, + "auth_info_ttl": 120, + "state_dump_file": "/tmp/dump.json", + "session_expire_minutes": 60, "group_support": False }, 'access': { From 45a8987033d805af053b186014cffa31677b2e7e Mon Sep 17 00:00:00 2001 From: MiksIr Date: Wed, 10 Jun 2020 03:20:02 +0300 Subject: [PATCH 005/398] Refactoring of gitlab auth class --- src/auth/auth_base.py | 2 +- 
src/auth/auth_gitlab.py | 98 ++++++++++++++---------------- src/auth/tornado_auth.py | 2 +- src/tests/auth/test_auth_gitlab.py | 44 +++++++------- src/web/server.py | 12 ++-- web-src/src/login/login.js | 44 +++++--------- 6 files changed, 90 insertions(+), 112 deletions(-) diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index 64a2e2ad..45e3edaa 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -16,7 +16,7 @@ def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] - def is_active(self, user, request_handler): + def validate_user(self, user, request_handler): return True def logout(self, user, request_handler): diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 4fc916ae..76064350 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -15,6 +15,8 @@ from typing import List, Any, Dict, cast, Iterable, Union, Optional +from utils import file_utils + LOGGER = logging.getLogger('script_server.GitlabAuthorizer') @@ -58,6 +60,7 @@ async def get_authenticated_user( body=body, raise_error=False) + default_response_values = {"state": "unknown"} response_values = {} if response.body: response_values = escape.json_decode(response.body) @@ -88,7 +91,7 @@ async def get_authenticated_user( LOGGER.error(error_message) raise AuthFailureError(error_message) - return {**response_values, **user} + return {**default_response_values, **response_values, **user} async def fetch_user(self, access_token): user = await self.oauth2_request( @@ -115,9 +118,9 @@ def __init__(self, params_dict): secret_value = model_helper.read_obligatory(params_dict, 'secret', ' for Gitlab OAuth') self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) - gitlabPrefix = params_dict.get('url') - if not model_helper.is_empty(gitlabPrefix): - self._GITLAB_PREFIX = gitlabPrefix + gitlab_prefix = params_dict.get('url') + if not model_helper.is_empty(gitlab_prefix): + self._GITLAB_PREFIX = gitlab_prefix 
self.states = {} self.user_states = {} @@ -128,15 +131,13 @@ def __init__(self, params_dict): now = time.time() if self.gitlab_dump and os.path.exists(self.gitlab_dump): - dumpFile = open(self.gitlab_dump, "r") - stateStr = dumpFile.read() - self.user_states = escape.json_decode(stateStr) - dumpFile.close() - for userData in list(self.user_states.keys()): + state_str = file_utils.read_file(self.gitlab_dump) + self.user_states = escape.json_decode(state_str) + for user_data in list(self.user_states.keys()): # force to update user from gitlab - self.user_states[userData]['updating'] = False + self.user_states[user_data]['updating'] = False if self.gitlab_update: - self.user_states[userData]['updated'] = now - self.gitlab_update - 1 + self.user_states[user_data]['updated'] = now - self.gitlab_update - 1 LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) self.gitlab_group_search = params_dict.get('group_search') @@ -152,40 +153,29 @@ def authenticate(self, request_handler): LOGGER.error('Code is not specified') raise AuthBadRequestException('Missing authorization information. 
Please contact your administrator') - return self.validate_user(code, request_handler) + return self.read_user(code, request_handler) - def is_active(self, user, request_handler): + def validate_user(self, user, request_handler): access_token = request_handler.get_secure_cookie('token') if access_token is None: return False access_token = access_token.decode("utf-8") + self.clean_and_persist_sessions() + if self.user_states.get(user) is None: LOGGER.debug("User %s not found in state" % user) return False - if self.user_states[user]['state'] is None or self.user_states[user]['state'] != "active": - LOGGER.info("User %s state inactive: " % user + str(self.user_states[user])) - del self.user_states[user] - self.dump_sessions_to_file() - return False - now = time.time() - # check session ttl - if self.session_expire and (self.user_states[user]['visit'] + self.session_expire) < now: - del self.user_states[user] - LOGGER.info("User %s session expired, logged out" % user) - self.dump_sessions_to_file() - return False - self.user_states[user]['visit'] = now # check gitlab response ttl, also check for stale updating (ttl*2) if self.gitlab_update is not None: - stale = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now + stale_update = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now ttl_expired = (self.user_states[user]['updated'] + self.gitlab_update) < now updating_now = self.user_states[user]['updating'] is True - if ttl_expired and (not updating_now or stale): + if ttl_expired and (not updating_now or stale_update): if self.gitlab_group_support: self.do_update_groups(user, access_token) else: @@ -203,31 +193,38 @@ def get_groups(self, user, known_groups=None): def logout(self, user, request_handler): request_handler.clear_cookie('token') - def clean_expired_sessions(self): + def clean_sessions(self): now = time.time() - if self.session_expire: - for userData in list(self.user_states.keys()): - if 
(self.user_states[userData]['visit'] + self.session_expire) < now: - LOGGER.debug("User %s session expired and removed" % userData) - del self.user_states[userData] - - def dump_sessions_to_file(self): + for user_data in list(self.user_states.keys()): + if self.session_expire and (self.user_states[user_data]['visit'] + self.session_expire) < now: + LOGGER.info("User %s removed because session expired" % user_data) + del self.user_states[user_data] + continue + if self.user_states[user_data]['state'] is None or self.user_states[user_data]['state'] != "active": + LOGGER.info("User %s removed because state '%s' != 'active'" % + (user_data, self.user_states[user_data]['state'])) + del self.user_states[user_data] + continue + + def clean_and_persist_sessions(self): + self.clean_sessions() if self.gitlab_dump: - dumpFile = open(self.gitlab_dump, "w") - dumpFile.write(escape.json_encode(self.user_states)) - dumpFile.close() - LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) + self.persist_session() + + def persist_session(self): + file_utils.write_file(self.gitlab_dump, escape.json_encode(self.user_states)) + LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) def do_update_user(self, user, access_token): self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_user_state, user, access_token) + tornado.ioloop.IOLoop.current().spawn_callback(self.update_user, user, access_token) def do_update_groups(self, user, access_token): self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_group_list, user, access_token) + tornado.ioloop.IOLoop.current().spawn_callback(self.update_groups, user, access_token) @gen.coroutine - def update_group_list(self, user, access_token): + def update_groups(self, user, access_token): group_list = yield self.read_groups(access_token) if group_list is None: LOGGER.error("Failed to refresh groups for %s" % user) @@ -239,12 +236,10 @@ def 
update_group_list(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_expired_sessions() - self.dump_sessions_to_file() - return + self.clean_and_persist_sessions() @gen.coroutine - def update_user_state(self, user, access_token): + def update_user(self, user, access_token): user_state = yield self.fetch_user(access_token) if user_state is None: LOGGER.error("Failed to fetch user %s" % user) @@ -256,8 +251,7 @@ def update_user_state(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_expired_sessions() - self.dump_sessions_to_file() + self.clean_and_persist_sessions() return @gen.coroutine @@ -288,7 +282,7 @@ def read_groups(self, access_token): return groups @gen.coroutine - def validate_user(self, code, request_handler): + def read_user(self, code, request_handler): user_response_future = self.get_authenticated_user( get_path_for_redirect(request_handler), self.client_id, @@ -316,10 +310,8 @@ def validate_user(self, code, request_handler): user_response['visit'] = time.time() user_response['updating'] = False oauth_access_token = user_response.pop('access_token') - oauth_refresh_token = user_response.pop('refresh_token') # not used atm self.user_states[user_response['email']] = user_response - self.clean_expired_sessions() - self.dump_sessions_to_file() + self.clean_and_persist_sessions() request_handler.set_secure_cookie('token', oauth_access_token) return user_response['email'] diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index b7d9e0ad..6caeff41 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -26,7 +26,7 @@ def is_authenticated(self, request_handler): if not username: return False - active = self.authenticator.is_active(username, request_handler) + active = self.authenticator.validate_user(username, 
request_handler) if not active: self.logout(request_handler) diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index 969591b3..6b17e83f 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -19,7 +19,7 @@ mock_time = Mock() mock_time.return_value = 10000.01 -mock_dump_sessions_to_file = Mock() +mock_persist_session = Mock() mock_do_update_groups = Mock() mock_do_update_user = Mock() mock_request_handler = Mock(**{'get_secure_cookie.return_value': "12345".encode()}) @@ -27,10 +27,9 @@ class TestAuthConfig(TestCase): @patch('time.time', mock_time) - @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.dump_sessions_to_file', mock_dump_sessions_to_file) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.persist_session', mock_persist_session) @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) def test_gitlab_oauth(self): - tmp = tempfile.mkstemp('.json', 'test_auth_gitlab-') now = time.time() state = { "user@test.com": { @@ -57,8 +56,7 @@ def test_gitlab_oauth(self): } } - os.write(tmp[0], str.encode(escape.json_encode(state))) - os.fsync(tmp[0]) + state_file = test_utils.create_file("gitlab_state.json", text=escape.json_encode(state)) config = _from_json({ 'auth': { @@ -68,7 +66,7 @@ def test_gitlab_oauth(self): "secret": "abcd", "group_search": "script-server", "auth_info_ttl": 80, - "state_dump_file": tmp[1], + "state_dump_file": state_file, "session_expire_minutes": 10 }, 'access': { @@ -76,7 +74,7 @@ def test_gitlab_oauth(self): }}) self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEqual(tmp[1], config.authenticator.gitlab_dump) + self.assertEqual(state_file, config.authenticator.gitlab_dump) self.assertEqual("1234", config.authenticator._client_visible_config['client_id']) self.assertEqual("https://gitlab/oauth/authorize", config.authenticator._client_visible_config['oauth_url']) self.assertEqual("api", 
config.authenticator._client_visible_config['oauth_scope']) @@ -88,23 +86,23 @@ def test_gitlab_oauth(self): self.assertDictEqual(assert_state, config.authenticator.user_states) saved_state = copy.deepcopy(config.authenticator.user_states) - self.assertEqual(False, config.authenticator.is_active("unknown@test.com", mock_request_handler)) - self.assertEqual(False, config.authenticator.is_active("nogroups@test.com", mock_request_handler)) + self.assertEqual(False, config.authenticator.validate_user("unknown@test.com", mock_request_handler)) + self.assertEqual(False, config.authenticator.validate_user("nogroups@test.com", mock_request_handler)) self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) self.assertListEqual([], config.authenticator.get_groups("nogroups@test.com")) - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"], "visit updated") self.assertEqual(True, mock_do_update_groups.called, "state just loaded, gitlab updating") mock_do_update_groups.reset_mock() config.authenticator.user_states["user@test.com"]["updating"] = True - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "do not call parallel updated") mock_do_update_groups.reset_mock() mock_time.return_value = 10000.01 + 80*2 + 1 # stale request - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(True, mock_do_update_groups.called, "parallel but stale") mock_do_update_groups.reset_mock() 
config.authenticator.user_states = copy.deepcopy(saved_state) @@ -112,12 +110,12 @@ def test_gitlab_oauth(self): config.authenticator.user_states["user@test.com"]['updated'] = now # gitlab info updated config.authenticator.user_states["user@test.com"]['updating'] = False - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "do not update gitlab because ttl not expired") mock_do_update_groups.reset_mock() mock_time.return_value = 10000.01 + 81 - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(True, mock_do_update_groups.called, "ttl expired") mock_do_update_groups.reset_mock() config.authenticator.user_states = copy.deepcopy(saved_state) @@ -125,9 +123,9 @@ def test_gitlab_oauth(self): # session expire test mock_time.return_value = 10000.01 + 601 - self.assertEqual(False, config.authenticator.is_active("user@test.com", mock_request_handler), "shoud be expired") - self.assertEqual(True, mock_dump_sessions_to_file.called, "dump state to file") - mock_dump_sessions_to_file.reset_mock() + self.assertEqual(False, config.authenticator.validate_user("user@test.com", mock_request_handler), "shoud be expired") + self.assertEqual(True, mock_persist_session.called, "dump state to file") + mock_persist_session.reset_mock() self.assertIsNone(config.authenticator.user_states.get("user@test.com"), "removed from state") self.assertListEqual([], config.authenticator.get_groups("user@test.com")) config.authenticator.user_states = copy.deepcopy(saved_state) @@ -135,14 +133,11 @@ def test_gitlab_oauth(self): # test clean expire mock_time.return_value = 10000.01 + 601 - config.authenticator.clean_expired_sessions() + 
config.authenticator.clean_sessions() self.assertIsNone(config.authenticator.user_states.get("user@test.com")) config.authenticator.user_states = copy.deepcopy(saved_state) mock_time.return_value = 10000.01 - os.close(tmp[0]) - os.unlink(tmp[1]) - @patch('time.time', mock_time) @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_user', mock_do_update_user) @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) @@ -181,19 +176,22 @@ def test_gitlab_oauth_user_read_scope(self): self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) self.assertEqual("read_user", config.authenticator._client_visible_config['oauth_scope']) config.authenticator.user_states = state - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "update==0, gitlab updating but not groups") self.assertEqual(True, mock_do_update_user.called, "update==0, gitlab updating only user") mock_do_update_groups.reset_mock() mock_do_update_user.reset_mock() config.authenticator.gitlab_update = None - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "gitab update disabled") self.assertEqual(False, mock_do_update_user.called, "gitab update disabled") mock_do_update_groups.reset_mock() mock_do_update_user.reset_mock() + def tearDown(self): + test_utils.cleanup() + def _from_json(content): json_obj = json.dumps(content) diff --git a/src/web/server.py b/src/web/server.py index 9870c77e..029f4e80 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -174,10 +174,7 @@ def wrapper(self, *args, **kwargs): def has_admin_rights(request_handler): - try: - user_id = 
_identify_user(request_handler) - except Exception: - return False + user_id = _identify_user(request_handler) return request_handler.application.authorizer.is_admin(user_id) @@ -696,10 +693,15 @@ def get(self): if auth.is_enabled(): username = auth.get_username(self) + try: + admin_rights = has_admin_rights(self) + except Exception: + admin_rights = False + info = { 'enabled': auth.is_enabled(), 'username': username, - 'admin': has_admin_rights(self) + 'admin': admin_rights } self.write(info) diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index 0d30118d..94b89572 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -57,40 +57,26 @@ function setupCredentials(loginContainer) { } function setupGoogleOAuth(loginContainer, authConfig) { - var credentialsTemplate = createTemplateElement('login-google_oauth-template'); - loginContainer.appendChild(credentialsTemplate); - - var oauthLoginButton = document.getElementById('login-google_oauth-button'); - oauthLoginButton.onclick = function () { - var token = guid(32); - - var localState = { - 'token': token, - 'urlFragment': window.location.hash - }; - localState[NEXT_URL_KEY] = getQueryParameter(NEXT_URL_KEY); - - saveState(localState); - - const queryArgs = { - 'redirect_uri': getUnparameterizedUrl(), - 'state': token, - 'client_id': authConfig['client_id'], - 'scope': authConfig['oauth_scope'], - 'response_type': OAUTH_RESPONSE_KEY - }; - const query = toQueryArgs(queryArgs); - window.location = authConfig['oauth_url'] + '?' 
+ query; - }; - - processCurrentOauthState(); + setupOAuth( + loginContainer, + authConfig, + 'login-google_oauth-template', + 'login-google_oauth-button') } function setupGitlabOAuth(loginContainer, authConfig) { - var credentialsTemplate = createTemplateElement('login-gitlab-template'); + setupOAuth( + loginContainer, + authConfig, + 'login-gitlab-template', + 'login-gitlab-button') +} + +function setupOAuth(loginContainer, authConfig, templateName, buttonId) { + var credentialsTemplate = createTemplateElement(templateName); loginContainer.appendChild(credentialsTemplate); - var oauthLoginButton = document.getElementById('login-gitlab-button'); + var oauthLoginButton = document.getElementById(buttonId); oauthLoginButton.onclick = function () { var token = guid(32); From 02bc00580c2959c15776b25b40cbbc2f3422c250 Mon Sep 17 00:00:00 2001 From: MiksIr Date: Wed, 10 Jun 2020 13:16:04 +0300 Subject: [PATCH 006/398] Removing unnecessary persists --- src/auth/auth_gitlab.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 76064350..bf5ea48c 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -161,7 +161,7 @@ def validate_user(self, user, request_handler): return False access_token = access_token.decode("utf-8") - self.clean_and_persist_sessions() + self.validate_sessions() if self.user_states.get(user) is None: LOGGER.debug("User %s not found in state" % user) @@ -195,20 +195,24 @@ def logout(self, user, request_handler): def clean_sessions(self): now = time.time() + changed = False for user_data in list(self.user_states.keys()): if self.session_expire and (self.user_states[user_data]['visit'] + self.session_expire) < now: LOGGER.info("User %s removed because session expired" % user_data) del self.user_states[user_data] + changed = True continue if self.user_states[user_data]['state'] is None or self.user_states[user_data]['state'] != "active": LOGGER.info("User 
%s removed because state '%s' != 'active'" % (user_data, self.user_states[user_data]['state'])) del self.user_states[user_data] + changed = True continue + return changed - def clean_and_persist_sessions(self): - self.clean_sessions() - if self.gitlab_dump: + def validate_sessions(self, force_persist=False): + changed = self.clean_sessions() + if self.gitlab_dump and (changed or force_persist): self.persist_session() def persist_session(self): @@ -236,7 +240,7 @@ def update_groups(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_and_persist_sessions() + self.validate_sessions(force_persist=True) @gen.coroutine def update_user(self, user, access_token): @@ -251,7 +255,7 @@ def update_user(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_and_persist_sessions() + self.validate_sessions(force_persist=True) return @gen.coroutine @@ -311,7 +315,7 @@ def read_user(self, code, request_handler): user_response['updating'] = False oauth_access_token = user_response.pop('access_token') self.user_states[user_response['email']] = user_response - self.clean_and_persist_sessions() + self.validate_sessions(force_persist=True) request_handler.set_secure_cookie('token', oauth_access_token) return user_response['email'] From 1c1a2e6a516454a99b9f137d517fd3938383d178 Mon Sep 17 00:00:00 2001 From: Miksir Date: Wed, 27 May 2020 19:17:22 +0300 Subject: [PATCH 007/398] Gitlab OAuth authentication --- src/auth/auth_base.py | 3 + src/auth/auth_gitlab.py | 276 +++++++++++++++++++++++++ src/auth/tornado_auth.py | 4 +- src/model/server_conf.py | 3 + web-src/public/login.html | 7 + web-src/src/assets/css/index.css | 37 ++++ web-src/src/assets/gitlab-icon-rgb.png | Bin 0 -> 1182 bytes web-src/src/login/login.js | 32 +++ 8 files changed, 361 insertions(+), 1 deletion(-) create 
mode 100644 src/auth/auth_gitlab.py create mode 100644 web-src/src/assets/gitlab-icon-rgb.png diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index a5ddefb1..e5e2553f 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -16,6 +16,9 @@ def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] + def is_active(self, user): + return True + class AuthRejectedError(Exception): """Credentials, provided by user, were rejected by the authentication mechanism (user is unknown to the server)""" diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py new file mode 100644 index 00000000..939fa2b0 --- /dev/null +++ b/src/auth/auth_gitlab.py @@ -0,0 +1,276 @@ +import json +import logging +import os +import time +import urllib.parse as urllib_parse + +import tornado.auth +import tornado.ioloop +from tornado.auth import OAuth2Mixin +from tornado import gen, httpclient, escape + +from auth import auth_base +from auth.auth_base import AuthFailureError, AuthBadRequestException +from model import model_helper + +from typing import List, Any, Dict, cast, Iterable, Union, Optional + +LOGGER = logging.getLogger('script_server.GitlabAuthorizer') + + +class GitlabOAuth2Mixin(OAuth2Mixin): + _OAUTH_AUTHORIZE_URL = '%s/oauth/authorize' + _OAUTH_ACCESS_TOKEN_URL = '%s/oauth/token' + _OAUTH_GITLAB_USERINFO = '%s/api/v4/user' + _OAUTH_GITLAB_GROUPS = '%s/api/v4/groups' + _GITLAB_PREFIX = 'https://gitlab.com' + + async def oauth2_request(self, url: str, access_token: str = None, post_args: Dict[str, Any] = None, + **args: Any) -> Any: + try: + return await super().oauth2_request(url, access_token, post_args, **args) + except tornado.httpclient.HTTPClientError as e: + LOGGER.error("HTTP error " + str(e.message)) + return None + + async def get_authenticated_user( + self, + redirect_uri: str, + client_id: str, + client_secret: str, + code: str, + extra_fields: Dict[str, Any] = None, + ) -> Optional[Dict[str, Any]]: + http = 
self.get_auth_http_client() + args = { + "redirect_uri": redirect_uri, + "code": code, + "client_id": client_id, + "client_secret": client_secret, + "grant_type": "authorization_code", + } + + fields = {"id", "username", "name", "email", "state"} + if extra_fields: + fields.update(extra_fields) + + body = urllib_parse.urlencode(args) + http_client = httpclient.AsyncHTTPClient() + response = await http_client.fetch( + self._OAUTH_ACCESS_TOKEN_URL % self._GITLAB_PREFIX, + method='POST', + headers={'Content-Type': 'application/x-www-form-urlencoded'}, + body=body, + raise_error=False) + + response_values = {} + if response.body: + response_values = escape.json_decode(response.body) + + if response.error: + if response_values.get('error_description'): + error_text = response_values.get('error_description') + elif response_values.get('error'): + error_text = response_values.get('error') + else: + error_text = str(response.error) + + error_message = 'Failed to load access_token: ' + error_text + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + access_token = response_values.get('access_token') + + if not access_token: + message = 'No access token in response: ' + str(response.body) + LOGGER.error(message) + raise AuthFailureError(message) + + user = await self.oauth2_request( + self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, + access_token) + + if user is None: + error_message = 'Failed to load user info' + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + fieldmap = response_values + for field in fields: + fieldmap[field] = user.get(field) + + return fieldmap + + +# noinspection PyProtectedMember +class GitlabOAuthAuthenticator(auth_base.Authenticator, GitlabOAuth2Mixin): + def __init__(self, params_dict): + super().__init__() + + LOGGER.debug("Init gitlab oauth provider with " + str(params_dict)) + + self.client_id = model_helper.read_obligatory(params_dict, 'client_id', ' for Gitlab OAuth') + + secret_value = 
model_helper.read_obligatory(params_dict, 'secret', ' for Gitlab OAuth') + self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) + + gitlabPrefix = params_dict.get('url') + if not model_helper.is_empty(gitlabPrefix): + self._GITLAB_PREFIX = gitlabPrefix + + self.states = {} + self.user_states = {} + self.gitlab_update = params_dict.get('ttl', 60) + self.gitlab_dump = params_dict.get('dump') + self.session_expire = int(params_dict.get('session_expire_min', 0)) * 60 + + if self.gitlab_dump and os.path.exists(self.gitlab_dump): + dumpFile = open(self.gitlab_dump, "r") + stateStr = dumpFile.read() + self.user_states = escape.json_decode(stateStr) + dumpFile.close() + LOGGER.info("Readed state from file %s: " % self.gitlab_dump + stateStr) + + self.gitlab_group_search = params_dict.get('group_search') + + self._client_visible_config['client_id'] = self.client_id + self._client_visible_config['oauth_url'] = self._OAUTH_AUTHORIZE_URL % self._GITLAB_PREFIX + self._client_visible_config['oauth_scope'] = 'api' + + def authenticate(self, request_handler): + code = request_handler.get_argument('code', False) + + if not code: + LOGGER.error('Code is not specified') + raise AuthBadRequestException('Missing authorization information. 
Please contact your administrator') + + return self.validate_user(code, request_handler) + + def is_active(self, user): + if self.user_states.get(user) is None: + LOGGER.info("User %s not found in state" % user) + return False + if self.user_states[user]['groups'] is None: + LOGGER.info("User %s state without groups" % user) + return False + now = time.time() + if self.session_expire and (self.user_states[user]['visit'] + self.session_expire) < now: + del self.user_states[user] + LOGGER.info("User %s session expired, logged out" % user) + self.dump_sessions_to_file() + return False + self.user_states[user]['visit'] = now + return True + + def get_groups(self, user, known_groups=None): + if self.user_states.get(user) is None: + return [] + now = time.time() + if (self.user_states[user]['updated'] + self.gitlab_update) < now and not self.user_states[user]['updating']: + self.user_states[user]['updating'] = True + tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + return self.user_states[user]['groups'] + + def clean_expired_sessions(self): + now = time.time() + if self.session_expire: + for userData in list(self.user_states.keys()): + if (self.user_states[userData]['visit'] + self.session_expire) < now: + LOGGER.debug("User %s session expired and removed" % userData) + del self.user_states[userData] + + def dump_sessions_to_file(self): + if self.gitlab_dump: + dumpFile = open(self.gitlab_dump, "w") + dumpFile.write(escape.json_encode(self.user_states)) + dumpFile.close() + LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) + + @gen.coroutine + def update_state(self, user): + group_list = yield self.read_groups(self.user_states[user]['access_token']) + if group_list is None: + LOGGER.error("Failed to refresh groups for %s" % user) + else: + LOGGER.info("Groups for %s refreshed: " % user + str(group_list)) + now = time.time() + self.user_states[user]['groups'] = group_list + self.user_states[user]['updating'] = False + 
self.user_states[user]['updated'] = now + self.user_states[user]['visit'] = now + self.clean_expired_sessions() + self.dump_sessions_to_file() + return + + @gen.coroutine + def read_groups(self, access_token): + args = { + 'access_token': access_token, + 'all_available': 'false', + 'per_page': 100, + } + if not self.gitlab_group_search is None: + args['search'] = self.gitlab_group_search + + group_list_future = self.oauth2_request( + self._OAUTH_GITLAB_GROUPS % self._GITLAB_PREFIX, + **args + ) + + group_list = yield group_list_future + + if group_list is None: + return None + + groups = [] + for group in group_list: + if group.get('full_path'): + groups.append(group['full_path']) + + return groups + + @gen.coroutine + def validate_user(self, code, request_handler): + user_response_future = self.get_authenticated_user( + get_path_for_redirect(request_handler), + self.client_id, + self.secret, + code + ) + user_response = yield user_response_future + + if user_response.get('email') is None: + error_message = 'No email field in user response. The response: ' + str(user_response) + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + user_groups = yield self.read_groups(user_response.get('access_token')) + if user_groups is None: + error_message = 'Cant read user groups' + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + LOGGER.info("User %s group list: " % user_response['email'] + str(user_groups)) + user_response['groups'] = user_groups + user_response['updated'] = time.time() + user_response['visit'] = time.time() + user_response['updating'] = False + self.user_states[user_response['email']] = user_response + self.clean_expired_sessions() + self.dump_sessions_to_file() + + return user_response['email'] + + +def get_path_for_redirect(request_handler): + referer = request_handler.request.headers.get('Referer') + if not referer: + LOGGER.error('No referer') + raise AuthFailureError('Missing request header. 
Please contact system administrator') + + parse_result = urllib_parse.urlparse(referer) + protocol = parse_result[0] + host = parse_result[1] + path = parse_result[2] + + return urllib_parse.urlunparse((protocol, host, path, '', '', '')) diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 2bca3d7e..5a695477 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -23,8 +23,10 @@ def is_authenticated(self, request_handler): return True username = self._get_current_user(request_handler) + if not username: + return False - return bool(username) + return self.authenticator.is_active(username) @staticmethod def _get_current_user(request_handler): diff --git a/src/model/server_conf.py b/src/model/server_conf.py index 71d26ca5..f09064e4 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -150,6 +150,9 @@ def create_authenticator(auth_object, temp_folder): elif auth_type == 'google_oauth': from auth.auth_google_oauth import GoogleOauthAuthenticator authenticator = GoogleOauthAuthenticator(auth_object) + elif auth_type == 'gitlab': + from auth.auth_gitlab import GitlabOAuthAuthenticator + authenticator = GitlabOAuthAuthenticator(auth_object) elif auth_type == 'htpasswd': from auth.auth_htpasswd import HtpasswdAuthenticator authenticator = HtpasswdAuthenticator(auth_object) diff --git a/web-src/public/login.html b/web-src/public/login.html index 04904bff..1c5df7dd 100644 --- a/web-src/public/login.html +++ b/web-src/public/login.html @@ -43,4 +43,11 @@ + + \ No newline at end of file diff --git a/web-src/src/assets/css/index.css b/web-src/src/assets/css/index.css index da2b1fc9..4d278d3e 100644 --- a/web-src/src/assets/css/index.css +++ b/web-src/src/assets/css/index.css @@ -156,3 +156,40 @@ input[type=checkbox]:not(.browser-default) + span { #login-google_oauth-button[disabled] { color: #B0B0B0; } + + +#login-panel .login-gitlab .login-info-label { + margin-top: 16px; +} + +#login-gitlab-button { + height: 40px; + width: 
188px; + padding-left: 34px; + margin: auto; + margin-top: 34px; + display: block; + + font-size: 14px; + font-weight: 500; + color: #757575; + + border-radius: 2px; + box-shadow: 0 1px 3px -1px #202020; + border: none; + + background-image: url('../gitlab-icon-rgb.png'); + background-color: white; + background-position-y: 50%; + background-position-x: -4px; + background-size: 48px; + background-repeat: no-repeat; +} + +#login-gitlab-button:active { + background-color: #EEE; +} + +#login-gitlab-button[disabled] { + color: #B0B0B0; +} diff --git a/web-src/src/assets/gitlab-icon-rgb.png b/web-src/src/assets/gitlab-icon-rgb.png new file mode 100644 index 0000000000000000000000000000000000000000..21a02db58782252d22ed6d2c913b4cdb3d32a76d GIT binary patch literal 1182 zcmeAS@N?(olHy`uVBq!ia0vp^2_VeD1|%QND7OGoY)RhkE(}p1m{?jr6DY!2;1O92 z)Nvhz8J#p{R{#asOFVsD*`KgViE6PNS+_`(fq_Na)5S5Q;?~<6r}L#(r#-X&J$oyo5-#wI{l%He z;?yPYZ{2^J4y0zCn`53`I&V`L`?fu1|L@(}^e5?Q(5D|0#qB5C?3*`rzW)02`%g2h zdSdm+HN!#j*^P<6Bl_=M;ySc-LK?%>Df3N~zKQK(I8bO%^n@X&kd>Mr5*gg z@-*Dvobwm`dXj-JQT&MU(Ryw6PS`IR+AZDecmU$`+gxLAX;Nl_}Pdhi>%@_B7)1}Ojx(7$_YQ8wZFz7 zRJ)k(Y7oQ57TtgC{&QWJayytTq+?_jbJk|qUu-jK{if5v(ZF)+ zYs#u-#vlBVx&CkTs*D?i8Jl?-KQ+8RV#MO1yx`%I>&wO47C+HtdvJe=VC~8$hcX#D z|Gv-8(Ts1CYCh%9y|7` z{a-1Nf3IHr$ls+eTfW~+T0O(mY@5~5BWG&s1g#r>JDzvn<+_jWWv@T;56*`2`Uuv9 zm)9>{-nQ3UF{kR_5k{N9Eqg__oLJ@Gv$6e;=AuPKCf^FKLj$e!!{2I8k?&RuxhzCL*KXBXK5_2Bs{!|sR(9gt~oJ#b4P;#Y3<|1D~t|ahe1G0`GNfJl>U||8x#Rr@KCNNQSg`dr>pYg0=E|Lke4X3(&0js;@@go9tLjVb zwS}htnN>^@__B0c_ox?LKHs@J;H$~>T|5j7YrY4sUt#}oFVdQ&MBb@09(5onE(I) literal 0 HcmV?d00001 diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index 20cd8c65..0d30118d 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -30,6 +30,8 @@ function onLoad() { var config = JSON.parse(configResponse); if (config['type'] === 'google_oauth') { setupGoogleOAuth(loginContainer, config); + } else if (config['type'] === 'gitlab') { + setupGitlabOAuth(loginContainer, 
config); } else { setupCredentials(loginContainer); } @@ -84,6 +86,36 @@ function setupGoogleOAuth(loginContainer, authConfig) { processCurrentOauthState(); } +function setupGitlabOAuth(loginContainer, authConfig) { + var credentialsTemplate = createTemplateElement('login-gitlab-template'); + loginContainer.appendChild(credentialsTemplate); + + var oauthLoginButton = document.getElementById('login-gitlab-button'); + oauthLoginButton.onclick = function () { + var token = guid(32); + + var localState = { + 'token': token, + 'urlFragment': window.location.hash + }; + localState[NEXT_URL_KEY] = getQueryParameter(NEXT_URL_KEY); + + saveState(localState); + + const queryArgs = { + 'redirect_uri': getUnparameterizedUrl(), + 'state': token, + 'client_id': authConfig['client_id'], + 'scope': authConfig['oauth_scope'], + 'response_type': OAUTH_RESPONSE_KEY + }; + const query = toQueryArgs(queryArgs); + window.location = authConfig['oauth_url'] + '?' + query; + }; + + processCurrentOauthState(); +} + function processCurrentOauthState() { var oauthState = restoreState(); From 662e2527a1d4243526c8f2498e4ff610a2d016c4 Mon Sep 17 00:00:00 2001 From: MiksIr Date: Thu, 4 Jun 2020 16:41:52 +0300 Subject: [PATCH 008/398] Gitlab Oauth tests --- src/auth/auth_gitlab.py | 9 +- src/tests/auth/test_auth_gitlab.py | 130 +++++++++++++++++++++++++++++ src/tests/server_conf_test.py | 27 ++++++ 3 files changed, 163 insertions(+), 3 deletions(-) create mode 100644 src/tests/auth/test_auth_gitlab.py diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 939fa2b0..12e44c48 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -129,7 +129,7 @@ def __init__(self, params_dict): stateStr = dumpFile.read() self.user_states = escape.json_decode(stateStr) dumpFile.close() - LOGGER.info("Readed state from file %s: " % self.gitlab_dump + stateStr) + LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) self.gitlab_group_search = 
params_dict.get('group_search') @@ -167,8 +167,7 @@ def get_groups(self, user, known_groups=None): return [] now = time.time() if (self.user_states[user]['updated'] + self.gitlab_update) < now and not self.user_states[user]['updating']: - self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + self.do_update_groups(user) return self.user_states[user]['groups'] def clean_expired_sessions(self): @@ -186,6 +185,10 @@ def dump_sessions_to_file(self): dumpFile.close() LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) + def do_update_groups(self, user): + self.user_states[user]['updating'] = True + tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + @gen.coroutine def update_state(self, user): group_list = yield self.read_groups(self.user_states[user]['access_token']) diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py new file mode 100644 index 00000000..fe59c5b9 --- /dev/null +++ b/src/tests/auth/test_auth_gitlab.py @@ -0,0 +1,130 @@ +import json +import os +import tempfile +import time +import unittest + +from tornado import escape + +from auth.auth_gitlab import GitlabOAuthAuthenticator +from model import server_conf +from tests import test_utils +from utils import file_utils +from unittest import TestCase +from unittest.mock import patch, Mock + +if __name__ == '__main__': + unittest.main() + +mock_time = Mock() +mock_time.return_value = 10000.01 +mock_dump_sessions_to_file = Mock() +mock_do_update_groups = Mock() + + +class TestAuthConfig(TestCase): + @patch('time.time', mock_time) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.dump_sessions_to_file', mock_dump_sessions_to_file) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) + def test_gitlab_oauth(self): + tmp = tempfile.mkstemp('.json', 'test_auth_gitlab-') + now = time.time() + state = { + "user@test.com": { + "groups": ["testgroup"], + 
"updating": False, + "updated": now-10, + "visit": now-10, + "id": 1, + "username": "test", + "name": "John", + "email": "user@test.com", + "state": "active" + }, + "nogroups@test.com": { + "groups": None, + "updating": False, + "updated": now-10, + "visit": now-10, + "id": 2, + "username": "nogroups", + "name": "John", + "email": "nogroups@test.com", + "state": "active" + } + } + + os.write(tmp[0], str.encode(escape.json_encode(state))) + os.fsync(tmp[0]) + + config = _from_json({ + 'auth': { + "type": "gitlab", + "url": "https://gitlab", + "client_id": "1234", + "secret": "abcd", + "group_search": "script-server", + "ttl": 80, + "dump": tmp[1], + "session_expire_min": 1 + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEqual('1234', config.authenticator.client_id) + self.assertEqual('abcd', config.authenticator.secret) + self.assertEqual('https://gitlab', config.authenticator._GITLAB_PREFIX) + self.assertEqual('script-server', config.authenticator.gitlab_group_search) + self.assertEqual(80, config.authenticator.gitlab_update) + self.assertEqual(tmp[1], config.authenticator.gitlab_dump) + self.assertEqual(60, config.authenticator.session_expire) + self.assertDictEqual(state, config.authenticator.user_states) + self.assertEqual(False, config.authenticator.is_active("unknown@test.com")) + self.assertEqual(False, config.authenticator.is_active("nogroups@test.com")) + self.assertEqual(True, config.authenticator.is_active("user@test.com")) + self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"]) + + # session expire test + saved_state = config.authenticator.user_states["user@test.com"].copy() + config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 + self.assertEqual(False, config.authenticator.is_active("user@test.com")) + self.assertEqual(True, mock_dump_sessions_to_file.called) + mock_dump_sessions_to_file.reset_mock() + 
self.assertIsNone(config.authenticator.user_states.get("user@test.com")) + config.authenticator.user_states["user@test.com"] = saved_state + + self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) + + # do not update because new + self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) + self.assertEqual(False, mock_do_update_groups.called) + mock_do_update_groups.reset_mock() + # update because old + config.authenticator.user_states["user@test.com"]["updated"] = time.time() - 81 + self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) + mock_do_update_groups.assert_called_with("user@test.com") + mock_do_update_groups.reset_mock() + # do not update because already updating + config.authenticator.user_states["user@test.com"]["updating"] = True + self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) + self.assertEqual(False, mock_do_update_groups.called) + config.authenticator.user_states["user@test.com"]["updating"] = False + + # test clean expire + saved_state = config.authenticator.user_states["user@test.com"].copy() + config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 + config.authenticator.clean_expired_sessions() + self.assertIsNone(config.authenticator.user_states.get("user@test.com")) + config.authenticator.user_states["user@test.com"] = saved_state + + os.close(tmp[0]) + os.unlink(tmp[1]) + + +def _from_json(content): + json_obj = json.dumps(content) + conf_path = os.path.join(test_utils.temp_folder, 'conf.json') + file_utils.write_file(conf_path, json_obj) + return server_conf.from_json(conf_path, test_utils.temp_folder) \ No newline at end of file diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index 566b68a2..9ea7a0db 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -2,6 +2,7 @@ import os import unittest +from auth.auth_gitlab import 
GitlabOAuthAuthenticator from auth.auth_google_oauth import GoogleOauthAuthenticator from auth.auth_htpasswd import HtpasswdAuthenticator from auth.auth_ldap import LdapAuthenticator @@ -203,6 +204,32 @@ def test_google_oauth_without_allowed_users(self): 'client_id': '1234', 'secret': 'abcd'}}) + def test_gitlab_oauth(self): + config = _from_json({ + 'auth': { + "type": "gitlab", + "url": "https://gitlab", + "client_id": "1234", + "secret": "abcd", + "group_search": "script-server", + "ttl": 60, + "dump": "/tmp/dump.json", + "session_expire_min": 60 + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEquals('1234', config.authenticator.client_id) + self.assertEquals('abcd', config.authenticator.secret) + self.assertEquals('https://gitlab', config.authenticator._GITLAB_PREFIX) + self.assertEquals('script-server', config.authenticator.gitlab_group_search) + self.assertEquals(60, config.authenticator.gitlab_update) + self.assertEquals("/tmp/dump.json", config.authenticator.gitlab_dump) + self.assertEquals(60*60, config.authenticator.session_expire) + + def test_ldap(self): config = _from_json({'auth': {'type': 'ldap', 'url': 'http://test-ldap.net', From b4623f1e5fbb3c8f728679ee56e61b296b47d588 Mon Sep 17 00:00:00 2001 From: MiksIr Date: Thu, 4 Jun 2020 23:39:21 +0300 Subject: [PATCH 009/398] + Store Gitlab keys to cookes so dump file now free from private information + group_support on/off, is off - read_user scope used --- src/auth/auth_base.py | 5 +- src/auth/auth_gitlab.py | 120 +++++++++++++++++------ src/auth/tornado_auth.py | 8 +- src/tests/auth/test_auth_gitlab.py | 148 +++++++++++++++++++++-------- src/tests/server_conf_test.py | 26 ++++- src/web/server.py | 18 ++-- 6 files changed, 246 insertions(+), 79 deletions(-) diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index e5e2553f..64a2e2ad 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -16,9 +16,12 @@ 
def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] - def is_active(self, user): + def is_active(self, user, request_handler): return True + def logout(self, user, request_handler): + return None + class AuthRejectedError(Exception): """Credentials, provided by user, were rejected by the authentication mechanism (user is unknown to the server)""" diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 12e44c48..7d00899d 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -39,7 +39,6 @@ async def get_authenticated_user( client_id: str, client_secret: str, code: str, - extra_fields: Dict[str, Any] = None, ) -> Optional[Dict[str, Any]]: http = self.get_auth_http_client() args = { @@ -50,10 +49,6 @@ async def get_authenticated_user( "grant_type": "authorization_code", } - fields = {"id", "username", "name", "email", "state"} - if extra_fields: - fields.update(extra_fields) - body = urllib_parse.urlencode(args) http_client = httpclient.AsyncHTTPClient() response = await http_client.fetch( @@ -86,22 +81,28 @@ async def get_authenticated_user( LOGGER.error(message) raise AuthFailureError(message) - user = await self.oauth2_request( - self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, - access_token) + user = await self.fetch_user(access_token) if user is None: error_message = 'Failed to load user info' LOGGER.error(error_message) raise AuthFailureError(error_message) - fieldmap = response_values - for field in fields: + return {**response_values, **user} + + async def fetch_user(self, access_token): + user = await self.oauth2_request( + self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, + access_token) + if user is None: + return None + + fieldmap = {} + for field in {"id", "username", "name", "email", "state"}: fieldmap[field] = user.get(field) return fieldmap - # noinspection PyProtectedMember class GitlabOAuthAuthenticator(auth_base.Authenticator, GitlabOAuth2Mixin): def __init__(self, 
params_dict): @@ -120,22 +121,29 @@ def __init__(self, params_dict): self.states = {} self.user_states = {} - self.gitlab_update = params_dict.get('ttl', 60) + self.gitlab_update = params_dict.get('ttl') self.gitlab_dump = params_dict.get('dump') + self.gitlab_group_support = params_dict.get('group_support', True) self.session_expire = int(params_dict.get('session_expire_min', 0)) * 60 + now = time.time() if self.gitlab_dump and os.path.exists(self.gitlab_dump): dumpFile = open(self.gitlab_dump, "r") stateStr = dumpFile.read() self.user_states = escape.json_decode(stateStr) dumpFile.close() + for userData in list(self.user_states.keys()): + # force to update user from gitlab + self.user_states[userData]['updating'] = False + if self.gitlab_update: + self.user_states[userData]['updated'] = now - self.gitlab_update - 1 LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) self.gitlab_group_search = params_dict.get('group_search') self._client_visible_config['client_id'] = self.client_id self._client_visible_config['oauth_url'] = self._OAUTH_AUTHORIZE_URL % self._GITLAB_PREFIX - self._client_visible_config['oauth_scope'] = 'api' + self._client_visible_config['oauth_scope'] = 'api' if self.gitlab_group_support else 'read_user' def authenticate(self, request_handler): code = request_handler.get_argument('code', False) @@ -146,30 +154,55 @@ def authenticate(self, request_handler): return self.validate_user(code, request_handler) - def is_active(self, user): + def is_active(self, user, request_handler): + access_token = request_handler.get_secure_cookie('token') + if access_token is None: + return False + access_token = access_token.decode("utf-8") + if self.user_states.get(user) is None: - LOGGER.info("User %s not found in state" % user) + LOGGER.debug("User %s not found in state" % user) return False - if self.user_states[user]['groups'] is None: - LOGGER.info("User %s state without groups" % user) + + if self.user_states[user]['state'] 
is None or self.user_states[user]['state'] != "active": + LOGGER.info("User %s state inactive: " % user + str(self.user_states[user])) + del self.user_states[user] + self.dump_sessions_to_file() return False + now = time.time() + # check session ttl if self.session_expire and (self.user_states[user]['visit'] + self.session_expire) < now: del self.user_states[user] LOGGER.info("User %s session expired, logged out" % user) self.dump_sessions_to_file() return False + self.user_states[user]['visit'] = now + + # check gitlab response ttl, also check for stale updating (ttl*2) + if self.gitlab_update is not None: + stale = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now + ttl_expired = (self.user_states[user]['updated'] + self.gitlab_update) < now + updating_now = self.user_states[user]['updating'] is True + if ttl_expired and (not updating_now or stale): + if self.gitlab_group_support: + self.do_update_groups(user, access_token) + else: + self.do_update_user(user, access_token) + return True def get_groups(self, user, known_groups=None): if self.user_states.get(user) is None: return [] - now = time.time() - if (self.user_states[user]['updated'] + self.gitlab_update) < now and not self.user_states[user]['updating']: - self.do_update_groups(user) + if self.user_states[user]['groups'] is None: + return [] return self.user_states[user]['groups'] + def logout(self, user, request_handler): + request_handler.clear_cookie('token') + def clean_expired_sessions(self): now = time.time() if self.session_expire: @@ -185,19 +218,41 @@ def dump_sessions_to_file(self): dumpFile.close() LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) - def do_update_groups(self, user): + def do_update_user(self, user, access_token): + self.user_states[user]['updating'] = True + tornado.ioloop.IOLoop.current().spawn_callback(self.update_user_state, user, access_token) + + def do_update_groups(self, user, access_token): self.user_states[user]['updating'] = True - 
tornado.ioloop.IOLoop.current().spawn_callback(self.update_state, user) + tornado.ioloop.IOLoop.current().spawn_callback(self.update_group_list, user, access_token) @gen.coroutine - def update_state(self, user): - group_list = yield self.read_groups(self.user_states[user]['access_token']) + def update_group_list(self, user, access_token): + group_list = yield self.read_groups(access_token) if group_list is None: LOGGER.error("Failed to refresh groups for %s" % user) + self.user_states[user]['state'] = "error" else: LOGGER.info("Groups for %s refreshed: " % user + str(group_list)) + self.user_states[user]['groups'] = group_list + now = time.time() + self.user_states[user]['updating'] = False + self.user_states[user]['updated'] = now + self.user_states[user]['visit'] = now + self.clean_expired_sessions() + self.dump_sessions_to_file() + return + + @gen.coroutine + def update_user_state(self, user, access_token): + user_state = yield self.fetch_user(access_token) + if user_state is None: + LOGGER.error("Failed to fetch user %s" % user) + self.user_states[user]['state'] = "error" + else: + LOGGER.info("User %s refreshed: " % user + str(user_state)) + self.user_states[user] = {**self.user_states[user], **user_state} now = time.time() - self.user_states[user]['groups'] = group_list self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now @@ -247,20 +302,25 @@ def validate_user(self, code, request_handler): LOGGER.error(error_message) raise AuthFailureError(error_message) - user_groups = yield self.read_groups(user_response.get('access_token')) - if user_groups is None: - error_message = 'Cant read user groups' - LOGGER.error(error_message) - raise AuthFailureError(error_message) + user_groups = [] + if self.gitlab_group_support: + user_groups = yield self.read_groups(user_response.get('access_token')) + if user_groups is None: + error_message = 'Cant read user groups' + LOGGER.error(error_message) + raise 
AuthFailureError(error_message) LOGGER.info("User %s group list: " % user_response['email'] + str(user_groups)) user_response['groups'] = user_groups user_response['updated'] = time.time() user_response['visit'] = time.time() user_response['updating'] = False + oauth_access_token = user_response.pop('access_token') + oauth_refresh_token = user_response.pop('refresh_token') # not used atm self.user_states[user_response['email']] = user_response self.clean_expired_sessions() self.dump_sessions_to_file() + request_handler.set_secure_cookie('token', oauth_access_token) return user_response['email'] diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 5a695477..b7d9e0ad 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -26,7 +26,11 @@ def is_authenticated(self, request_handler): if not username: return False - return self.authenticator.is_active(username) + active = self.authenticator.is_active(username, request_handler) + if not active: + self.logout(request_handler) + + return active @staticmethod def _get_current_user(request_handler): @@ -100,3 +104,5 @@ def logout(self, request_handler): LOGGER.info('Logging out ' + username) request_handler.clear_cookie('username') + + self.authenticator.logout(username, request_handler) diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index fe59c5b9..530ae841 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -1,3 +1,4 @@ +import copy import json import os import tempfile @@ -20,6 +21,8 @@ mock_time.return_value = 10000.01 mock_dump_sessions_to_file = Mock() mock_do_update_groups = Mock() +mock_do_update_user = Mock() +mock_request_handler = Mock(**{'get_secure_cookie.return_value': "12345".encode()}) class TestAuthConfig(TestCase): @@ -43,14 +46,14 @@ def test_gitlab_oauth(self): }, "nogroups@test.com": { "groups": None, - "updating": False, + "updating": True, "updated": now-10, "visit": now-10, "id": 2, 
"username": "nogroups", "name": "John", "email": "nogroups@test.com", - "state": "active" + "state": "blocked" } } @@ -66,62 +69,131 @@ def test_gitlab_oauth(self): "group_search": "script-server", "ttl": 80, "dump": tmp[1], - "session_expire_min": 1 + "session_expire_min": 10 }, 'access': { 'allowed_users': [] }}) self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEqual('1234', config.authenticator.client_id) - self.assertEqual('abcd', config.authenticator.secret) - self.assertEqual('https://gitlab', config.authenticator._GITLAB_PREFIX) - self.assertEqual('script-server', config.authenticator.gitlab_group_search) - self.assertEqual(80, config.authenticator.gitlab_update) self.assertEqual(tmp[1], config.authenticator.gitlab_dump) - self.assertEqual(60, config.authenticator.session_expire) - self.assertDictEqual(state, config.authenticator.user_states) - self.assertEqual(False, config.authenticator.is_active("unknown@test.com")) - self.assertEqual(False, config.authenticator.is_active("nogroups@test.com")) - self.assertEqual(True, config.authenticator.is_active("user@test.com")) - self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"]) + self.assertEqual("1234", config.authenticator._client_visible_config['client_id']) + self.assertEqual("https://gitlab/oauth/authorize", config.authenticator._client_visible_config['oauth_url']) + self.assertEqual("api", config.authenticator._client_visible_config['oauth_scope']) + + assert_state = state.copy() + for key in list(assert_state.keys()): + assert_state[key]['updating'] = False + assert_state[key]['updated'] = 10000.01 - 80 - 1 + self.assertDictEqual(assert_state, config.authenticator.user_states) + saved_state = copy.deepcopy(config.authenticator.user_states) + + self.assertEqual(False, config.authenticator.is_active("unknown@test.com", mock_request_handler)) + self.assertEqual(False, config.authenticator.is_active("nogroups@test.com", 
mock_request_handler)) + self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) + self.assertListEqual([], config.authenticator.get_groups("nogroups@test.com")) - # session expire test - saved_state = config.authenticator.user_states["user@test.com"].copy() - config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 - self.assertEqual(False, config.authenticator.is_active("user@test.com")) - self.assertEqual(True, mock_dump_sessions_to_file.called) - mock_dump_sessions_to_file.reset_mock() - self.assertIsNone(config.authenticator.user_states.get("user@test.com")) - config.authenticator.user_states["user@test.com"] = saved_state + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"], "visit updated") + self.assertEqual(True, mock_do_update_groups.called, "state just loaded, gitlab updating") + mock_do_update_groups.reset_mock() - self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) + config.authenticator.user_states["user@test.com"]["updating"] = True + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "do not call parallel updated") + mock_do_update_groups.reset_mock() - # do not update because new - self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) - self.assertEqual(False, mock_do_update_groups.called) + mock_time.return_value = 10000.01 + 80*2 + 1 # stale request + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, mock_do_update_groups.called, "parallel but stale") mock_do_update_groups.reset_mock() - # update because old - config.authenticator.user_states["user@test.com"]["updated"] = time.time() - 81 - self.assertListEqual(["testgroup"], 
config.authenticator.get_groups("user@test.com")) - mock_do_update_groups.assert_called_with("user@test.com") + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 + + config.authenticator.user_states["user@test.com"]['updated'] = now # gitlab info updated + config.authenticator.user_states["user@test.com"]['updating'] = False + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "do not update gitlab because ttl not expired") mock_do_update_groups.reset_mock() - # do not update because already updating - config.authenticator.user_states["user@test.com"]["updating"] = True - self.assertListEqual(["testgroup"], config.authenticator.get_groups("user@test.com")) - self.assertEqual(False, mock_do_update_groups.called) - config.authenticator.user_states["user@test.com"]["updating"] = False + + mock_time.return_value = 10000.01 + 81 + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, mock_do_update_groups.called, "ttl expired") + mock_do_update_groups.reset_mock() + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 + + # session expire test + mock_time.return_value = 10000.01 + 601 + self.assertEqual(False, config.authenticator.is_active("user@test.com", mock_request_handler), "shoud be expired") + self.assertEqual(True, mock_dump_sessions_to_file.called, "dump state to file") + mock_dump_sessions_to_file.reset_mock() + self.assertIsNone(config.authenticator.user_states.get("user@test.com"), "removed from state") + self.assertListEqual([], config.authenticator.get_groups("user@test.com")) + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 # test clean expire - saved_state = config.authenticator.user_states["user@test.com"].copy() - 
config.authenticator.user_states["user@test.com"]["visit"] = time.time() - 61 + mock_time.return_value = 10000.01 + 601 config.authenticator.clean_expired_sessions() self.assertIsNone(config.authenticator.user_states.get("user@test.com")) - config.authenticator.user_states["user@test.com"] = saved_state + config.authenticator.user_states = copy.deepcopy(saved_state) + mock_time.return_value = 10000.01 os.close(tmp[0]) os.unlink(tmp[1]) + @patch('time.time', mock_time) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_user', mock_do_update_user) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) + def test_gitlab_oauth_user_read_scope(self): + now = time.time() + + state = { + "user@test.com": { + "groups": ["testgroup"], + "updating": False, + "updated": 0, + "visit": now-10, + "id": 1, + "username": "test", + "name": "John", + "email": "user@test.com", + "state": "active" + } + } + + config = _from_json({ + 'auth': { + "type": "gitlab", + "url": "https://gitlab", + "client_id": "1234", + "secret": "abcd", + "group_search": "script-server", + "ttl": 80, + "session_expire_min": 1, + "group_support": False + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEqual("read_user", config.authenticator._client_visible_config['oauth_scope']) + config.authenticator.user_states = state + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "update==0, gitlab updating but not groups") + self.assertEqual(True, mock_do_update_user.called, "update==0, gitlab updating only user") + mock_do_update_groups.reset_mock() + mock_do_update_user.reset_mock() + + config.authenticator.gitlab_update = None + self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(False, mock_do_update_groups.called, "gitab 
update disabled") + self.assertEqual(False, mock_do_update_user.called, "gitab update disabled") + mock_do_update_groups.reset_mock() + mock_do_update_user.reset_mock() + def _from_json(content): json_obj = json.dumps(content) diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index 9ea7a0db..c412df5b 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -212,9 +212,10 @@ def test_gitlab_oauth(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 60, + "ttl": 120, "dump": "/tmp/dump.json", - "session_expire_min": 60 + "session_expire_min": 60, + "group_support": False }, 'access': { 'allowed_users': [] @@ -225,10 +226,29 @@ def test_gitlab_oauth(self): self.assertEquals('abcd', config.authenticator.secret) self.assertEquals('https://gitlab', config.authenticator._GITLAB_PREFIX) self.assertEquals('script-server', config.authenticator.gitlab_group_search) - self.assertEquals(60, config.authenticator.gitlab_update) + self.assertEquals(120, config.authenticator.gitlab_update) self.assertEquals("/tmp/dump.json", config.authenticator.gitlab_dump) self.assertEquals(60*60, config.authenticator.session_expire) + self.assertEquals(False, config.authenticator.gitlab_group_support) + def test_gitlab_oauth_default(self): + config = _from_json({ + 'auth': { + "type": "gitlab", + "client_id": "1234", + "secret": "abcd", + }, + 'access': { + 'allowed_users': [] + }}) + + self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) + self.assertEquals('https://gitlab.com', config.authenticator._GITLAB_PREFIX) + self.assertIsNone(config.authenticator.gitlab_group_search) + self.assertIsNone(config.authenticator.gitlab_update) + self.assertIsNone(config.authenticator.gitlab_dump) + self.assertIsNone(config.authenticator.session_expire) + self.assertEquals(True, config.authenticator.gitlab_group_support) def test_ldap(self): config = _from_json({'auth': {'type': 'ldap', diff --git 
a/src/web/server.py b/src/web/server.py index 7a694e32..9870c77e 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -105,6 +105,13 @@ def wrapper(self, *args, **kwargs): auth = self.application.auth authorizer = self.application.authorizer + login_url = self.get_login_url() + request_path = self.request.path + + login_resource = is_allowed_during_login(request_path, login_url, self) + if login_resource: + return func(self, *args, **kwargs) + authenticated = auth.is_authenticated(self) access_allowed = authenticated and authorizer.is_allowed_in_app(_identify_user(self)) @@ -118,11 +125,7 @@ def wrapper(self, *args, **kwargs): else: raise tornado.web.HTTPError(code, message) - login_url = self.get_login_url() - request_path = self.request.path - - login_resource = is_allowed_during_login(request_path, login_url, self) - if (authenticated and access_allowed) or login_resource: + if authenticated and access_allowed: return func(self, *args, **kwargs) if not isinstance(self, tornado.web.StaticFileHandler): @@ -171,7 +174,10 @@ def wrapper(self, *args, **kwargs): def has_admin_rights(request_handler): - user_id = _identify_user(request_handler) + try: + user_id = _identify_user(request_handler) + except Exception: + return False return request_handler.application.authorizer.is_admin(user_id) From e1e08ce40d7f502e1a6eb190d770fd045fd8d3c3 Mon Sep 17 00:00:00 2001 From: MiksIr Date: Thu, 4 Jun 2020 23:58:44 +0300 Subject: [PATCH 010/398] Rename config keys: dump -> state_dump_file, ttl -> auth_info_ttl, session_expire_min -> session_expire_minutes --- src/auth/auth_gitlab.py | 6 +++--- src/tests/auth/test_auth_gitlab.py | 10 +++++----- src/tests/server_conf_test.py | 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 7d00899d..4fc916ae 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -121,10 +121,10 @@ def __init__(self, params_dict): self.states = {} self.user_states = 
{} - self.gitlab_update = params_dict.get('ttl') - self.gitlab_dump = params_dict.get('dump') + self.gitlab_update = params_dict.get('auth_info_ttl') + self.gitlab_dump = params_dict.get('state_dump_file') self.gitlab_group_support = params_dict.get('group_support', True) - self.session_expire = int(params_dict.get('session_expire_min', 0)) * 60 + self.session_expire = int(params_dict.get('session_expire_minutes', 0)) * 60 now = time.time() if self.gitlab_dump and os.path.exists(self.gitlab_dump): diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index 530ae841..969591b3 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -67,9 +67,9 @@ def test_gitlab_oauth(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 80, - "dump": tmp[1], - "session_expire_min": 10 + "auth_info_ttl": 80, + "state_dump_file": tmp[1], + "session_expire_minutes": 10 }, 'access': { 'allowed_users': [] @@ -170,8 +170,8 @@ def test_gitlab_oauth_user_read_scope(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 80, - "session_expire_min": 1, + "auth_info_ttl": 80, + "session_expire_minutes": 1, "group_support": False }, 'access': { diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index c412df5b..c26a7ef9 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -212,9 +212,9 @@ def test_gitlab_oauth(self): "client_id": "1234", "secret": "abcd", "group_search": "script-server", - "ttl": 120, - "dump": "/tmp/dump.json", - "session_expire_min": 60, + "auth_info_ttl": 120, + "state_dump_file": "/tmp/dump.json", + "session_expire_minutes": 60, "group_support": False }, 'access': { From 88b2c482b8777631bd6e9c6b02b0f9626b35022f Mon Sep 17 00:00:00 2001 From: MiksIr Date: Wed, 10 Jun 2020 03:20:02 +0300 Subject: [PATCH 011/398] Refactoring of gitlab auth class --- src/auth/auth_base.py | 2 +- 
src/auth/auth_gitlab.py | 98 ++++++++++++++---------------- src/auth/tornado_auth.py | 2 +- src/tests/auth/test_auth_gitlab.py | 44 +++++++------- src/web/server.py | 12 ++-- web-src/src/login/login.js | 44 +++++--------- 6 files changed, 90 insertions(+), 112 deletions(-) diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index 64a2e2ad..45e3edaa 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -16,7 +16,7 @@ def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] - def is_active(self, user, request_handler): + def validate_user(self, user, request_handler): return True def logout(self, user, request_handler): diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 4fc916ae..76064350 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -15,6 +15,8 @@ from typing import List, Any, Dict, cast, Iterable, Union, Optional +from utils import file_utils + LOGGER = logging.getLogger('script_server.GitlabAuthorizer') @@ -58,6 +60,7 @@ async def get_authenticated_user( body=body, raise_error=False) + default_response_values = {"state": "unknown"} response_values = {} if response.body: response_values = escape.json_decode(response.body) @@ -88,7 +91,7 @@ async def get_authenticated_user( LOGGER.error(error_message) raise AuthFailureError(error_message) - return {**response_values, **user} + return {**default_response_values, **response_values, **user} async def fetch_user(self, access_token): user = await self.oauth2_request( @@ -115,9 +118,9 @@ def __init__(self, params_dict): secret_value = model_helper.read_obligatory(params_dict, 'secret', ' for Gitlab OAuth') self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) - gitlabPrefix = params_dict.get('url') - if not model_helper.is_empty(gitlabPrefix): - self._GITLAB_PREFIX = gitlabPrefix + gitlab_prefix = params_dict.get('url') + if not model_helper.is_empty(gitlab_prefix): + self._GITLAB_PREFIX = gitlab_prefix 
self.states = {} self.user_states = {} @@ -128,15 +131,13 @@ def __init__(self, params_dict): now = time.time() if self.gitlab_dump and os.path.exists(self.gitlab_dump): - dumpFile = open(self.gitlab_dump, "r") - stateStr = dumpFile.read() - self.user_states = escape.json_decode(stateStr) - dumpFile.close() - for userData in list(self.user_states.keys()): + state_str = file_utils.read_file(self.gitlab_dump) + self.user_states = escape.json_decode(state_str) + for user_data in list(self.user_states.keys()): # force to update user from gitlab - self.user_states[userData]['updating'] = False + self.user_states[user_data]['updating'] = False if self.gitlab_update: - self.user_states[userData]['updated'] = now - self.gitlab_update - 1 + self.user_states[user_data]['updated'] = now - self.gitlab_update - 1 LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) self.gitlab_group_search = params_dict.get('group_search') @@ -152,40 +153,29 @@ def authenticate(self, request_handler): LOGGER.error('Code is not specified') raise AuthBadRequestException('Missing authorization information. 
Please contact your administrator') - return self.validate_user(code, request_handler) + return self.read_user(code, request_handler) - def is_active(self, user, request_handler): + def validate_user(self, user, request_handler): access_token = request_handler.get_secure_cookie('token') if access_token is None: return False access_token = access_token.decode("utf-8") + self.clean_and_persist_sessions() + if self.user_states.get(user) is None: LOGGER.debug("User %s not found in state" % user) return False - if self.user_states[user]['state'] is None or self.user_states[user]['state'] != "active": - LOGGER.info("User %s state inactive: " % user + str(self.user_states[user])) - del self.user_states[user] - self.dump_sessions_to_file() - return False - now = time.time() - # check session ttl - if self.session_expire and (self.user_states[user]['visit'] + self.session_expire) < now: - del self.user_states[user] - LOGGER.info("User %s session expired, logged out" % user) - self.dump_sessions_to_file() - return False - self.user_states[user]['visit'] = now # check gitlab response ttl, also check for stale updating (ttl*2) if self.gitlab_update is not None: - stale = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now + stale_update = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now ttl_expired = (self.user_states[user]['updated'] + self.gitlab_update) < now updating_now = self.user_states[user]['updating'] is True - if ttl_expired and (not updating_now or stale): + if ttl_expired and (not updating_now or stale_update): if self.gitlab_group_support: self.do_update_groups(user, access_token) else: @@ -203,31 +193,38 @@ def get_groups(self, user, known_groups=None): def logout(self, user, request_handler): request_handler.clear_cookie('token') - def clean_expired_sessions(self): + def clean_sessions(self): now = time.time() - if self.session_expire: - for userData in list(self.user_states.keys()): - if 
(self.user_states[userData]['visit'] + self.session_expire) < now: - LOGGER.debug("User %s session expired and removed" % userData) - del self.user_states[userData] - - def dump_sessions_to_file(self): + for user_data in list(self.user_states.keys()): + if self.session_expire and (self.user_states[user_data]['visit'] + self.session_expire) < now: + LOGGER.info("User %s removed because session expired" % user_data) + del self.user_states[user_data] + continue + if self.user_states[user_data]['state'] is None or self.user_states[user_data]['state'] != "active": + LOGGER.info("User %s removed because state '%s' != 'active'" % + (user_data, self.user_states[user_data]['state'])) + del self.user_states[user_data] + continue + + def clean_and_persist_sessions(self): + self.clean_sessions() if self.gitlab_dump: - dumpFile = open(self.gitlab_dump, "w") - dumpFile.write(escape.json_encode(self.user_states)) - dumpFile.close() - LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) + self.persist_session() + + def persist_session(self): + file_utils.write_file(self.gitlab_dump, escape.json_encode(self.user_states)) + LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) def do_update_user(self, user, access_token): self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_user_state, user, access_token) + tornado.ioloop.IOLoop.current().spawn_callback(self.update_user, user, access_token) def do_update_groups(self, user, access_token): self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_group_list, user, access_token) + tornado.ioloop.IOLoop.current().spawn_callback(self.update_groups, user, access_token) @gen.coroutine - def update_group_list(self, user, access_token): + def update_groups(self, user, access_token): group_list = yield self.read_groups(access_token) if group_list is None: LOGGER.error("Failed to refresh groups for %s" % user) @@ -239,12 +236,10 @@ def 
update_group_list(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_expired_sessions() - self.dump_sessions_to_file() - return + self.clean_and_persist_sessions() @gen.coroutine - def update_user_state(self, user, access_token): + def update_user(self, user, access_token): user_state = yield self.fetch_user(access_token) if user_state is None: LOGGER.error("Failed to fetch user %s" % user) @@ -256,8 +251,7 @@ def update_user_state(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_expired_sessions() - self.dump_sessions_to_file() + self.clean_and_persist_sessions() return @gen.coroutine @@ -288,7 +282,7 @@ def read_groups(self, access_token): return groups @gen.coroutine - def validate_user(self, code, request_handler): + def read_user(self, code, request_handler): user_response_future = self.get_authenticated_user( get_path_for_redirect(request_handler), self.client_id, @@ -316,10 +310,8 @@ def validate_user(self, code, request_handler): user_response['visit'] = time.time() user_response['updating'] = False oauth_access_token = user_response.pop('access_token') - oauth_refresh_token = user_response.pop('refresh_token') # not used atm self.user_states[user_response['email']] = user_response - self.clean_expired_sessions() - self.dump_sessions_to_file() + self.clean_and_persist_sessions() request_handler.set_secure_cookie('token', oauth_access_token) return user_response['email'] diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index b7d9e0ad..6caeff41 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -26,7 +26,7 @@ def is_authenticated(self, request_handler): if not username: return False - active = self.authenticator.is_active(username, request_handler) + active = self.authenticator.validate_user(username, 
request_handler) if not active: self.logout(request_handler) diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index 969591b3..6b17e83f 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -19,7 +19,7 @@ mock_time = Mock() mock_time.return_value = 10000.01 -mock_dump_sessions_to_file = Mock() +mock_persist_session = Mock() mock_do_update_groups = Mock() mock_do_update_user = Mock() mock_request_handler = Mock(**{'get_secure_cookie.return_value': "12345".encode()}) @@ -27,10 +27,9 @@ class TestAuthConfig(TestCase): @patch('time.time', mock_time) - @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.dump_sessions_to_file', mock_dump_sessions_to_file) + @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.persist_session', mock_persist_session) @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) def test_gitlab_oauth(self): - tmp = tempfile.mkstemp('.json', 'test_auth_gitlab-') now = time.time() state = { "user@test.com": { @@ -57,8 +56,7 @@ def test_gitlab_oauth(self): } } - os.write(tmp[0], str.encode(escape.json_encode(state))) - os.fsync(tmp[0]) + state_file = test_utils.create_file("gitlab_state.json", text=escape.json_encode(state)) config = _from_json({ 'auth': { @@ -68,7 +66,7 @@ def test_gitlab_oauth(self): "secret": "abcd", "group_search": "script-server", "auth_info_ttl": 80, - "state_dump_file": tmp[1], + "state_dump_file": state_file, "session_expire_minutes": 10 }, 'access': { @@ -76,7 +74,7 @@ def test_gitlab_oauth(self): }}) self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEqual(tmp[1], config.authenticator.gitlab_dump) + self.assertEqual(state_file, config.authenticator.gitlab_dump) self.assertEqual("1234", config.authenticator._client_visible_config['client_id']) self.assertEqual("https://gitlab/oauth/authorize", config.authenticator._client_visible_config['oauth_url']) self.assertEqual("api", 
config.authenticator._client_visible_config['oauth_scope']) @@ -88,23 +86,23 @@ def test_gitlab_oauth(self): self.assertDictEqual(assert_state, config.authenticator.user_states) saved_state = copy.deepcopy(config.authenticator.user_states) - self.assertEqual(False, config.authenticator.is_active("unknown@test.com", mock_request_handler)) - self.assertEqual(False, config.authenticator.is_active("nogroups@test.com", mock_request_handler)) + self.assertEqual(False, config.authenticator.validate_user("unknown@test.com", mock_request_handler)) + self.assertEqual(False, config.authenticator.validate_user("nogroups@test.com", mock_request_handler)) self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) self.assertListEqual([], config.authenticator.get_groups("nogroups@test.com")) - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"], "visit updated") self.assertEqual(True, mock_do_update_groups.called, "state just loaded, gitlab updating") mock_do_update_groups.reset_mock() config.authenticator.user_states["user@test.com"]["updating"] = True - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "do not call parallel updated") mock_do_update_groups.reset_mock() mock_time.return_value = 10000.01 + 80*2 + 1 # stale request - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(True, mock_do_update_groups.called, "parallel but stale") mock_do_update_groups.reset_mock() 
config.authenticator.user_states = copy.deepcopy(saved_state) @@ -112,12 +110,12 @@ def test_gitlab_oauth(self): config.authenticator.user_states["user@test.com"]['updated'] = now # gitlab info updated config.authenticator.user_states["user@test.com"]['updating'] = False - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "do not update gitlab because ttl not expired") mock_do_update_groups.reset_mock() mock_time.return_value = 10000.01 + 81 - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(True, mock_do_update_groups.called, "ttl expired") mock_do_update_groups.reset_mock() config.authenticator.user_states = copy.deepcopy(saved_state) @@ -125,9 +123,9 @@ def test_gitlab_oauth(self): # session expire test mock_time.return_value = 10000.01 + 601 - self.assertEqual(False, config.authenticator.is_active("user@test.com", mock_request_handler), "shoud be expired") - self.assertEqual(True, mock_dump_sessions_to_file.called, "dump state to file") - mock_dump_sessions_to_file.reset_mock() + self.assertEqual(False, config.authenticator.validate_user("user@test.com", mock_request_handler), "shoud be expired") + self.assertEqual(True, mock_persist_session.called, "dump state to file") + mock_persist_session.reset_mock() self.assertIsNone(config.authenticator.user_states.get("user@test.com"), "removed from state") self.assertListEqual([], config.authenticator.get_groups("user@test.com")) config.authenticator.user_states = copy.deepcopy(saved_state) @@ -135,14 +133,11 @@ def test_gitlab_oauth(self): # test clean expire mock_time.return_value = 10000.01 + 601 - config.authenticator.clean_expired_sessions() + 
config.authenticator.clean_sessions() self.assertIsNone(config.authenticator.user_states.get("user@test.com")) config.authenticator.user_states = copy.deepcopy(saved_state) mock_time.return_value = 10000.01 - os.close(tmp[0]) - os.unlink(tmp[1]) - @patch('time.time', mock_time) @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_user', mock_do_update_user) @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) @@ -181,19 +176,22 @@ def test_gitlab_oauth_user_read_scope(self): self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) self.assertEqual("read_user", config.authenticator._client_visible_config['oauth_scope']) config.authenticator.user_states = state - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "update==0, gitlab updating but not groups") self.assertEqual(True, mock_do_update_user.called, "update==0, gitlab updating only user") mock_do_update_groups.reset_mock() mock_do_update_user.reset_mock() config.authenticator.gitlab_update = None - self.assertEqual(True, config.authenticator.is_active("user@test.com", mock_request_handler)) + self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) self.assertEqual(False, mock_do_update_groups.called, "gitab update disabled") self.assertEqual(False, mock_do_update_user.called, "gitab update disabled") mock_do_update_groups.reset_mock() mock_do_update_user.reset_mock() + def tearDown(self): + test_utils.cleanup() + def _from_json(content): json_obj = json.dumps(content) diff --git a/src/web/server.py b/src/web/server.py index 9870c77e..029f4e80 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -174,10 +174,7 @@ def wrapper(self, *args, **kwargs): def has_admin_rights(request_handler): - try: - user_id = 
_identify_user(request_handler) - except Exception: - return False + user_id = _identify_user(request_handler) return request_handler.application.authorizer.is_admin(user_id) @@ -696,10 +693,15 @@ def get(self): if auth.is_enabled(): username = auth.get_username(self) + try: + admin_rights = has_admin_rights(self) + except Exception: + admin_rights = False + info = { 'enabled': auth.is_enabled(), 'username': username, - 'admin': has_admin_rights(self) + 'admin': admin_rights } self.write(info) diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index 0d30118d..94b89572 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -57,40 +57,26 @@ function setupCredentials(loginContainer) { } function setupGoogleOAuth(loginContainer, authConfig) { - var credentialsTemplate = createTemplateElement('login-google_oauth-template'); - loginContainer.appendChild(credentialsTemplate); - - var oauthLoginButton = document.getElementById('login-google_oauth-button'); - oauthLoginButton.onclick = function () { - var token = guid(32); - - var localState = { - 'token': token, - 'urlFragment': window.location.hash - }; - localState[NEXT_URL_KEY] = getQueryParameter(NEXT_URL_KEY); - - saveState(localState); - - const queryArgs = { - 'redirect_uri': getUnparameterizedUrl(), - 'state': token, - 'client_id': authConfig['client_id'], - 'scope': authConfig['oauth_scope'], - 'response_type': OAUTH_RESPONSE_KEY - }; - const query = toQueryArgs(queryArgs); - window.location = authConfig['oauth_url'] + '?' 
+ query; - }; - - processCurrentOauthState(); + setupOAuth( + loginContainer, + authConfig, + 'login-google_oauth-template', + 'login-google_oauth-button') } function setupGitlabOAuth(loginContainer, authConfig) { - var credentialsTemplate = createTemplateElement('login-gitlab-template'); + setupOAuth( + loginContainer, + authConfig, + 'login-gitlab-template', + 'login-gitlab-button') +} + +function setupOAuth(loginContainer, authConfig, templateName, buttonId) { + var credentialsTemplate = createTemplateElement(templateName); loginContainer.appendChild(credentialsTemplate); - var oauthLoginButton = document.getElementById('login-gitlab-button'); + var oauthLoginButton = document.getElementById(buttonId); oauthLoginButton.onclick = function () { var token = guid(32); From 29c0c75b23222445d2dae6d7dd500ddeb57ce13c Mon Sep 17 00:00:00 2001 From: MiksIr Date: Wed, 10 Jun 2020 13:16:04 +0300 Subject: [PATCH 012/398] Removing unnecessary persists --- src/auth/auth_gitlab.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index 76064350..bf5ea48c 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -161,7 +161,7 @@ def validate_user(self, user, request_handler): return False access_token = access_token.decode("utf-8") - self.clean_and_persist_sessions() + self.validate_sessions() if self.user_states.get(user) is None: LOGGER.debug("User %s not found in state" % user) @@ -195,20 +195,24 @@ def logout(self, user, request_handler): def clean_sessions(self): now = time.time() + changed = False for user_data in list(self.user_states.keys()): if self.session_expire and (self.user_states[user_data]['visit'] + self.session_expire) < now: LOGGER.info("User %s removed because session expired" % user_data) del self.user_states[user_data] + changed = True continue if self.user_states[user_data]['state'] is None or self.user_states[user_data]['state'] != "active": LOGGER.info("User 
%s removed because state '%s' != 'active'" % (user_data, self.user_states[user_data]['state'])) del self.user_states[user_data] + changed = True continue + return changed - def clean_and_persist_sessions(self): - self.clean_sessions() - if self.gitlab_dump: + def validate_sessions(self, force_persist=False): + changed = self.clean_sessions() + if self.gitlab_dump and (changed or force_persist): self.persist_session() def persist_session(self): @@ -236,7 +240,7 @@ def update_groups(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_and_persist_sessions() + self.validate_sessions(force_persist=True) @gen.coroutine def update_user(self, user, access_token): @@ -251,7 +255,7 @@ def update_user(self, user, access_token): self.user_states[user]['updating'] = False self.user_states[user]['updated'] = now self.user_states[user]['visit'] = now - self.clean_and_persist_sessions() + self.validate_sessions(force_persist=True) return @gen.coroutine @@ -311,7 +315,7 @@ def read_user(self, code, request_handler): user_response['updating'] = False oauth_access_token = user_response.pop('access_token') self.user_states[user_response['email']] = user_response - self.clean_and_persist_sessions() + self.validate_sessions(force_persist=True) request_handler.set_secure_cookie('token', oauth_access_token) return user_response['email'] From 67c105df4356f83c2f46b842f3e8a323e742dfd2 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 12 Jun 2020 11:02:28 +0200 Subject: [PATCH 013/398] #305 moved gitlab and google oauth shared code to a common class --- src/auth/auth_abstract_oauth.py | 322 ++++++++++++++++++++ src/auth/auth_gitlab.py | 323 ++------------------- src/auth/auth_google_oauth.py | 107 +------ src/auth/tornado_auth.py | 3 +- src/tests/auth/test_auth_abstract_oauth.py | 168 +++++++++++ src/tests/server_conf_test.py | 4 +- 6 files changed, 536 insertions(+), 391 
deletions(-) create mode 100644 src/auth/auth_abstract_oauth.py create mode 100644 src/tests/auth/test_auth_abstract_oauth.py diff --git a/src/auth/auth_abstract_oauth.py b/src/auth/auth_abstract_oauth.py new file mode 100644 index 00000000..fa617f96 --- /dev/null +++ b/src/auth/auth_abstract_oauth.py @@ -0,0 +1,322 @@ +import abc +import json +import logging +import os +import threading +import time +import urllib.parse as urllib_parse +from collections import namedtuple, defaultdict +from typing import Dict + +import tornado +import tornado.ioloop +from tornado import httpclient, escape + +from auth import auth_base +from auth.auth_base import AuthFailureError, AuthBadRequestException +from model import model_helper +from model.model_helper import read_bool_from_config, read_int_from_config +from model.server_conf import InvalidServerConfigException +from utils import file_utils + +LOGGER = logging.getLogger('script_server.AbstractOauthAuthenticator') + + +class _UserState: + def __init__(self, username) -> None: + self.username = username + self.groups = [] + self.last_auth_update = None + self.last_visit = None + + +_OauthUserInfo = namedtuple('_OauthUserInfo', ['email', 'enabled', 'oauth_response']) + + +# noinspection PyProtectedMember +class AbstractOauthAuthenticator(auth_base.Authenticator, metaclass=abc.ABCMeta): + def __init__(self, oauth_authorize_url, oauth_token_url, oauth_scope, params_dict): + super().__init__() + + self.oauth_token_url = oauth_token_url + self.oauth_scope = oauth_scope + + self.client_id = model_helper.read_obligatory(params_dict, 'client_id', ' for OAuth') + secret_value = model_helper.read_obligatory(params_dict, 'secret', ' for OAuth') + self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) + + self._client_visible_config['client_id'] = self.client_id + self._client_visible_config['oauth_url'] = oauth_authorize_url + self._client_visible_config['oauth_scope'] = oauth_scope + + self.group_support = 
read_bool_from_config('group_support', params_dict, default=True) + self.auth_info_ttl = params_dict.get('auth_info_ttl') + self.session_expire = read_int_from_config('session_expire_minutes', params_dict, default=0) * 60 + self.dump_file = params_dict.get('state_dump_file') + + if self.dump_file: + self._validate_dump_file(self.dump_file) + + self._users = {} # type: Dict[str, _UserState] + self._user_locks = defaultdict(lambda: threading.Lock()) + + self.timer = None + if self.dump_file: + self._restore_state() + + self._schedule_dump_task() + + @staticmethod + def _validate_dump_file(dump_file): + if os.path.isdir(dump_file): + raise InvalidServerConfigException('Please specify dump FILE instead of folder for OAuth') + dump_folder = os.path.abspath(os.path.dirname(dump_file)) + if not os.path.exists(dump_folder): + raise InvalidServerConfigException('OAuth dump file folder does not exist: ' + dump_folder) + + async def authenticate(self, request_handler): + code = request_handler.get_argument('code', False) + + if not code: + LOGGER.error('Code is not specified') + raise AuthBadRequestException('Missing authorization information. Please contact your administrator') + + access_token = await self.fetch_access_token(code, request_handler) + user_info = await self.fetch_user_info(access_token) + + user_email = user_info.email + if not user_email: + error_message = 'No email field in user response. The response: ' + str(user_info.oauth_response) + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + if not user_info.enabled: + error_message = 'User %s is not enabled in OAuth provider. 
The response: %s' \ + % (user_email, str(user_info.oauth_response)) + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + user_state = _UserState(user_email) + self._users[user_email] = user_state + + if self.group_support: + user_groups = await self.fetch_user_groups(access_token) + user_state.groups = user_groups + + now = time.time() + + if self.auth_info_ttl: + request_handler.set_secure_cookie('token', access_token) + user_state.last_auth_update = now + + user_state.last_visit = now + + return user_email + + def validate_user(self, user, request_handler): + if not user: + LOGGER.warning('Username is not available') + return False + + now = time.time() + + user_state = self._users.get(user) + if not user_state: + # if nothing is enabled, it's ok not to have user state (e.g. after server restart) + if self.session_expire <= 0 and not self.auth_info_ttl and not self.group_support: + return True + else: + LOGGER.info('User %s state is missing', user) + return False + + if self.session_expire > 0: + last_visit = user_state.last_visit + if (last_visit is None) or ((last_visit + self.session_expire) < now): + LOGGER.info('User %s state is expired', user) + return False + + user_state.last_visit = now + + if self.auth_info_ttl: + access_token = request_handler.get_secure_cookie('token') + if access_token is None: + LOGGER.info('User %s token is not available', user) + return False + + self.update_user_auth(user, user_state, access_token) + + return True + + def get_groups(self, user, known_groups=None): + user_state = self._users.get(user) + if not user_state: + return [] + + return user_state.groups + + def logout(self, user, request_handler): + request_handler.clear_cookie('token') + self._remove_user(user) + + self._dump_state() + + def _remove_user(self, user): + if user in self._users: + del self._users[user] + + async def fetch_access_token(self, code, request_handler): + body = urllib_parse.urlencode({ + 'redirect_uri': 
get_path_for_redirect(request_handler), + 'code': code, + 'client_id': self.client_id, + 'client_secret': self.secret, + 'grant_type': 'authorization_code', + }) + http_client = httpclient.AsyncHTTPClient() + response = await http_client.fetch( + self.oauth_token_url, + method='POST', + headers={'Content-Type': 'application/x-www-form-urlencoded'}, + body=body, + raise_error=False) + + response_values = {} + if response.body: + response_values = escape.json_decode(response.body) + + if response.error: + if response_values.get('error_description'): + error_text = response_values.get('error_description') + elif response_values.get('error'): + error_text = response_values.get('error') + else: + error_text = str(response.error) + + error_message = 'Failed to load access_token: ' + error_text + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + response_values = escape.json_decode(response.body) + access_token = response_values.get('access_token') + + if not access_token: + message = 'No access token in response: ' + str(response.body) + LOGGER.error(message) + raise AuthFailureError(message) + + return access_token + + def update_user_auth(self, username, user_state, access_token): + now = time.time() + ttl_expired = (user_state.last_auth_update is None) \ + or ((user_state.last_auth_update + self.auth_info_ttl) < now) + + if not ttl_expired: + return + + tornado.ioloop.IOLoop.current().spawn_callback( + self._do_update_user_auth_async, + username, + user_state, + access_token) + + async def _do_update_user_auth_async(self, username, user_state, access_token): + lock = self._user_locks[username] + + with lock: + now = time.time() + + ttl_expired = (user_state.last_auth_update is None) \ + or ((user_state.last_auth_update + self.auth_info_ttl) < now) + + if not ttl_expired: + return + + LOGGER.info('User %s state expired, refreshing', username) + + user_info = await self.fetch_user_info(access_token) # type: _OauthUserInfo + if (not user_info) or 
(not user_info.email): + LOGGER.error('Failed to fetch user info: %s', str(user_info.oauth_response)) + self._remove_user(username) + return + + if not user_info.enabled: + LOGGER.error('User %s, was deactivated on OAuth server. New state: %s', username, + str(user_info.oauth_response)) + self._remove_user(username) + return + + if self.group_support: + try: + user_groups = await self.fetch_user_groups(access_token) + user_state.groups = user_groups + except AuthFailureError: + LOGGER.error('Failed to fetch user %s groups', username) + self._remove_user(username) + return + + user_state.last_auth_update = now + + def _restore_state(self): + if not os.path.exists(self.dump_file): + LOGGER.info('OAuth dump file is missing. Nothing to restore') + return + + dump_data = file_utils.read_file(self.dump_file) + dump_json = json.loads(dump_data) + + for user_state in dump_json: + username = user_state.get('username') + if not username: + LOGGER.warning('Missing username in ' + str(user_state)) + continue + + state = _UserState(username) + self._users[username] = state + state.groups = user_state.get('groups', []) + state.last_auth_update = user_state.get('last_auth_update') + state.last_visit = user_state.get('last_visit') + + def _schedule_dump_task(self): + def repeating_dump(): + try: + self._dump_state() + finally: + self._schedule_dump_task() + + self.timer = threading.Timer(30, repeating_dump) + self.timer.setDaemon(True) + self.timer.start() + + def _dump_state(self): + if self.dump_file: + states = [s.__dict__ for s in self._users.values()] + state_json = json.dumps(states) + file_utils.write_file(self.dump_file, state_json) + + @abc.abstractmethod + async def fetch_user_info(self, access_token: str) -> _OauthUserInfo: + pass + + @abc.abstractmethod + async def fetch_user_groups(self, access_token): + pass + + # Tests only + def _cleanup(self): + if self.timer: + self.timer.cancel() + + +def get_path_for_redirect(request_handler): + referer = 
request_handler.request.headers.get('Referer') + if not referer: + LOGGER.error('No referer') + raise AuthFailureError('Missing request header. Please contact system administrator') + + parse_result = urllib_parse.urlparse(referer) + protocol = parse_result[0] + host = parse_result[1] + path = parse_result[2] + + return urllib_parse.urlunparse((protocol, host, path, '', '', '')) diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index bf5ea48c..f5e5b799 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -1,279 +1,58 @@ -import json import logging -import os -import time -import urllib.parse as urllib_parse -import tornado.auth -import tornado.ioloop from tornado.auth import OAuth2Mixin -from tornado import gen, httpclient, escape -from auth import auth_base -from auth.auth_base import AuthFailureError, AuthBadRequestException -from model import model_helper - -from typing import List, Any, Dict, cast, Iterable, Union, Optional - -from utils import file_utils +from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo +from auth.auth_base import AuthFailureError LOGGER = logging.getLogger('script_server.GitlabAuthorizer') +_OAUTH_AUTHORIZE_URL = '%s/oauth/authorize' +_OAUTH_ACCESS_TOKEN_URL = '%s/oauth/token' +_OAUTH_GITLAB_USERINFO = '%s/api/v4/user' +_OAUTH_GITLAB_GROUPS = '%s/api/v4/groups' -class GitlabOAuth2Mixin(OAuth2Mixin): - _OAUTH_AUTHORIZE_URL = '%s/oauth/authorize' - _OAUTH_ACCESS_TOKEN_URL = '%s/oauth/token' - _OAUTH_GITLAB_USERINFO = '%s/api/v4/user' - _OAUTH_GITLAB_GROUPS = '%s/api/v4/groups' - _GITLAB_PREFIX = 'https://gitlab.com' - - async def oauth2_request(self, url: str, access_token: str = None, post_args: Dict[str, Any] = None, - **args: Any) -> Any: - try: - return await super().oauth2_request(url, access_token, post_args, **args) - except tornado.httpclient.HTTPClientError as e: - LOGGER.error("HTTP error " + str(e.message)) - return None - - async def get_authenticated_user( - self, - 
redirect_uri: str, - client_id: str, - client_secret: str, - code: str, - ) -> Optional[Dict[str, Any]]: - http = self.get_auth_http_client() - args = { - "redirect_uri": redirect_uri, - "code": code, - "client_id": client_id, - "client_secret": client_secret, - "grant_type": "authorization_code", - } - - body = urllib_parse.urlencode(args) - http_client = httpclient.AsyncHTTPClient() - response = await http_client.fetch( - self._OAUTH_ACCESS_TOKEN_URL % self._GITLAB_PREFIX, - method='POST', - headers={'Content-Type': 'application/x-www-form-urlencoded'}, - body=body, - raise_error=False) - - default_response_values = {"state": "unknown"} - response_values = {} - if response.body: - response_values = escape.json_decode(response.body) - - if response.error: - if response_values.get('error_description'): - error_text = response_values.get('error_description') - elif response_values.get('error'): - error_text = response_values.get('error') - else: - error_text = str(response.error) - - error_message = 'Failed to load access_token: ' + error_text - LOGGER.error(error_message) - raise AuthFailureError(error_message) - - access_token = response_values.get('access_token') - - if not access_token: - message = 'No access token in response: ' + str(response.body) - LOGGER.error(message) - raise AuthFailureError(message) - - user = await self.fetch_user(access_token) - - if user is None: - error_message = 'Failed to load user info' - LOGGER.error(error_message) - raise AuthFailureError(error_message) - - return {**default_response_values, **response_values, **user} - - async def fetch_user(self, access_token): - user = await self.oauth2_request( - self._OAUTH_GITLAB_USERINFO % self._GITLAB_PREFIX, - access_token) - if user is None: - return None - - fieldmap = {} - for field in {"id", "username", "name", "email", "state"}: - fieldmap[field] = user.get(field) - - return fieldmap # noinspection PyProtectedMember -class GitlabOAuthAuthenticator(auth_base.Authenticator, 
GitlabOAuth2Mixin): +class GitlabOAuthAuthenticator(AbstractOauthAuthenticator, OAuth2Mixin): def __init__(self, params_dict): - super().__init__() - - LOGGER.debug("Init gitlab oauth provider with " + str(params_dict)) - - self.client_id = model_helper.read_obligatory(params_dict, 'client_id', ' for Gitlab OAuth') - - secret_value = model_helper.read_obligatory(params_dict, 'secret', ' for Gitlab OAuth') - self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) - - gitlab_prefix = params_dict.get('url') - if not model_helper.is_empty(gitlab_prefix): - self._GITLAB_PREFIX = gitlab_prefix - - self.states = {} - self.user_states = {} - self.gitlab_update = params_dict.get('auth_info_ttl') - self.gitlab_dump = params_dict.get('state_dump_file') + self.gitlab_host = params_dict.get('url', 'https://gitlab.com') self.gitlab_group_support = params_dict.get('group_support', True) - self.session_expire = int(params_dict.get('session_expire_minutes', 0)) * 60 - now = time.time() - if self.gitlab_dump and os.path.exists(self.gitlab_dump): - state_str = file_utils.read_file(self.gitlab_dump) - self.user_states = escape.json_decode(state_str) - for user_data in list(self.user_states.keys()): - # force to update user from gitlab - self.user_states[user_data]['updating'] = False - if self.gitlab_update: - self.user_states[user_data]['updated'] = now - self.gitlab_update - 1 - LOGGER.info("Readed state from file %s: " % self.gitlab_dump + str(self.user_states)) + super().__init__( + _OAUTH_AUTHORIZE_URL % self.gitlab_host, + _OAUTH_ACCESS_TOKEN_URL % self.gitlab_host, + 'api' if self.gitlab_group_support else 'read_user', + params_dict) self.gitlab_group_search = params_dict.get('group_search') - self._client_visible_config['client_id'] = self.client_id - self._client_visible_config['oauth_url'] = self._OAUTH_AUTHORIZE_URL % self._GITLAB_PREFIX - self._client_visible_config['oauth_scope'] = 'api' if self.gitlab_group_support else 'read_user' - - def 
authenticate(self, request_handler): - code = request_handler.get_argument('code', False) - - if not code: - LOGGER.error('Code is not specified') - raise AuthBadRequestException('Missing authorization information. Please contact your administrator') - - return self.read_user(code, request_handler) - - def validate_user(self, user, request_handler): - access_token = request_handler.get_secure_cookie('token') - if access_token is None: - return False - access_token = access_token.decode("utf-8") - - self.validate_sessions() - - if self.user_states.get(user) is None: - LOGGER.debug("User %s not found in state" % user) - return False - - now = time.time() - self.user_states[user]['visit'] = now - - # check gitlab response ttl, also check for stale updating (ttl*2) - if self.gitlab_update is not None: - stale_update = (self.user_states[user]['updated'] + max(self.gitlab_update*2, 60)) < now - ttl_expired = (self.user_states[user]['updated'] + self.gitlab_update) < now - updating_now = self.user_states[user]['updating'] is True - if ttl_expired and (not updating_now or stale_update): - if self.gitlab_group_support: - self.do_update_groups(user, access_token) - else: - self.do_update_user(user, access_token) - - return True - - def get_groups(self, user, known_groups=None): - if self.user_states.get(user) is None: - return [] - if self.user_states[user]['groups'] is None: - return [] - return self.user_states[user]['groups'] - - def logout(self, user, request_handler): - request_handler.clear_cookie('token') - - def clean_sessions(self): - now = time.time() - changed = False - for user_data in list(self.user_states.keys()): - if self.session_expire and (self.user_states[user_data]['visit'] + self.session_expire) < now: - LOGGER.info("User %s removed because session expired" % user_data) - del self.user_states[user_data] - changed = True - continue - if self.user_states[user_data]['state'] is None or self.user_states[user_data]['state'] != "active": - LOGGER.info("User %s 
removed because state '%s' != 'active'" % - (user_data, self.user_states[user_data]['state'])) - del self.user_states[user_data] - changed = True - continue - return changed - - def validate_sessions(self, force_persist=False): - changed = self.clean_sessions() - if self.gitlab_dump and (changed or force_persist): - self.persist_session() - - def persist_session(self): - file_utils.write_file(self.gitlab_dump, escape.json_encode(self.user_states)) - LOGGER.debug("Dumped state to file %s" % self.gitlab_dump) - - def do_update_user(self, user, access_token): - self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_user, user, access_token) - - def do_update_groups(self, user, access_token): - self.user_states[user]['updating'] = True - tornado.ioloop.IOLoop.current().spawn_callback(self.update_groups, user, access_token) - - @gen.coroutine - def update_groups(self, user, access_token): - group_list = yield self.read_groups(access_token) - if group_list is None: - LOGGER.error("Failed to refresh groups for %s" % user) - self.user_states[user]['state'] = "error" - else: - LOGGER.info("Groups for %s refreshed: " % user + str(group_list)) - self.user_states[user]['groups'] = group_list - now = time.time() - self.user_states[user]['updating'] = False - self.user_states[user]['updated'] = now - self.user_states[user]['visit'] = now - self.validate_sessions(force_persist=True) + async def fetch_user_info(self, access_token) -> _OauthUserInfo: + user = await self.oauth2_request( + _OAUTH_GITLAB_USERINFO % self.gitlab_host, + access_token) + if user is None: + return None - @gen.coroutine - def update_user(self, user, access_token): - user_state = yield self.fetch_user(access_token) - if user_state is None: - LOGGER.error("Failed to fetch user %s" % user) - self.user_states[user]['state'] = "error" - else: - LOGGER.info("User %s refreshed: " % user + str(user_state)) - self.user_states[user] = {**self.user_states[user], 
**user_state} - now = time.time() - self.user_states[user]['updating'] = False - self.user_states[user]['updated'] = now - self.user_states[user]['visit'] = now - self.validate_sessions(force_persist=True) - return + active = user.get('state') == 'active' + return _OauthUserInfo(user.get('email'), active, user) - @gen.coroutine - def read_groups(self, access_token): + async def fetch_user_groups(self, access_token): args = { 'access_token': access_token, 'all_available': 'false', 'per_page': 100, } - if not self.gitlab_group_search is None: + + if self.gitlab_group_search is not None: args['search'] = self.gitlab_group_search group_list_future = self.oauth2_request( - self._OAUTH_GITLAB_GROUPS % self._GITLAB_PREFIX, + _OAUTH_GITLAB_GROUPS % self.gitlab_host, **args ) - group_list = yield group_list_future + group_list = await group_list_future if group_list is None: return None @@ -283,53 +62,9 @@ def read_groups(self, access_token): if group.get('full_path'): groups.append(group['full_path']) - return groups - - @gen.coroutine - def read_user(self, code, request_handler): - user_response_future = self.get_authenticated_user( - get_path_for_redirect(request_handler), - self.client_id, - self.secret, - code - ) - user_response = yield user_response_future - - if user_response.get('email') is None: - error_message = 'No email field in user response. 
The response: ' + str(user_response) + if groups is None: + error_message = 'Cant read user groups' LOGGER.error(error_message) raise AuthFailureError(error_message) - user_groups = [] - if self.gitlab_group_support: - user_groups = yield self.read_groups(user_response.get('access_token')) - if user_groups is None: - error_message = 'Cant read user groups' - LOGGER.error(error_message) - raise AuthFailureError(error_message) - - LOGGER.info("User %s group list: " % user_response['email'] + str(user_groups)) - user_response['groups'] = user_groups - user_response['updated'] = time.time() - user_response['visit'] = time.time() - user_response['updating'] = False - oauth_access_token = user_response.pop('access_token') - self.user_states[user_response['email']] = user_response - self.validate_sessions(force_persist=True) - request_handler.set_secure_cookie('token', oauth_access_token) - - return user_response['email'] - - -def get_path_for_redirect(request_handler): - referer = request_handler.request.headers.get('Referer') - if not referer: - LOGGER.error('No referer') - raise AuthFailureError('Missing request header. 
Please contact system administrator') - - parse_result = urllib_parse.urlparse(referer) - protocol = parse_result[0] - host = parse_result[1] - path = parse_result[2] - - return urllib_parse.urlunparse((protocol, host, path, '', '', '')) + return groups diff --git a/src/auth/auth_google_oauth.py b/src/auth/auth_google_oauth.py index 5e038b00..9d648042 100644 --- a/src/auth/auth_google_oauth.py +++ b/src/auth/auth_google_oauth.py @@ -1,111 +1,30 @@ import logging -import urllib.parse as urllib_parse import tornado.auth -from tornado import gen, httpclient, escape -from auth import auth_base -from auth.auth_base import AuthFailureError, AuthBadRequestException -from model import model_helper +from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo LOGGER = logging.getLogger('script_server.GoogleOauthAuthorizer') # noinspection PyProtectedMember -class GoogleOauthAuthenticator(auth_base.Authenticator): +class GoogleOauthAuthenticator(AbstractOauthAuthenticator): def __init__(self, params_dict): - super().__init__() - - self.client_id = model_helper.read_obligatory(params_dict, 'client_id', ' for Google OAuth') - - secret_value = model_helper.read_obligatory(params_dict, 'secret', ' for Google OAuth') - self.secret = model_helper.resolve_env_vars(secret_value, full_match=True) - - self.states = {} - - self._client_visible_config['client_id'] = self.client_id - self._client_visible_config['oauth_url'] = tornado.auth.GoogleOAuth2Mixin._OAUTH_AUTHORIZE_URL - self._client_visible_config['oauth_scope'] = 'email' - - def authenticate(self, request_handler): - code = request_handler.get_argument('code', False) - - if not code: - LOGGER.error('Code is not specified') - raise AuthBadRequestException('Missing authorization information. 
Please contact your administrator') - - return self.read_user(code, request_handler) - - @gen.coroutine - def read_user(self, code, request_handler): - access_token = yield self.get_access_token(code, request_handler) + super().__init__(tornado.auth.GoogleOAuth2Mixin._OAUTH_AUTHORIZE_URL, + tornado.auth.GoogleOAuth2Mixin._OAUTH_ACCESS_TOKEN_URL, + 'email', + params_dict) + async def fetch_user_info(self, access_token) -> _OauthUserInfo: oauth_mixin = tornado.auth.GoogleOAuth2Mixin() user_future = oauth_mixin.oauth2_request( tornado.auth.GoogleOAuth2Mixin._OAUTH_USERINFO_URL, access_token=access_token) - user_response = yield user_future - - if user_response.get('email'): - return user_response.get('email') - - error_message = 'No email field in user response. The response: ' + str(user_response) - LOGGER.error(error_message) - raise AuthFailureError(error_message) - - @gen.coroutine - def get_access_token(self, code, request_handler): - body = urllib_parse.urlencode({ - 'redirect_uri': get_path_for_redirect(request_handler), - 'code': code, - 'client_id': self.client_id, - 'client_secret': self.secret, - 'grant_type': 'authorization_code', - }) - http_client = httpclient.AsyncHTTPClient() - response = yield http_client.fetch( - tornado.auth.GoogleOAuth2Mixin._OAUTH_ACCESS_TOKEN_URL, - method='POST', - headers={'Content-Type': 'application/x-www-form-urlencoded'}, - body=body, - raise_error=False) - - response_values = {} - if response.body: - response_values = escape.json_decode(response.body) - - if response.error: - if response_values.get('error_description'): - error_text = response_values.get('error_description') - elif response_values.get('error'): - error_text = response_values.get('error') - else: - error_text = str(response.error) - - error_message = 'Failed to load access_token: ' + error_text - LOGGER.error(error_message) - raise AuthFailureError(error_message) - - response_values = escape.json_decode(response.body) - access_token = 
response_values.get('access_token') - - if not access_token: - message = 'No access token in response: ' + str(response.body) - LOGGER.error(message) - raise AuthFailureError(message) - - return access_token - - -def get_path_for_redirect(request_handler): - referer = request_handler.request.headers.get('Referer') - if not referer: - LOGGER.error('No referer') - raise AuthFailureError('Missing request header. Please contact system administrator') + user_response = await user_future + if not user_response: + return None - parse_result = urllib_parse.urlparse(referer) - protocol = parse_result[0] - host = parse_result[1] - path = parse_result[2] + return _OauthUserInfo(user_response.get('email'), True, user_response) - return urllib_parse.urlunparse((protocol, host, path, '', '', '')) + async def fetch_user_groups(self, access_token): + return [] diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 6caeff41..17040f3e 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -1,3 +1,4 @@ +import asyncio import logging import tornado.concurrent @@ -54,7 +55,7 @@ def authenticate(self, request_handler): try: username = self.authenticator.authenticate(request_handler) - if isinstance(username, tornado.concurrent.Future): + if asyncio.iscoroutine(username): username = yield username except auth_base.AuthRejectedError as e: diff --git a/src/tests/auth/test_auth_abstract_oauth.py b/src/tests/auth/test_auth_abstract_oauth.py new file mode 100644 index 00000000..55e58f8d --- /dev/null +++ b/src/tests/auth/test_auth_abstract_oauth.py @@ -0,0 +1,168 @@ +import json +import json +import os +import random +import unittest +from unittest import TestCase +from unittest.mock import Mock + +from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo +from model import server_conf +from model.server_conf import InvalidServerConfigException +from tests import test_utils +from utils import file_utils + +if __name__ == '__main__': + 
unittest.main() + +mock_time = Mock() +mock_time.return_value = 10000.01 +mock_request_handler = Mock(**{'get_secure_cookie.return_value': '12345'.encode()}) + + +class TestAuthConfig(TestCase): + def test_client_visible_config(self): + authenticator = self.create_test_authenticator() + + client_visible_config = authenticator._client_visible_config + self.assertEqual('1234', client_visible_config['client_id']) + self.assertEqual('authorize_url', client_visible_config['oauth_url']) + self.assertEqual('test_scope', client_visible_config['oauth_scope']) + + def test_config_values(self): + dump_file_path = os.path.join(test_utils.temp_folder, 'dump.json') + authenticator = self.create_test_authenticator(dump_file=dump_file_path, session_expire_minutes=10) + + self.assertEqual('1234', authenticator.client_id) + self.assertEqual('abcd', authenticator.secret) + self.assertEqual(True, authenticator.group_support) + self.assertEqual(80, authenticator.auth_info_ttl) + self.assertEqual(600, authenticator.session_expire) + self.assertEqual(dump_file_path, authenticator.dump_file) + + def test_group_support_disabled(self): + authenticator = self.create_test_authenticator(group_support=False) + + self.assertEqual(False, authenticator.group_support) + + def test_no_session_expire(self): + authenticator = self.create_test_authenticator() + + self.assertEqual(0, authenticator.session_expire) + + def test_dump_file_when_folder(self): + self.assertRaisesRegex( + InvalidServerConfigException, + 'dump FILE instead of folder', + self.create_test_authenticator, + dump_file=test_utils.temp_folder) + + def test_dump_file_when_folder_not_exists(self): + self.assertRaisesRegex( + InvalidServerConfigException, + 'OAuth dump file folder does not exist', + self.create_test_authenticator, + dump_file=os.path.join(test_utils.temp_folder, 'sub', 'dump.json')) + + def test_restore_dump_state_when_no_file(self): + dump_file_path = os.path.join(test_utils.temp_folder, 'dump.json') + authenticator = 
self.create_test_authenticator(dump_file=dump_file_path) + + self.assertEqual({}, authenticator._users) + + def test_restore_dump_state_when_multiple_users(self): + dump_file = test_utils.create_file('dump.json', text=json.dumps( + [{'username': 'User_X', 'groups': ['group1', 'group2'], 'last_auth_update': 123}, + {'username': 'User_Y', 'last_visit': 456}])) + authenticator = self.create_test_authenticator(dump_file=dump_file) + + self.assertEqual({'User_X', 'User_Y'}, authenticator._users.keys()) + + user_x_state = authenticator._users['User_X'] + self.assertEqual('User_X', user_x_state.username) + self.assertEqual(['group1', 'group2'], user_x_state.groups) + self.assertEqual(123, user_x_state.last_auth_update) + self.assertEqual(None, user_x_state.last_visit) + + user_y_state = authenticator._users['User_Y'] + self.assertEqual('User_Y', user_y_state.username) + self.assertEqual([], user_y_state.groups) + self.assertEqual(None, user_y_state.last_auth_update) + self.assertEqual(456, user_y_state.last_visit) + + def create_test_authenticator(self, *, dump_file=None, group_support=None, session_expire_minutes=None): + config = { + 'type': 'test_oauth', + 'url': 'some_url', + 'client_id': '1234', + 'secret': 'abcd', + 'group_search': 'script-server', + 'auth_info_ttl': 80 + } + + if dump_file is not None: + config['state_dump_file'] = dump_file + + if group_support is not None: + config['group_support'] = group_support + + if session_expire_minutes is not None: + config['session_expire_minutes'] = session_expire_minutes + + authenticator = TestOauthAuthenticator(config) + + self.authenticators.append(authenticator) + + return authenticator + + def setUp(self) -> None: + self.authenticators = [] + + test_utils.setup() + + def tearDown(self): + test_utils.cleanup() + + for authenticator in self.authenticators: + authenticator._cleanup() + + +def _from_json(content): + json_obj = json.dumps(content) + conf_path = os.path.join(test_utils.temp_folder, 'conf.json') + 
file_utils.write_file(conf_path, json_obj) + return server_conf.from_json(conf_path, test_utils.temp_folder) + + +class TestOauthAuthenticator(AbstractOauthAuthenticator): + def __init__(self, params_dict): + super().__init__('authorize_url', 'token_url', 'test_scope', params_dict) + + self.random_instance = random.seed(a=123) + + self.user_tokens = { + '11111': 'user_X', + '22222': 'user_Y', + '33333': 'user_Z' + } + self.user_groups = {} + self.disabled_users = [] + + async def fetch_access_token(self, code, request_handler): + for key, value in self.user_tokens.items(): + if value.endswith(code): + return key + + raise Exception('Could not generate token for code ' + code + '. Make sure core is equal to user suffix') + + async def fetch_user_info(self, access_token: str) -> _OauthUserInfo: + user = self.user_tokens[access_token] + + enabled = user not in self.disabled_users + return _OauthUserInfo(user, enabled, {'username': user, 'access_token': access_token}) + + async def fetch_user_groups(self, access_token): + user = self.user_tokens[access_token] + if user in self.user_groups: + return self.user_groups[user] + return [] diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index c26a7ef9..ae304b7a 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -224,7 +224,7 @@ def test_gitlab_oauth(self): self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) self.assertEquals('1234', config.authenticator.client_id) self.assertEquals('abcd', config.authenticator.secret) - self.assertEquals('https://gitlab', config.authenticator._GITLAB_PREFIX) + self.assertEquals('https://gitlab', config.authenticator._GITLAB_HOST) self.assertEquals('script-server', config.authenticator.gitlab_group_search) self.assertEquals(120, config.authenticator.gitlab_update) self.assertEquals("/tmp/dump.json", config.authenticator.gitlab_dump) @@ -243,7 +243,7 @@ def test_gitlab_oauth_default(self): }}) 
self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEquals('https://gitlab.com', config.authenticator._GITLAB_PREFIX) + self.assertEquals('https://gitlab.com', config.authenticator._GITLAB_HOST) self.assertIsNone(config.authenticator.gitlab_group_search) self.assertIsNone(config.authenticator.gitlab_update) self.assertIsNone(config.authenticator.gitlab_dump) From c6c8201a59b9d0b4db2d835399dc50318072b380 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 12 Jun 2020 14:22:19 +0200 Subject: [PATCH 014/398] #305 added oauth authenticate and validate_user tests --- src/auth/auth_google_oauth.py | 2 + src/tests/auth/test_auth_abstract_oauth.py | 327 ++++++++++++++++++--- 2 files changed, 282 insertions(+), 47 deletions(-) diff --git a/src/auth/auth_google_oauth.py b/src/auth/auth_google_oauth.py index 9d648042..9fded43d 100644 --- a/src/auth/auth_google_oauth.py +++ b/src/auth/auth_google_oauth.py @@ -10,6 +10,8 @@ # noinspection PyProtectedMember class GoogleOauthAuthenticator(AbstractOauthAuthenticator): def __init__(self, params_dict): + params_dict['group_support'] = False + super().__init__(tornado.auth.GoogleOAuth2Mixin._OAUTH_AUTHORIZE_URL, tornado.auth.GoogleOAuth2Mixin._OAUTH_ACCESS_TOKEN_URL, 'email', diff --git a/src/tests/auth/test_auth_abstract_oauth.py b/src/tests/auth/test_auth_abstract_oauth.py index 55e58f8d..d056ba0a 100644 --- a/src/tests/auth/test_auth_abstract_oauth.py +++ b/src/tests/auth/test_auth_abstract_oauth.py @@ -1,28 +1,59 @@ import json -import json import os import random import unittest from unittest import TestCase -from unittest.mock import Mock +from unittest.mock import Mock, patch + +import tornado +from tornado.testing import AsyncTestCase from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo -from model import server_conf +from auth.auth_base import AuthFailureError, AuthBadRequestException from model.server_conf import InvalidServerConfigException from tests import 
test_utils -from utils import file_utils +from tests.test_utils import mock_object if __name__ == '__main__': unittest.main() mock_time = Mock() mock_time.return_value = 10000.01 -mock_request_handler = Mock(**{'get_secure_cookie.return_value': '12345'.encode()}) + +authenticators = [] + + +def create_test_authenticator(*, dump_file=None, group_support=None, session_expire_minutes=None, auth_info_ttl=None): + config = { + 'type': 'test_oauth', + 'url': 'some_url', + 'client_id': '1234', + 'secret': 'abcd', + 'group_search': 'script-server' + } + + if dump_file is not None: + config['state_dump_file'] = dump_file + + if group_support is not None: + config['group_support'] = group_support + + if session_expire_minutes is not None: + config['session_expire_minutes'] = session_expire_minutes + + if auth_info_ttl is not None: + config['auth_info_ttl'] = auth_info_ttl + + authenticator = MockOauthAuthenticator(config) + + authenticators.append(authenticator) + + return authenticator class TestAuthConfig(TestCase): def test_client_visible_config(self): - authenticator = self.create_test_authenticator() + authenticator = create_test_authenticator() client_visible_config = authenticator._client_visible_config self.assertEqual('1234', client_visible_config['client_id']) @@ -31,7 +62,7 @@ def test_client_visible_config(self): def test_config_values(self): dump_file_path = os.path.join(test_utils.temp_folder, 'dump.json') - authenticator = self.create_test_authenticator(dump_file=dump_file_path, session_expire_minutes=10) + authenticator = create_test_authenticator(dump_file=dump_file_path, session_expire_minutes=10, auth_info_ttl=80) self.assertEqual('1234', authenticator.client_id) self.assertEqual('abcd', authenticator.secret) @@ -41,12 +72,12 @@ def test_config_values(self): self.assertEqual(dump_file_path, authenticator.dump_file) def test_group_support_disabled(self): - authenticator = self.create_test_authenticator(group_support=False) + authenticator = 
create_test_authenticator(group_support=False) self.assertEqual(False, authenticator.group_support) def test_no_session_expire(self): - authenticator = self.create_test_authenticator() + authenticator = create_test_authenticator() self.assertEqual(0, authenticator.session_expire) @@ -54,87 +85,289 @@ def test_dump_file_when_folder(self): self.assertRaisesRegex( InvalidServerConfigException, 'dump FILE instead of folder', - self.create_test_authenticator, + create_test_authenticator, dump_file=test_utils.temp_folder) def test_dump_file_when_folder_not_exists(self): self.assertRaisesRegex( InvalidServerConfigException, 'OAuth dump file folder does not exist', - self.create_test_authenticator, + create_test_authenticator, dump_file=os.path.join(test_utils.temp_folder, 'sub', 'dump.json')) def test_restore_dump_state_when_no_file(self): dump_file_path = os.path.join(test_utils.temp_folder, 'dump.json') - authenticator = self.create_test_authenticator(dump_file=dump_file_path) + authenticator = create_test_authenticator(dump_file=dump_file_path) self.assertEqual({}, authenticator._users) def test_restore_dump_state_when_multiple_users(self): dump_file = test_utils.create_file('dump.json', text=json.dumps( - [{'username': 'User_X', 'groups': ['group1', 'group2'], 'last_auth_update': 123}, - {'username': 'User_Y', 'last_visit': 456}])) - authenticator = self.create_test_authenticator(dump_file=dump_file) + [{'username': 'user_X', 'groups': ['group1', 'group2'], 'last_auth_update': 123}, + {'username': 'user_Y', 'last_visit': 456}])) + authenticator = create_test_authenticator(dump_file=dump_file) - self.assertEqual({'User_X', 'User_Y'}, authenticator._users.keys()) + self.assertEqual({'user_X', 'user_Y'}, authenticator._users.keys()) - user_x_state = authenticator._users['User_X'] - self.assertEqual('User_X', user_x_state.username) + user_x_state = authenticator._users['user_X'] + self.assertEqual('user_X', user_x_state.username) self.assertEqual(['group1', 'group2'], 
user_x_state.groups) self.assertEqual(123, user_x_state.last_auth_update) self.assertEqual(None, user_x_state.last_visit) - user_y_state = authenticator._users['User_Y'] - self.assertEqual('User_Y', user_y_state.username) + user_y_state = authenticator._users['user_Y'] + self.assertEqual('user_Y', user_y_state.username) self.assertEqual([], user_y_state.groups) self.assertEqual(None, user_y_state.last_auth_update) self.assertEqual(456, user_y_state.last_visit) - def create_test_authenticator(self, *, dump_file=None, group_support=None, session_expire_minutes=None): - config = { - 'type': 'test_oauth', - 'url': 'some_url', - 'client_id': '1234', - 'secret': 'abcd', - 'group_search': 'script-server', - 'auth_info_ttl': 80 - } + def setUp(self) -> None: + test_utils.setup() + + def tearDown(self): + test_utils.cleanup() - if dump_file is not None: - config['state_dump_file'] = dump_file + for authenticator in authenticators: + authenticator._cleanup() - if group_support is not None: - config['group_support'] = group_support - if session_expire_minutes is not None: - config['session_expire_minutes'] = session_expire_minutes +def mock_request_handler(code): + handler_mock = mock_object() + handler_mock.get_argument = lambda arg, default: code if arg == 'code' else None - authenticator = TestOauthAuthenticator(config) + secure_cookies = {} - self.authenticators.append(authenticator) + handler_mock.get_secure_cookie = lambda cookie: secure_cookies.get(cookie) - return authenticator + def set_secure_cookie(cookie, value): + secure_cookies[cookie] = value - def setUp(self) -> None: - self.authenticators = [] + handler_mock.set_secure_cookie = set_secure_cookie + + return handler_mock + + +class TestAuthenticate(AsyncTestCase): + @tornado.testing.gen_test + def test_authenticate_successful(self): + authenticator = create_test_authenticator() + username = yield authenticator.authenticate(mock_request_handler(code='X')) + + self.assertEqual('user_X', username) + + 
@tornado.testing.gen_test + def test_authenticate_successful_different_user(self): + authenticator = create_test_authenticator() + username = yield authenticator.authenticate(mock_request_handler(code='Z')) + + self.assertEqual('user_Z', username) + + @tornado.testing.gen_test + def test_authenticate_when_no_code(self): + authenticator = create_test_authenticator() + with self.assertRaisesRegex(AuthBadRequestException, 'Missing authorization information'): + yield authenticator.authenticate(mock_request_handler(code=None)) + + @tornado.testing.gen_test + def test_authenticate_when_no_token(self): + authenticator = create_test_authenticator() + with self.assertRaisesRegex(Exception, 'Could not generate token'): + yield authenticator.authenticate(mock_request_handler(code='W')) + + @tornado.testing.gen_test + def test_authenticate_when_no_email(self): + authenticator = create_test_authenticator() + + async def custom_fetch_user_info(access_token): + return _OauthUserInfo(None, True, {}) + + authenticator.fetch_user_info = custom_fetch_user_info + + with self.assertRaisesRegex(AuthFailureError, 'No email field in user response'): + yield authenticator.authenticate(mock_request_handler(code='X')) + + @tornado.testing.gen_test + def test_authenticate_when_not_enabled(self): + authenticator = create_test_authenticator() + authenticator.disabled_users.append('user_Y') + + with self.assertRaisesRegex(AuthFailureError, 'is not enabled in OAuth provider'): + yield authenticator.authenticate(mock_request_handler(code='Y')) + + @tornado.testing.gen_test + def test_authenticate_and_get_user_groups(self): + authenticator = create_test_authenticator(group_support=True) + authenticator.user_groups['user_Y'] = ['group1', 'group2'] + + username = yield authenticator.authenticate(mock_request_handler(code='Y')) + groups = authenticator.get_groups(username) + self.assertEqual(['group1', 'group2'], groups) + + @tornado.testing.gen_test + def 
test_authenticate_and_get_user_groups_when_groups_disabled(self): + authenticator = create_test_authenticator(group_support=False) + authenticator.user_groups['user_Y'] = ['group1', 'group2'] + + username = yield authenticator.authenticate(mock_request_handler(code='Y')) + groups = authenticator.get_groups(username) + self.assertEqual([], groups) + + @tornado.testing.gen_test + def test_authenticate_and_save_user_token(self): + authenticator = create_test_authenticator(auth_info_ttl=10) + + request_handler = mock_request_handler(code='Y') + yield authenticator.authenticate(request_handler) + + saved_token = request_handler.get_secure_cookie('token') + self.assertEqual('22222', saved_token) + @tornado.testing.gen_test + def test_authenticate_and_save_user_token_when_auth_update_disabled(self): + authenticator = create_test_authenticator(auth_info_ttl=None) + + request_handler = mock_request_handler(code='Y') + yield authenticator.authenticate(request_handler) + + saved_token = request_handler.get_secure_cookie('token') + self.assertIsNone(saved_token) + + def setUp(self) -> None: + super().setUp() test_utils.setup() def tearDown(self): + super().tearDown() test_utils.cleanup() - for authenticator in self.authenticators: + for authenticator in authenticators: authenticator._cleanup() -def _from_json(content): - json_obj = json.dumps(content) - conf_path = os.path.join(test_utils.temp_folder, 'conf.json') - file_utils.write_file(conf_path, json_obj) - return server_conf.from_json(conf_path, test_utils.temp_folder) +class TestValidateUser(AsyncTestCase): + @tornado.testing.gen_test + def test_validate_user_success(self): + authenticator = create_test_authenticator() + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + valid = authenticator.validate_user(username, request_handler) + self.assertEqual(True, valid) + + @tornado.testing.gen_test + def test_validate_when_no_state(self): + authenticator = 
create_test_authenticator(group_support=False) + + valid = authenticator.validate_user('user_X', mock_request_handler('')) + self.assertEqual(True, valid) + + @tornado.testing.gen_test + def test_validate_when_no_username(self): + authenticator = create_test_authenticator(group_support=False) + + valid = authenticator.validate_user(None, mock_request_handler('')) + self.assertEqual(False, valid) + + @tornado.testing.gen_test + def test_validate_when_no_state_and_expire_enabled(self): + authenticator = create_test_authenticator(session_expire_minutes=1) + + valid = authenticator.validate_user('user_X', mock_request_handler('')) + self.assertEqual(False, valid) + + @tornado.testing.gen_test + def test_validate_when_no_state_and_auth_update_enabled(self): + authenticator = create_test_authenticator(auth_info_ttl=1) + + valid = authenticator.validate_user('user_X', mock_request_handler('')) + self.assertEqual(False, valid) + + @tornado.testing.gen_test + def test_validate_when_no_state_and_group_support(self): + authenticator = create_test_authenticator(group_support=True) + + valid = authenticator.validate_user('user_X', mock_request_handler('')) + self.assertEqual(False, valid) + + @patch('time.time', mock_time) + @tornado.testing.gen_test + def test_validate_when_session_expired(self): + authenticator = create_test_authenticator(session_expire_minutes=5) + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + mock_time.return_value = mock_time.return_value + 60 * 10 + valid = authenticator.validate_user(username, request_handler) + self.assertEqual(False, valid) + + @patch('time.time', mock_time) + @tornado.testing.gen_test + def test_validate_when_session_not_expired(self): + authenticator = create_test_authenticator(session_expire_minutes=5) + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + mock_time.return_value = mock_time.return_value + 60 
* 2 + valid = authenticator.validate_user(username, request_handler) + self.assertEqual(True, valid) + + @patch('time.time', mock_time) + @tornado.testing.gen_test + def test_validate_when_session_not_expired_after_renew(self): + authenticator = create_test_authenticator(session_expire_minutes=5) + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + mock_time.return_value = mock_time.return_value + 60 * 2 + authenticator.validate_user(username, request_handler) + + mock_time.return_value = mock_time.return_value + 60 * 4 + valid2 = authenticator.validate_user(username, request_handler) + self.assertEqual(True, valid2) + + @patch('time.time', mock_time) + @tornado.testing.gen_test + def test_validate_when_session_expired_after_renew(self): + authenticator = create_test_authenticator(session_expire_minutes=5) + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + mock_time.return_value = mock_time.return_value + 60 * 2 + authenticator.validate_user(username, request_handler) + + mock_time.return_value = mock_time.return_value + 60 * 6 + valid2 = authenticator.validate_user(username, request_handler) + self.assertEqual(False, valid2) + + @patch('time.time', mock_time) + @tornado.testing.gen_test + def test_validate_when_update_auth_and_no_access_token(self): + authenticator = create_test_authenticator(auth_info_ttl=1) + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + valid = authenticator.validate_user(username, mock_request_handler('X')) + self.assertEqual(False, valid) + + def setUp(self) -> None: + super().setUp() + test_utils.setup() + + def tearDown(self): + super().tearDown() + test_utils.cleanup() + + for authenticator in authenticators: + authenticator._cleanup() -class TestOauthAuthenticator(AbstractOauthAuthenticator): +class 
MockOauthAuthenticator(AbstractOauthAuthenticator): def __init__(self, params_dict): super().__init__('authorize_url', 'token_url', 'test_scope', params_dict) From fa3d664be83dc0d029062a60b7ac76f14e8b62c7 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 13 Jun 2020 17:10:14 +0200 Subject: [PATCH 015/398] #305 added oauth update_auth, logout and dump tests --- src/auth/auth_abstract_oauth.py | 16 +- src/concurrency/__init__.py | 0 src/concurrency/countdown_latch.py | 36 +++ src/tests/auth/test_auth_abstract_oauth.py | 330 ++++++++++++++++++--- 4 files changed, 335 insertions(+), 47 deletions(-) create mode 100644 src/concurrency/__init__.py create mode 100644 src/concurrency/countdown_latch.py diff --git a/src/auth/auth_abstract_oauth.py b/src/auth/auth_abstract_oauth.py index fa617f96..71f5c00f 100644 --- a/src/auth/auth_abstract_oauth.py +++ b/src/auth/auth_abstract_oauth.py @@ -33,7 +33,13 @@ def __init__(self, username) -> None: _OauthUserInfo = namedtuple('_OauthUserInfo', ['email', 'enabled', 'oauth_response']) -# noinspection PyProtectedMember +def _start_timer(callback): + timer = threading.Timer(30, callback) + timer.setDaemon(True) + timer.start() + return timer + + class AbstractOauthAuthenticator(auth_base.Authenticator, metaclass=abc.ABCMeta): def __init__(self, oauth_authorize_url, oauth_token_url, oauth_scope, params_dict): super().__init__() @@ -214,7 +220,7 @@ def update_user_auth(self, username, user_state, access_token): if not ttl_expired: return - tornado.ioloop.IOLoop.current().spawn_callback( + tornado.ioloop.IOLoop.current().add_callback( self._do_update_user_auth_async, username, user_state, @@ -236,7 +242,7 @@ async def _do_update_user_auth_async(self, username, user_state, access_token): user_info = await self.fetch_user_info(access_token) # type: _OauthUserInfo if (not user_info) or (not user_info.email): - LOGGER.error('Failed to fetch user info: %s', str(user_info.oauth_response)) + LOGGER.error('Failed to fetch user info: %s', 
str(user_info)) self._remove_user(username) return @@ -284,9 +290,7 @@ def repeating_dump(): finally: self._schedule_dump_task() - self.timer = threading.Timer(30, repeating_dump) - self.timer.setDaemon(True) - self.timer.start() + self.timer = _start_timer(repeating_dump) def _dump_state(self): if self.dump_file: diff --git a/src/concurrency/__init__.py b/src/concurrency/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/concurrency/countdown_latch.py b/src/concurrency/countdown_latch.py new file mode 100644 index 00000000..cf6819da --- /dev/null +++ b/src/concurrency/countdown_latch.py @@ -0,0 +1,36 @@ +import threading +import time + + +class CountDownLatch(object): + def __init__(self, count=1): + self.count = count + self.lock = threading.Condition() + + def count_down(self): + with self.lock: + self.count -= 1 + if self.count <= 0: + print('count_down: count = ' + str(self.count)) + self.lock.notifyAll() + + def await_latch(self, timeout=None): + if timeout: + end_time = time.time() + timeout + + with self.lock: + while self.count > 0: + wait_delta = end_time - time.time() + + if wait_delta > 0: + print('await_latch before wait: count = ' + str(self.count)) + self.lock.wait(wait_delta) + print('await_latch after wait: count = ' + str(self.count)) + else: + raise TimeoutError('Latch await timed out') + + return + + with self.lock: + while self.count > 0: + self.lock.wait() diff --git a/src/tests/auth/test_auth_abstract_oauth.py b/src/tests/auth/test_auth_abstract_oauth.py index d056ba0a..d5ac3dc0 100644 --- a/src/tests/auth/test_auth_abstract_oauth.py +++ b/src/tests/auth/test_auth_abstract_oauth.py @@ -1,18 +1,22 @@ import json import os import random +import threading +import time import unittest from unittest import TestCase from unittest.mock import Mock, patch -import tornado -from tornado.testing import AsyncTestCase +from tornado import gen +from tornado.testing import AsyncTestCase, gen_test +import auth from 
auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo from auth.auth_base import AuthFailureError, AuthBadRequestException from model.server_conf import InvalidServerConfigException from tests import test_utils from tests.test_utils import mock_object +from utils import file_utils if __name__ == '__main__': unittest.main() @@ -23,6 +27,21 @@ authenticators = [] +class _OauthTestCase(AsyncTestCase): + def setUp(self) -> None: + super().setUp() + test_utils.setup() + + mock_time.return_value = 10000.01 + + def tearDown(self): + super().tearDown() + test_utils.cleanup() + + for authenticator in authenticators: + authenticator._cleanup() + + def create_test_authenticator(*, dump_file=None, group_support=None, session_expire_minutes=None, auth_info_ttl=None): config = { 'type': 'test_oauth', @@ -104,6 +123,7 @@ def test_restore_dump_state_when_no_file(self): def test_restore_dump_state_when_multiple_users(self): dump_file = test_utils.create_file('dump.json', text=json.dumps( [{'username': 'user_X', 'groups': ['group1', 'group2'], 'last_auth_update': 123}, + {'groups': ['group3'], 'last_auth_update': 999}, {'username': 'user_Y', 'last_visit': 456}])) authenticator = create_test_authenticator(dump_file=dump_file) @@ -142,39 +162,44 @@ def mock_request_handler(code): def set_secure_cookie(cookie, value): secure_cookies[cookie] = value + def clear_secure_cookie(cookie): + if cookie in secure_cookies: + del secure_cookies[cookie] + handler_mock.set_secure_cookie = set_secure_cookie + handler_mock.clear_cookie = clear_secure_cookie return handler_mock -class TestAuthenticate(AsyncTestCase): - @tornado.testing.gen_test +class TestAuthenticate(_OauthTestCase): + @gen_test def test_authenticate_successful(self): authenticator = create_test_authenticator() username = yield authenticator.authenticate(mock_request_handler(code='X')) self.assertEqual('user_X', username) - @tornado.testing.gen_test + @gen_test def 
test_authenticate_successful_different_user(self): authenticator = create_test_authenticator() username = yield authenticator.authenticate(mock_request_handler(code='Z')) self.assertEqual('user_Z', username) - @tornado.testing.gen_test + @gen_test def test_authenticate_when_no_code(self): authenticator = create_test_authenticator() with self.assertRaisesRegex(AuthBadRequestException, 'Missing authorization information'): yield authenticator.authenticate(mock_request_handler(code=None)) - @tornado.testing.gen_test + @gen_test def test_authenticate_when_no_token(self): authenticator = create_test_authenticator() with self.assertRaisesRegex(Exception, 'Could not generate token'): yield authenticator.authenticate(mock_request_handler(code='W')) - @tornado.testing.gen_test + @gen_test def test_authenticate_when_no_email(self): authenticator = create_test_authenticator() @@ -186,7 +211,7 @@ async def custom_fetch_user_info(access_token): with self.assertRaisesRegex(AuthFailureError, 'No email field in user response'): yield authenticator.authenticate(mock_request_handler(code='X')) - @tornado.testing.gen_test + @gen_test def test_authenticate_when_not_enabled(self): authenticator = create_test_authenticator() authenticator.disabled_users.append('user_Y') @@ -194,7 +219,7 @@ def test_authenticate_when_not_enabled(self): with self.assertRaisesRegex(AuthFailureError, 'is not enabled in OAuth provider'): yield authenticator.authenticate(mock_request_handler(code='Y')) - @tornado.testing.gen_test + @gen_test def test_authenticate_and_get_user_groups(self): authenticator = create_test_authenticator(group_support=True) authenticator.user_groups['user_Y'] = ['group1', 'group2'] @@ -203,7 +228,7 @@ def test_authenticate_and_get_user_groups(self): groups = authenticator.get_groups(username) self.assertEqual(['group1', 'group2'], groups) - @tornado.testing.gen_test + @gen_test def test_authenticate_and_get_user_groups_when_groups_disabled(self): authenticator = 
create_test_authenticator(group_support=False) authenticator.user_groups['user_Y'] = ['group1', 'group2'] @@ -212,7 +237,7 @@ def test_authenticate_and_get_user_groups_when_groups_disabled(self): groups = authenticator.get_groups(username) self.assertEqual([], groups) - @tornado.testing.gen_test + @gen_test def test_authenticate_and_save_user_token(self): authenticator = create_test_authenticator(auth_info_ttl=10) @@ -222,7 +247,7 @@ def test_authenticate_and_save_user_token(self): saved_token = request_handler.get_secure_cookie('token') self.assertEqual('22222', saved_token) - @tornado.testing.gen_test + @gen_test def test_authenticate_and_save_user_token_when_auth_update_disabled(self): authenticator = create_test_authenticator(auth_info_ttl=None) @@ -232,20 +257,9 @@ def test_authenticate_and_save_user_token_when_auth_update_disabled(self): saved_token = request_handler.get_secure_cookie('token') self.assertIsNone(saved_token) - def setUp(self) -> None: - super().setUp() - test_utils.setup() - def tearDown(self): - super().tearDown() - test_utils.cleanup() - - for authenticator in authenticators: - authenticator._cleanup() - - -class TestValidateUser(AsyncTestCase): - @tornado.testing.gen_test +class TestValidateUser(_OauthTestCase): + @gen_test def test_validate_user_success(self): authenticator = create_test_authenticator() @@ -255,35 +269,35 @@ def test_validate_user_success(self): valid = authenticator.validate_user(username, request_handler) self.assertEqual(True, valid) - @tornado.testing.gen_test + @gen_test def test_validate_when_no_state(self): authenticator = create_test_authenticator(group_support=False) valid = authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(True, valid) - @tornado.testing.gen_test + @gen_test def test_validate_when_no_username(self): authenticator = create_test_authenticator(group_support=False) valid = authenticator.validate_user(None, mock_request_handler('')) self.assertEqual(False, valid) - 
@tornado.testing.gen_test + @gen_test def test_validate_when_no_state_and_expire_enabled(self): authenticator = create_test_authenticator(session_expire_minutes=1) valid = authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(False, valid) - @tornado.testing.gen_test + @gen_test def test_validate_when_no_state_and_auth_update_enabled(self): authenticator = create_test_authenticator(auth_info_ttl=1) valid = authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(False, valid) - @tornado.testing.gen_test + @gen_test def test_validate_when_no_state_and_group_support(self): authenticator = create_test_authenticator(group_support=True) @@ -291,7 +305,7 @@ def test_validate_when_no_state_and_group_support(self): self.assertEqual(False, valid) @patch('time.time', mock_time) - @tornado.testing.gen_test + @gen_test def test_validate_when_session_expired(self): authenticator = create_test_authenticator(session_expire_minutes=5) @@ -303,7 +317,7 @@ def test_validate_when_session_expired(self): self.assertEqual(False, valid) @patch('time.time', mock_time) - @tornado.testing.gen_test + @gen_test def test_validate_when_session_not_expired(self): authenticator = create_test_authenticator(session_expire_minutes=5) @@ -315,7 +329,7 @@ def test_validate_when_session_not_expired(self): self.assertEqual(True, valid) @patch('time.time', mock_time) - @tornado.testing.gen_test + @gen_test def test_validate_when_session_not_expired_after_renew(self): authenticator = create_test_authenticator(session_expire_minutes=5) @@ -330,7 +344,7 @@ def test_validate_when_session_not_expired_after_renew(self): self.assertEqual(True, valid2) @patch('time.time', mock_time) - @tornado.testing.gen_test + @gen_test def test_validate_when_session_expired_after_renew(self): authenticator = create_test_authenticator(session_expire_minutes=5) @@ -344,8 +358,7 @@ def test_validate_when_session_expired_after_renew(self): valid2 = 
authenticator.validate_user(username, request_handler) self.assertEqual(False, valid2) - @patch('time.time', mock_time) - @tornado.testing.gen_test + @gen_test def test_validate_when_update_auth_and_no_access_token(self): authenticator = create_test_authenticator(auth_info_ttl=1) @@ -355,16 +368,243 @@ def test_validate_when_update_auth_and_no_access_token(self): valid = authenticator.validate_user(username, mock_request_handler('X')) self.assertEqual(False, valid) + +class TestUpdateUserAuth(_OauthTestCase): + + @patch('time.time', mock_time) + @gen_test + def test_user_becomes_prohibited(self): + valid = yield self.run_validation_test( + lambda username, authenticator: authenticator.disabled_users.append(username)) + + self.assertEqual(False, valid) + + @patch('time.time', mock_time) + @gen_test + def test_user_stays_active(self): + valid = yield self.run_validation_test(lambda username, authenticator: None) + + self.assertEqual(True, valid) + + @patch('time.time', mock_time) + @gen_test + def test_user_removed(self): + def remove_user(username, authenticator): + del authenticator.user_tokens['11111'] + + valid = yield self.run_validation_test(remove_user) + + self.assertEqual(False, valid) + + @patch('time.time', mock_time) + @gen_test + def test_user_no_email(self): + def remove_user_email(username, authenticator): + authenticator.user_tokens['11111'] = '' + + valid = yield self.run_validation_test(remove_user_email) + + self.assertEqual(False, valid) + + @patch('time.time', mock_time) + @gen_test + def test_reload_groups(self): + this_auth = None # type: MockOauthAuthenticator + + def change_groups(username, authenticator): + authenticator.user_groups['user_X'] = ['Group A'] + nonlocal this_auth + this_auth = authenticator + + valid = yield self.run_validation_test(change_groups) + + self.assertEqual(True, valid) + self.assertEqual(['Group A'], this_auth.get_groups('user_X')) + + @patch('time.time', mock_time) + @gen_test + def test_reload_groups_fails(self): 
+ def set_groups_loading_fail(username, authenticator): + authenticator.failing_groups_loading.append(username) + + valid = yield self.run_validation_test(set_groups_loading_fail) + + self.assertEqual(False, valid) + + @patch('time.time', mock_time) + @gen_test + def test_no_reload_groups_without_expiry(self): + authenticator = create_test_authenticator(auth_info_ttl=5) + authenticator.user_groups['user_X'] = ['group1', 'group2'] + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + mock_time.return_value = mock_time.return_value + 2 + + authenticator.user_groups['user_X'] = ['Group A'] + + valid1 = authenticator.validate_user(username, request_handler) + self.assertEqual(True, valid1) + + yield self.wait_next_ioloop() + + groups = authenticator.get_groups('user_X') + self.assertEqual(['group1', 'group2'], groups) + + @gen.coroutine + def run_validation_test(self, prevalidation_callback): + authenticator = create_test_authenticator(auth_info_ttl=5) + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + mock_time.return_value = mock_time.return_value + 10 + + prevalidation_callback(username, authenticator) + + valid1 = authenticator.validate_user(username, request_handler) + self.assertEqual(True, valid1) + + yield self.wait_next_ioloop() + + return authenticator.validate_user(username, request_handler) + + async def wait_next_ioloop(self): + await gen.sleep(0.001) + + +class TestLogout(_OauthTestCase): + @gen_test + def test_validate_user_success(self): + authenticator = create_test_authenticator() + + request_handler = mock_request_handler('X') + username = yield authenticator.authenticate(request_handler) + + authenticator.logout(username, request_handler) + + self.assertIsNone(request_handler.get_secure_cookie('token')) + + valid = authenticator.validate_user(username, request_handler) + self.assertFalse(valid) + + +class 
TestDump(_OauthTestCase): + @gen_test + def test_validate_empty_dump(self): + dump_file = os.path.join(test_utils.temp_folder, 'dump.json') + create_test_authenticator(dump_file=dump_file) + + self.wait_dump() + + self.validate_dump(dump_file, []) + + @patch('time.time', mock_time) + @gen_test + def test_validate_single_user(self): + dump_file = os.path.join(test_utils.temp_folder, 'dump.json') + authenticator = create_test_authenticator(dump_file=dump_file) + + yield authenticator.authenticate(mock_request_handler('X')) + + self.wait_dump() + + self.validate_dump(dump_file, [{ + 'username': 'user_X', + 'last_visit': 10000.01, + 'last_auth_update': None, + 'groups': []}]) + + @patch('time.time', mock_time) + @gen_test + def test_validate_2_users(self): + dump_file = os.path.join(test_utils.temp_folder, 'dump.json') + authenticator = create_test_authenticator(dump_file=dump_file, auth_info_ttl=1) + + yield authenticator.authenticate(mock_request_handler('X')) + + mock_time.return_value = 10002.02 + authenticator.user_groups['user_Y'] = ['Group A'] + + yield authenticator.authenticate(mock_request_handler('Y')) + + self.wait_dump() + + self.validate_dump(dump_file, [{ + 'username': 'user_X', + 'last_visit': 10000.01, + 'last_auth_update': 10000.01, + 'groups': []}, + { + 'username': 'user_Y', + 'last_visit': 10002.02, + 'last_auth_update': 10002.02, + 'groups': ['Group A'] + }]) + + @patch('time.time', mock_time) + @gen_test + def test_validate_after_logout(self): + dump_file = os.path.join(test_utils.temp_folder, 'dump.json') + authenticator = create_test_authenticator(dump_file=dump_file, auth_info_ttl=1) + + user_x_request_handler = mock_request_handler('X') + yield authenticator.authenticate(user_x_request_handler) + + mock_time.return_value = 10002.02 + authenticator.user_groups['user_Y'] = ['Group A'] + + yield authenticator.authenticate(mock_request_handler('Y')) + + authenticator.logout('user_X', user_x_request_handler) + + self.validate_dump(dump_file, [{ + 
'username': 'user_Y', + 'last_visit': 10002.02, + 'last_auth_update': 10002.02, + 'groups': ['Group A'] + }]) + + def validate_dump(self, dump_file, expected_value): + self.assertTrue(os.path.exists(dump_file)) + file_content = file_utils.read_file(dump_file) + restored_dump = json.loads(file_content) + self.assertEqual(expected_value, restored_dump) + + def wait_dump(self): + invocations = self.timer_invocations + + wait_count = 0 + while (self.timer_invocations == invocations) and (wait_count < 50): + time.sleep(0.001) + def setUp(self) -> None: super().setUp() - test_utils.setup() + + self._def_start_timer = auth.auth_abstract_oauth._start_timer + + self.timer_invocations = 0 + self.max_timer_invocations = 9999 + + def start_quick_timer(callback): + if self.timer_invocations > self.max_timer_invocations: + return + + timer = threading.Timer(0.01, callback) + timer.setDaemon(True) + timer.start() + + self.timer_invocations += 1 + + return timer + + auth.auth_abstract_oauth._start_timer = start_quick_timer def tearDown(self): super().tearDown() - test_utils.cleanup() - for authenticator in authenticators: - authenticator._cleanup() + auth.auth_abstract_oauth._start_timer = self._def_start_timer class MockOauthAuthenticator(AbstractOauthAuthenticator): @@ -380,6 +620,7 @@ def __init__(self, params_dict): } self.user_groups = {} self.disabled_users = [] + self.failing_groups_loading = [] async def fetch_access_token(self, code, request_handler): for key, value in self.user_tokens.items(): @@ -389,6 +630,9 @@ async def fetch_access_token(self, code, request_handler): raise Exception('Could not generate token for code ' + code + '. 
Make sure core is equal to user suffix') async def fetch_user_info(self, access_token: str) -> _OauthUserInfo: + if access_token not in self.user_tokens: + return None + user = self.user_tokens[access_token] enabled = user not in self.disabled_users @@ -396,6 +640,10 @@ async def fetch_user_info(self, access_token: str) -> _OauthUserInfo: async def fetch_user_groups(self, access_token): user = self.user_tokens[access_token] + + if user in self.failing_groups_loading: + raise AuthFailureError('Emulate group loading error') + if user in self.user_groups: return self.user_groups[user] return [] From dd1ff238b093f1e26e57b036d00f6c7b3cfb7b40 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 13 Jun 2020 17:50:13 +0200 Subject: [PATCH 016/398] #305 updated gitlab oauth tests to reflect new structure --- src/auth/auth_gitlab.py | 10 +- src/tests/auth/test_auth_abstract_oauth.py | 4 - src/tests/auth/test_auth_gitlab.py | 305 ++++++++------------- 3 files changed, 111 insertions(+), 208 deletions(-) diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index f5e5b799..e53be74f 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -3,7 +3,6 @@ from tornado.auth import OAuth2Mixin from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo -from auth.auth_base import AuthFailureError LOGGER = logging.getLogger('script_server.GitlabAuthorizer') @@ -17,12 +16,12 @@ class GitlabOAuthAuthenticator(AbstractOauthAuthenticator, OAuth2Mixin): def __init__(self, params_dict): self.gitlab_host = params_dict.get('url', 'https://gitlab.com') - self.gitlab_group_support = params_dict.get('group_support', True) + gitlab_group_support = params_dict.get('group_support', True) super().__init__( _OAUTH_AUTHORIZE_URL % self.gitlab_host, _OAUTH_ACCESS_TOKEN_URL % self.gitlab_host, - 'api' if self.gitlab_group_support else 'read_user', + 'api' if gitlab_group_support else 'read_user', params_dict) self.gitlab_group_search = 
params_dict.get('group_search') @@ -62,9 +61,4 @@ async def fetch_user_groups(self, access_token): if group.get('full_path'): groups.append(group['full_path']) - if groups is None: - error_message = 'Cant read user groups' - LOGGER.error(error_message) - raise AuthFailureError(error_message) - return groups diff --git a/src/tests/auth/test_auth_abstract_oauth.py b/src/tests/auth/test_auth_abstract_oauth.py index d5ac3dc0..00795892 100644 --- a/src/tests/auth/test_auth_abstract_oauth.py +++ b/src/tests/auth/test_auth_abstract_oauth.py @@ -3,7 +3,6 @@ import random import threading import time -import unittest from unittest import TestCase from unittest.mock import Mock, patch @@ -18,9 +17,6 @@ from tests.test_utils import mock_object from utils import file_utils -if __name__ == '__main__': - unittest.main() - mock_time = Mock() mock_time.return_value = 10000.01 diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index 6b17e83f..fb74bdda 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -1,200 +1,113 @@ -import copy -import json -import os -import tempfile -import time import unittest +from unittest.mock import patch -from tornado import escape +# noinspection PyProtectedMember +from tornado.testing import AsyncTestCase, gen_test +from auth.auth_abstract_oauth import _OauthUserInfo from auth.auth_gitlab import GitlabOAuthAuthenticator -from model import server_conf -from tests import test_utils -from utils import file_utils -from unittest import TestCase -from unittest.mock import patch, Mock - -if __name__ == '__main__': - unittest.main() - -mock_time = Mock() -mock_time.return_value = 10000.01 -mock_persist_session = Mock() -mock_do_update_groups = Mock() -mock_do_update_user = Mock() -mock_request_handler = Mock(**{'get_secure_cookie.return_value': "12345".encode()}) - - -class TestAuthConfig(TestCase): - @patch('time.time', mock_time) - 
@patch('auth.auth_gitlab.GitlabOAuthAuthenticator.persist_session', mock_persist_session) - @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) - def test_gitlab_oauth(self): - now = time.time() - state = { - "user@test.com": { - "groups": ["testgroup"], - "updating": False, - "updated": now-10, - "visit": now-10, - "id": 1, - "username": "test", - "name": "John", - "email": "user@test.com", - "state": "active" - }, - "nogroups@test.com": { - "groups": None, - "updating": True, - "updated": now-10, - "visit": now-10, - "id": 2, - "username": "nogroups", - "name": "John", - "email": "nogroups@test.com", - "state": "blocked" - } - } - - state_file = test_utils.create_file("gitlab_state.json", text=escape.json_encode(state)) - - config = _from_json({ - 'auth': { - "type": "gitlab", - "url": "https://gitlab", - "client_id": "1234", - "secret": "abcd", - "group_search": "script-server", - "auth_info_ttl": 80, - "state_dump_file": state_file, - "session_expire_minutes": 10 - }, - 'access': { - 'allowed_users': [] - }}) - - self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEqual(state_file, config.authenticator.gitlab_dump) - self.assertEqual("1234", config.authenticator._client_visible_config['client_id']) - self.assertEqual("https://gitlab/oauth/authorize", config.authenticator._client_visible_config['oauth_url']) - self.assertEqual("api", config.authenticator._client_visible_config['oauth_scope']) - - assert_state = state.copy() - for key in list(assert_state.keys()): - assert_state[key]['updating'] = False - assert_state[key]['updated'] = 10000.01 - 80 - 1 - self.assertDictEqual(assert_state, config.authenticator.user_states) - saved_state = copy.deepcopy(config.authenticator.user_states) - - self.assertEqual(False, config.authenticator.validate_user("unknown@test.com", mock_request_handler)) - self.assertEqual(False, config.authenticator.validate_user("nogroups@test.com", mock_request_handler)) 
- self.assertListEqual([], config.authenticator.get_groups("unknown@test.com")) - self.assertListEqual([], config.authenticator.get_groups("nogroups@test.com")) - - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(time.time(), config.authenticator.user_states["user@test.com"]["visit"], "visit updated") - self.assertEqual(True, mock_do_update_groups.called, "state just loaded, gitlab updating") - mock_do_update_groups.reset_mock() - - config.authenticator.user_states["user@test.com"]["updating"] = True - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(False, mock_do_update_groups.called, "do not call parallel updated") - mock_do_update_groups.reset_mock() - - mock_time.return_value = 10000.01 + 80*2 + 1 # stale request - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(True, mock_do_update_groups.called, "parallel but stale") - mock_do_update_groups.reset_mock() - config.authenticator.user_states = copy.deepcopy(saved_state) - mock_time.return_value = 10000.01 - - config.authenticator.user_states["user@test.com"]['updated'] = now # gitlab info updated - config.authenticator.user_states["user@test.com"]['updating'] = False - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(False, mock_do_update_groups.called, "do not update gitlab because ttl not expired") - mock_do_update_groups.reset_mock() - - mock_time.return_value = 10000.01 + 81 - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(True, mock_do_update_groups.called, "ttl expired") - mock_do_update_groups.reset_mock() - config.authenticator.user_states = copy.deepcopy(saved_state) - mock_time.return_value = 10000.01 - - # session expire test - mock_time.return_value = 10000.01 + 
601 - self.assertEqual(False, config.authenticator.validate_user("user@test.com", mock_request_handler), "shoud be expired") - self.assertEqual(True, mock_persist_session.called, "dump state to file") - mock_persist_session.reset_mock() - self.assertIsNone(config.authenticator.user_states.get("user@test.com"), "removed from state") - self.assertListEqual([], config.authenticator.get_groups("user@test.com")) - config.authenticator.user_states = copy.deepcopy(saved_state) - mock_time.return_value = 10000.01 - - # test clean expire - mock_time.return_value = 10000.01 + 601 - config.authenticator.clean_sessions() - self.assertIsNone(config.authenticator.user_states.get("user@test.com")) - config.authenticator.user_states = copy.deepcopy(saved_state) - mock_time.return_value = 10000.01 - - @patch('time.time', mock_time) - @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_user', mock_do_update_user) - @patch('auth.auth_gitlab.GitlabOAuthAuthenticator.do_update_groups', mock_do_update_groups) - def test_gitlab_oauth_user_read_scope(self): - now = time.time() - - state = { - "user@test.com": { - "groups": ["testgroup"], - "updating": False, - "updated": 0, - "visit": now-10, - "id": 1, - "username": "test", - "name": "John", - "email": "user@test.com", - "state": "active" - } - } - - config = _from_json({ - 'auth': { - "type": "gitlab", - "url": "https://gitlab", - "client_id": "1234", - "secret": "abcd", - "group_search": "script-server", - "auth_info_ttl": 80, - "session_expire_minutes": 1, - "group_support": False - }, - 'access': { - 'allowed_users': [] - }}) - - self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEqual("read_user", config.authenticator._client_visible_config['oauth_scope']) - config.authenticator.user_states = state - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(False, mock_do_update_groups.called, "update==0, gitlab updating but not 
groups") - self.assertEqual(True, mock_do_update_user.called, "update==0, gitlab updating only user") - mock_do_update_groups.reset_mock() - mock_do_update_user.reset_mock() - - config.authenticator.gitlab_update = None - self.assertEqual(True, config.authenticator.validate_user("user@test.com", mock_request_handler)) - self.assertEqual(False, mock_do_update_groups.called, "gitab update disabled") - self.assertEqual(False, mock_do_update_user.called, "gitab update disabled") - mock_do_update_groups.reset_mock() - mock_do_update_user.reset_mock() - - def tearDown(self): - test_utils.cleanup() - - -def _from_json(content): - json_obj = json.dumps(content) - conf_path = os.path.join(test_utils.temp_folder, 'conf.json') - file_utils.write_file(conf_path, json_obj) - return server_conf.from_json(conf_path, test_utils.temp_folder) \ No newline at end of file + + +def create_config(*, url=None, group_search=None, group_support=None): + config = { + 'client_id': '1234', + 'secret': 'hello world?' 
+ } + + if url is not None: + config['url'] = url + if group_search is not None: + config['group_search'] = group_search + if group_support is not None: + config['group_support'] = group_support + + return config + + +class TestAuthConfig(unittest.TestCase): + def test_client_visible_config(self): + authenticator = GitlabOAuthAuthenticator(create_config(url='https://my.gitlab.host')) + + client_visible_config = authenticator._client_visible_config + self.assertEqual('1234', client_visible_config['client_id']) + self.assertEqual('https://my.gitlab.host/oauth/authorize', client_visible_config['oauth_url']) + self.assertEqual('api', client_visible_config['oauth_scope']) + + def test_client_visible_config_when_groups_disabled(self): + authenticator = GitlabOAuthAuthenticator(create_config(group_support=False)) + + client_visible_config = authenticator._client_visible_config + self.assertEqual('read_user', client_visible_config['oauth_scope']) + + def test_client_visible_config_when_default_url(self): + authenticator = GitlabOAuthAuthenticator(create_config()) + + client_visible_config = authenticator._client_visible_config + self.assertEqual('https://gitlab.com/oauth/authorize', client_visible_config['oauth_url']) + + +class TestFetchUserInfo(AsyncTestCase): + @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @gen_test + def test_fetch_user_info(self, mock_request): + response = {'email': 'me@gmail.com', 'state': 'active'} + mock_request.return_value = response + + authenticator = GitlabOAuthAuthenticator(create_config(url='https://my.gitlab.host')) + + user_info = yield authenticator.fetch_user_info('my_token_2') + self.assertEqual(_OauthUserInfo('me@gmail.com', True, response), user_info) + + mock_request.assert_called_with('https://my.gitlab.host/api/v4/user', 'my_token_2') + + @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @gen_test + def test_fetch_user_info_when_no_response(self, mock_request): + mock_request.return_value = None + + authenticator = 
GitlabOAuthAuthenticator(create_config()) + + user_info = yield authenticator.fetch_user_info('my_token_2') + self.assertEqual(None, user_info) + + @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @gen_test + def test_fetch_user_info_when_not_active(self, mock_request): + response = {'email': 'me@gmail.com', 'state': 'something'} + mock_request.return_value = response + + authenticator = GitlabOAuthAuthenticator(create_config()) + + user_info = yield authenticator.fetch_user_info('my_token_2') + self.assertEqual(_OauthUserInfo('me@gmail.com', False, response), user_info) + + +class TestFetchUserGroups(AsyncTestCase): + @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @gen_test + def test_fetch_user_info(self, mock_request): + response = [{'full_path': 'group1'}, {'full_path': 'group2'}, {'something': 'group3'}] + mock_request.return_value = response + + authenticator = GitlabOAuthAuthenticator(create_config(url='https://my.gitlab.host')) + + groups = yield authenticator.fetch_user_groups('my_token_2') + self.assertEqual(['group1', 'group2'], groups) + + mock_request.assert_called_with('https://my.gitlab.host/api/v4/groups', + access_token='my_token_2', + all_available='false', + per_page=100) + + @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @gen_test + def test_fetch_user_info_when_search(self, mock_request): + authenticator = GitlabOAuthAuthenticator(create_config(url='https://my.gitlab.host', group_search='abc')) + + yield authenticator.fetch_user_groups('my_token_2') + + mock_request.assert_called_with('https://my.gitlab.host/api/v4/groups', + access_token='my_token_2', + all_available='false', + per_page=100, + search='abc') From c53dfd1c469afe2e9a55d62b40ec26696ca59901 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 13 Jun 2020 19:28:06 +0200 Subject: [PATCH 017/398] #305 fixed flickering and <3.7 incompatible tests --- src/tests/auth/__init__.py | 0 src/tests/auth/test_auth_abstract_oauth.py | 1 + src/tests/auth/test_auth_gitlab.py | 18 
++++++---- src/tests/server_conf_test.py | 41 +++------------------- src/tests/test_utils.py | 6 ++++ 5 files changed, 22 insertions(+), 44 deletions(-) mode change 100755 => 100644 src/tests/auth/__init__.py diff --git a/src/tests/auth/__init__.py b/src/tests/auth/__init__.py old mode 100755 new mode 100644 diff --git a/src/tests/auth/test_auth_abstract_oauth.py b/src/tests/auth/test_auth_abstract_oauth.py index 00795892..47325314 100644 --- a/src/tests/auth/test_auth_abstract_oauth.py +++ b/src/tests/auth/test_auth_abstract_oauth.py @@ -566,6 +566,7 @@ def validate_dump(self, dump_file, expected_value): self.assertTrue(os.path.exists(dump_file)) file_content = file_utils.read_file(dump_file) restored_dump = json.loads(file_content) + restored_dump.sort(key=lambda state: state['username']) self.assertEqual(expected_value, restored_dump) def wait_dump(self): diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index fb74bdda..10030f05 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -4,8 +4,10 @@ # noinspection PyProtectedMember from tornado.testing import AsyncTestCase, gen_test +# noinspection PyProtectedMember from auth.auth_abstract_oauth import _OauthUserInfo from auth.auth_gitlab import GitlabOAuthAuthenticator +from tests.test_utils import AsyncMock def create_config(*, url=None, group_search=None, group_support=None): @@ -47,7 +49,7 @@ def test_client_visible_config_when_default_url(self): class TestFetchUserInfo(AsyncTestCase): - @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @patch('tornado.auth.OAuth2Mixin.oauth2_request', new_callable=AsyncMock) @gen_test def test_fetch_user_info(self, mock_request): response = {'email': 'me@gmail.com', 'state': 'active'} @@ -60,7 +62,7 @@ def test_fetch_user_info(self, mock_request): mock_request.assert_called_with('https://my.gitlab.host/api/v4/user', 'my_token_2') - @patch('tornado.auth.OAuth2Mixin.oauth2_request') + 
@patch('tornado.auth.OAuth2Mixin.oauth2_request', new_callable=AsyncMock) @gen_test def test_fetch_user_info_when_no_response(self, mock_request): mock_request.return_value = None @@ -70,7 +72,7 @@ def test_fetch_user_info_when_no_response(self, mock_request): user_info = yield authenticator.fetch_user_info('my_token_2') self.assertEqual(None, user_info) - @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @patch('tornado.auth.OAuth2Mixin.oauth2_request', new_callable=AsyncMock) @gen_test def test_fetch_user_info_when_not_active(self, mock_request): response = {'email': 'me@gmail.com', 'state': 'something'} @@ -83,9 +85,9 @@ def test_fetch_user_info_when_not_active(self, mock_request): class TestFetchUserGroups(AsyncTestCase): - @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @patch('tornado.auth.OAuth2Mixin.oauth2_request', new_callable=AsyncMock) @gen_test - def test_fetch_user_info(self, mock_request): + def test_fetch_user_groups(self, mock_request): response = [{'full_path': 'group1'}, {'full_path': 'group2'}, {'something': 'group3'}] mock_request.return_value = response @@ -99,9 +101,11 @@ def test_fetch_user_info(self, mock_request): all_available='false', per_page=100) - @patch('tornado.auth.OAuth2Mixin.oauth2_request') + @patch('tornado.auth.OAuth2Mixin.oauth2_request', new_callable=AsyncMock) @gen_test - def test_fetch_user_info_when_search(self, mock_request): + def test_fetch_user_groups_when_search(self, mock_request): + mock_request.return_value = [] + authenticator = GitlabOAuthAuthenticator(create_config(url='https://my.gitlab.host', group_search='abc')) yield authenticator.fetch_user_groups('my_token_2') diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index ae304b7a..5531ac90 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -207,48 +207,15 @@ def test_google_oauth_without_allowed_users(self): def test_gitlab_oauth(self): config = _from_json({ 'auth': { - "type": "gitlab", - "url": 
"https://gitlab", - "client_id": "1234", - "secret": "abcd", - "group_search": "script-server", - "auth_info_ttl": 120, - "state_dump_file": "/tmp/dump.json", - "session_expire_minutes": 60, - "group_support": False + 'type': 'gitlab', + 'client_id': '1234', + 'secret': 'abcd', }, 'access': { - 'allowed_users': [] + 'allowed_users': [] }}) self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEquals('1234', config.authenticator.client_id) - self.assertEquals('abcd', config.authenticator.secret) - self.assertEquals('https://gitlab', config.authenticator._GITLAB_HOST) - self.assertEquals('script-server', config.authenticator.gitlab_group_search) - self.assertEquals(120, config.authenticator.gitlab_update) - self.assertEquals("/tmp/dump.json", config.authenticator.gitlab_dump) - self.assertEquals(60*60, config.authenticator.session_expire) - self.assertEquals(False, config.authenticator.gitlab_group_support) - - def test_gitlab_oauth_default(self): - config = _from_json({ - 'auth': { - "type": "gitlab", - "client_id": "1234", - "secret": "abcd", - }, - 'access': { - 'allowed_users': [] - }}) - - self.assertIsInstance(config.authenticator, GitlabOAuthAuthenticator) - self.assertEquals('https://gitlab.com', config.authenticator._GITLAB_HOST) - self.assertIsNone(config.authenticator.gitlab_group_search) - self.assertIsNone(config.authenticator.gitlab_update) - self.assertIsNone(config.authenticator.gitlab_dump) - self.assertIsNone(config.authenticator.session_expire) - self.assertEquals(True, config.authenticator.gitlab_group_support) def test_ldap(self): config = _from_json({'auth': {'type': 'ldap', diff --git a/src/tests/test_utils.py b/src/tests/test_utils.py index a77546d0..2ba0e308 100644 --- a/src/tests/test_utils.py +++ b/src/tests/test_utils.py @@ -4,6 +4,7 @@ import stat import threading import uuid +from unittest.mock import MagicMock import utils.file_utils as file_utils import utils.os_utils as os_utils @@ -467,3 +468,8 @@ def 
next_id(self): self._next_id += 1 self.generated_ids.append(id) return id + + +class AsyncMock(MagicMock): + async def __call__(self, *args, **kwargs): + return super(AsyncMock, self).__call__(*args, **kwargs) From 882424ead8e1baf88101a86974e0de61e05895d1 Mon Sep 17 00:00:00 2001 From: Dmitry Togushev Date: Thu, 18 Jun 2020 15:54:26 +0300 Subject: [PATCH 018/398] =?UTF-8?q?Two=20issue=20has=20been=20added:=201.?= =?UTF-8?q?=20--log-folder=20and=20--tmp-folder=20command=20line=20argumen?= =?UTF-8?q?ts=20to=20get=20the=20files=20locations=20more=20flexible=202.?= =?UTF-8?q?=20"expiration=5Fdays"=20(non-int=D0=B5ger=20values=20are=20ava?= =?UTF-8?q?lible=20also)=20in=20server=20config=20file's=20section=20"auth?= =?UTF-8?q?"=20to=20limit=20auth=20cookie=20life=20time.=20The=20default?= =?UTF-8?q?=20value=20is=2030=20days=20if=20the=20param=20is=20underfined.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changes to be committed: modified: src/auth/auth_base.py modified: src/auth/tornado_auth.py modified: src/main.py modified: src/model/server_conf.py --- src/auth/auth_base.py | 1 + src/auth/tornado_auth.py | 2 +- src/main.py | 6 ++++-- src/model/server_conf.py | 2 ++ 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index a5ddefb1..367e3782 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -5,6 +5,7 @@ class Authenticator(metaclass=abc.ABCMeta): def __init__(self) -> None: self._client_visible_config = {} self.auth_type = None + self.auth_expiration_days = 30 @abc.abstractmethod def authenticate(self, request_handler): diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 2bca3d7e..149bb573 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -70,7 +70,7 @@ def authenticate(self, request_handler): LOGGER.info('Authenticated user ' + username) - request_handler.set_secure_cookie('username', username) + 
request_handler.set_secure_cookie('username', username, expires_days=self.authenticator.auth_expiration_days) path = tornado.escape.url_unescape(request_handler.get_argument('next', '/')) diff --git a/src/main.py b/src/main.py index d46bfbf8..ada497db 100644 --- a/src/main.py +++ b/src/main.py @@ -26,10 +26,12 @@ parser = argparse.ArgumentParser(description='Launch script-server.') parser.add_argument('-d', '--config-dir', default='conf') parser.add_argument('-f', '--config-file', default='conf.json') +parser.add_argument('-l', '--log-folder', default='logs') +parser.add_argument('-t', '--tmp-folder', default='temp') args = vars(parser.parse_args()) -TEMP_FOLDER = 'temp' -LOG_FOLDER = 'logs' +TEMP_FOLDER = args['tmp_folder'] +LOG_FOLDER = args['log_folder'] CONFIG_FOLDER = args['config_dir'] if os.path.isabs(args['config_file']): diff --git a/src/model/server_conf.py b/src/model/server_conf.py index 71d26ca5..6355b14b 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -156,6 +156,8 @@ def create_authenticator(auth_object, temp_folder): else: raise Exception(auth_type + ' auth is not supported') + authenticator.auth_expiration_days = float(auth_object.get('expiration_days')) if auth_object.get('expiration_days') is not None else 30 + authenticator.auth_type = auth_type return authenticator From 3ecaea6f02a917f40945443621ce0fceb177ad64 Mon Sep 17 00:00:00 2001 From: Dmitry Togushev Date: Thu, 18 Jun 2020 16:32:25 +0300 Subject: [PATCH 019/398] optimeze get 'expiration_days' param in server_config Changes to be committed: modified: src/model/server_conf.py --- src/model/server_conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model/server_conf.py b/src/model/server_conf.py index 6355b14b..2f6b1ac6 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -156,7 +156,7 @@ def create_authenticator(auth_object, temp_folder): else: raise Exception(auth_type + ' auth is not supported') - 
authenticator.auth_expiration_days = float(auth_object.get('expiration_days')) if auth_object.get('expiration_days') is not None else 30 + authenticator.auth_expiration_days = float(auth_object.get('expiration_days', 30)) authenticator.auth_type = auth_type From 6f3f1c411e47af424edc2b02e1a9b1c573ad2465 Mon Sep 17 00:00:00 2001 From: Daniel Rehelis Date: Mon, 22 Jun 2020 21:59:10 +0300 Subject: [PATCH 020/398] allow auth.username and auth.audit_name in script values and descriptions --- src/model/parameter_config.py | 8 ++++---- src/model/script_config.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/model/parameter_config.py b/src/model/parameter_config.py index ec7f326b..d5487895 100644 --- a/src/model/parameter_config.py +++ b/src/model/parameter_config.py @@ -67,7 +67,7 @@ def _reload(self): self.repeat_param = read_bool_from_config('repeat_param', config, default=True) self.env_var = config.get('env_var') self.no_value = read_bool_from_config('no_value', config, default=False) - self.description = config.get('description') + self.description = resolve_default(config.get('description'), self._username, self._audit_name, self._working_dir) self.required = read_bool_from_config('required', config, default=False) self.min = config.get('min') self.max = config.get('max') @@ -75,7 +75,7 @@ def _reload(self): self.separator = config.get('separator', ',') self.multiple_arguments = read_bool_from_config('multiple_arguments', config, default=False) self.same_arg_param = read_bool_from_config('same_arg_param', config, default=False) - self.default = _resolve_default(config.get('default'), self._username, self._audit_name, self._working_dir) + self.default = resolve_default(config.get('default'), self._username, self._audit_name, self._working_dir) self.file_dir = _resolve_file_dir(config, 'file_dir') self._list_files_dir = _resolve_list_files_dir(self.file_dir, self._working_dir) self.file_extensions = _resolve_file_extensions(config, 
'file_extensions') @@ -180,7 +180,7 @@ def _create_values_provider(self, values_config, type, constant): return ConstValuesProvider(values_config) elif 'script' in values_config: - script = values_config['script'] + script = resolve_default(values_config['script'], self._username, self._audit_name, self._working_dir) if '${' not in script: return ScriptValuesProvider(script) @@ -400,7 +400,7 @@ def _build_list_file_path(self, child_path): return os.path.normpath(os.path.join(self._list_files_dir, *child_path)) -def _resolve_default(default, username, audit_name, working_dir): +def resolve_default(default, username, audit_name, working_dir): if not default: return default diff --git a/src/model/script_config.py b/src/model/script_config.py index 608f00b1..67bc976a 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -8,7 +8,7 @@ from model import parameter_config from model.model_helper import is_empty, fill_parameter_values, read_bool_from_config, InvalidValueException, \ read_str_from_config -from model.parameter_config import ParameterModel +from model.parameter_config import ParameterModel, resolve_default from react.properties import ObservableList, ObservableDict, observable_fields, Property from utils import file_utils from utils.object_utils import merge_dicts @@ -152,8 +152,8 @@ def _reload_config(self): config = merge_dicts(self._original_config, self._included_config, ignored_keys=['parameters']) self.script_command = config.get('script_path') - self.description = config.get('description') self.working_directory = config.get('working_directory') + self.description = resolve_default(config.get('description'), self._username, self._audit_name, self.working_directory) required_terminal = read_bool_from_config('requires_terminal', config, default=self._pty_enabled_default) self.requires_terminal = required_terminal From 1f5cb51e2365af258562386085a5253447d6e069 Mon Sep 17 00:00:00 2001 From: Daniel Rehelis Date: Mon, 22 Jun 2020 22:10:40 
+0300 Subject: [PATCH 021/398] fixing tests --- src/tests/parameter_config_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tests/parameter_config_test.py b/src/tests/parameter_config_test.py index b446e122..433ee775 100644 --- a/src/tests/parameter_config_test.py +++ b/src/tests/parameter_config_test.py @@ -277,7 +277,7 @@ def test_script_value_with_shell_operators(self): @staticmethod def resolve_default(value, *, username=None, audit_name=None, working_dir=None): - return parameter_config._resolve_default(value, username, audit_name, working_dir) + return parameter_config.resolve_default(value, username, audit_name, working_dir) def setUp(self): test_utils.setup() From 50b093d4d7dd407e8d9d66e6e00068da0872a746 Mon Sep 17 00:00:00 2001 From: Daniel Rehelis Date: Wed, 24 Jun 2020 01:18:54 +0300 Subject: [PATCH 022/398] review fixes --- src/model/model_helper.py | 4 +++- src/model/parameter_config.py | 8 ++++---- src/model/script_config.py | 6 +++--- src/tests/parameter_config_test.py | 2 +- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/src/model/model_helper.py b/src/model/model_helper.py index 2c18f239..32328fd9 100644 --- a/src/model/model_helper.py +++ b/src/model/model_helper.py @@ -189,7 +189,9 @@ def fill_parameter_values(parameter_configs, template, values): def replace_auth_vars(text, username, audit_name): result = text - + if not result: + return + if not username: username = '' if not audit_name: diff --git a/src/model/parameter_config.py b/src/model/parameter_config.py index d5487895..c267e937 100644 --- a/src/model/parameter_config.py +++ b/src/model/parameter_config.py @@ -67,7 +67,7 @@ def _reload(self): self.repeat_param = read_bool_from_config('repeat_param', config, default=True) self.env_var = config.get('env_var') self.no_value = read_bool_from_config('no_value', config, default=False) - self.description = resolve_default(config.get('description'), self._username, self._audit_name, self._working_dir) 
+ self.description = replace_auth_vars(config.get('description'), self._username, self._audit_name) self.required = read_bool_from_config('required', config, default=False) self.min = config.get('min') self.max = config.get('max') @@ -75,7 +75,7 @@ def _reload(self): self.separator = config.get('separator', ',') self.multiple_arguments = read_bool_from_config('multiple_arguments', config, default=False) self.same_arg_param = read_bool_from_config('same_arg_param', config, default=False) - self.default = resolve_default(config.get('default'), self._username, self._audit_name, self._working_dir) + self.default = _resolve_default(config.get('default'), self._username, self._audit_name, self._working_dir) self.file_dir = _resolve_file_dir(config, 'file_dir') self._list_files_dir = _resolve_list_files_dir(self.file_dir, self._working_dir) self.file_extensions = _resolve_file_extensions(config, 'file_extensions') @@ -180,7 +180,7 @@ def _create_values_provider(self, values_config, type, constant): return ConstValuesProvider(values_config) elif 'script' in values_config: - script = resolve_default(values_config['script'], self._username, self._audit_name, self._working_dir) + script = replace_auth_vars(values_config['script'], self._username, self._audit_name) if '${' not in script: return ScriptValuesProvider(script) @@ -400,7 +400,7 @@ def _build_list_file_path(self, child_path): return os.path.normpath(os.path.join(self._list_files_dir, *child_path)) -def resolve_default(default, username, audit_name, working_dir): +def _resolve_default(default, username, audit_name, working_dir): if not default: return default diff --git a/src/model/script_config.py b/src/model/script_config.py index 67bc976a..1257c834 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -7,8 +7,8 @@ from auth.authorization import ANY_USER from model import parameter_config from model.model_helper import is_empty, fill_parameter_values, read_bool_from_config, 
InvalidValueException, \ - read_str_from_config -from model.parameter_config import ParameterModel, resolve_default + read_str_from_config, replace_auth_vars +from model.parameter_config import ParameterModel from react.properties import ObservableList, ObservableDict, observable_fields, Property from utils import file_utils from utils.object_utils import merge_dicts @@ -152,8 +152,8 @@ def _reload_config(self): config = merge_dicts(self._original_config, self._included_config, ignored_keys=['parameters']) self.script_command = config.get('script_path') + self.description = replace_auth_vars(config.get('description'), self._username, self._audit_name) self.working_directory = config.get('working_directory') - self.description = resolve_default(config.get('description'), self._username, self._audit_name, self.working_directory) required_terminal = read_bool_from_config('requires_terminal', config, default=self._pty_enabled_default) self.requires_terminal = required_terminal diff --git a/src/tests/parameter_config_test.py b/src/tests/parameter_config_test.py index 433ee775..b446e122 100644 --- a/src/tests/parameter_config_test.py +++ b/src/tests/parameter_config_test.py @@ -277,7 +277,7 @@ def test_script_value_with_shell_operators(self): @staticmethod def resolve_default(value, *, username=None, audit_name=None, working_dir=None): - return parameter_config.resolve_default(value, username, audit_name, working_dir) + return parameter_config._resolve_default(value, username, audit_name, working_dir) def setUp(self): test_utils.setup() From 9d87b8232c314e0965022c5724f2003398414382 Mon Sep 17 00:00:00 2001 From: Daniel Rehelis Date: Wed, 24 Jun 2020 12:04:22 +0300 Subject: [PATCH 023/398] review fixes #2 --- src/model/model_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model/model_helper.py b/src/model/model_helper.py index 32328fd9..61fc5091 100644 --- a/src/model/model_helper.py +++ b/src/model/model_helper.py @@ -190,7 +190,7 @@ def 
fill_parameter_values(parameter_configs, template, values): def replace_auth_vars(text, username, audit_name): result = text if not result: - return + return result if not username: username = '' From 7ff1063ce744db61f928b8a3a078c2ed0e1e98fd Mon Sep 17 00:00:00 2001 From: Daniel Rehelis Date: Tue, 14 Jul 2020 01:35:45 +0300 Subject: [PATCH 024/398] Enable searchbox support on multiselect combobox --- web-src/src/common/components/combobox.vue | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web-src/src/common/components/combobox.vue b/web-src/src/common/components/combobox.vue index 8d21c041..79585a7f 100644 --- a/web-src/src/common/components/combobox.vue +++ b/web-src/src/common/components/combobox.vue @@ -48,7 +48,7 @@ computed: { searchEnabled() { - return !this.disabled && !this.config.multiselect && (this.options.length > 10); + return !this.disabled && (this.options.length > 10); } }, @@ -275,6 +275,6 @@ background-color: white; position: sticky; top: 0; + z-index: 1; } - From 519d2172abe4a2fd49888e98e48a905f8a70a58f Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sun, 26 Jul 2020 17:42:49 +0200 Subject: [PATCH 025/398] #180 added possibility to schedule scripts --- src/auth/user.py | 10 + src/files/user_file_storage.py | 6 +- src/main.py | 4 + src/model/external_model.py | 11 + src/model/model_helper.py | 21 +- src/model/parameter_config.py | 4 +- src/model/script_config.py | 16 + src/scheduling/__init__.py | 0 src/scheduling/schedule_config.py | 153 ++++++++ src/scheduling/schedule_service.py | 177 +++++++++ src/scheduling/scheduling_job.py | 35 ++ src/tests/__init__.py | 0 src/tests/date_utils_test.py | 167 +++++++- src/tests/external_model_test.py | 43 +- src/tests/file_utils_test.py | 20 + src/tests/model_helper_test.py | 37 ++ src/tests/scheduling/__init__.py | 0 src/tests/scheduling/schedule_config_test.py | 128 ++++++ src/tests/scheduling/schedule_service_test.py | 369 +++++++++++++++++ src/tests/scheduling/scheduling_job_test.py 
| 36 ++ src/tests/script_config_test.py | 41 +- src/tests/test_utils.py | 15 +- src/tests/web/server_test.py | 1 + src/utils/date_utils.py | 43 +- src/utils/file_utils.py | 4 +- src/web/server.py | 45 ++- .../materializecss/material-datepicker.css | 3 + .../css/materializecss/material-textfield.css | 15 + .../common/components/PromisableButton.vue | 43 +- web-src/src/common/components/combobox.vue | 12 +- .../common/components/inputs/DatePicker.vue | 76 ++++ .../common/components/inputs/TimePicker.vue | 96 +++++ web-src/src/common/components/textfield.vue | 1 + .../materializecss/color_variables.scss | 31 -- .../materializecss/imports/datepicker.js | 8 + .../common/materializecss/imports/modal.js | 1 + web-src/src/common/style_imports.js | 5 +- .../components/schedule/SchedulePanel.vue | 370 ++++++++++++++++++ .../components/schedule/ToggleDayButton.vue | 48 +++ .../schedule/schedulePanelFields.js | 11 + .../components/scripts/ScheduleButton.vue | 74 ++++ .../scripts/ScriptViewScheduleHolder.vue | 121 ++++++ .../scripts/script-parameters-view.vue | 16 - .../components/scripts/script-view.vue | 117 +++++- web-src/src/main-app/store/index.js | 4 +- web-src/src/main-app/store/mainStoreHelper.js | 18 + .../main-app/store/scriptExecutionManager.js | 14 +- web-src/src/main-app/store/scriptSchedule.js | 28 ++ web-src/tests/unit/combobox_test.js | 20 +- .../components/inputs/TimePicker_test.js | 138 +++++++ web-src/vue.config.js | 1 - 51 files changed, 2535 insertions(+), 122 deletions(-) create mode 100644 src/scheduling/__init__.py create mode 100644 src/scheduling/schedule_config.py create mode 100644 src/scheduling/schedule_service.py create mode 100644 src/scheduling/scheduling_job.py mode change 100755 => 100644 src/tests/__init__.py create mode 100644 src/tests/file_utils_test.py create mode 100755 src/tests/scheduling/__init__.py create mode 100644 src/tests/scheduling/schedule_config_test.py create mode 100644 src/tests/scheduling/schedule_service_test.py create 
mode 100644 src/tests/scheduling/scheduling_job_test.py create mode 100644 web-src/src/assets/css/materializecss/material-datepicker.css create mode 100644 web-src/src/assets/css/materializecss/material-textfield.css create mode 100644 web-src/src/common/components/inputs/DatePicker.vue create mode 100644 web-src/src/common/components/inputs/TimePicker.vue delete mode 100644 web-src/src/common/materializecss/color_variables.scss create mode 100644 web-src/src/common/materializecss/imports/datepicker.js create mode 100644 web-src/src/main-app/components/schedule/SchedulePanel.vue create mode 100644 web-src/src/main-app/components/schedule/ToggleDayButton.vue create mode 100644 web-src/src/main-app/components/schedule/schedulePanelFields.js create mode 100644 web-src/src/main-app/components/scripts/ScheduleButton.vue create mode 100644 web-src/src/main-app/components/scripts/ScriptViewScheduleHolder.vue create mode 100644 web-src/src/main-app/store/mainStoreHelper.js create mode 100644 web-src/src/main-app/store/scriptSchedule.js create mode 100644 web-src/tests/unit/common/components/inputs/TimePicker_test.js diff --git a/src/auth/user.py b/src/auth/user.py index d8889c8d..51242c4e 100644 --- a/src/auth/user.py +++ b/src/auth/user.py @@ -18,3 +18,13 @@ def __str__(self) -> str: return self.audit_names.get(AUTH_USERNAME) return str(self.audit_names) + + def as_serializable_dict(self): + return { + 'user_id': self.user_id, + 'audit_names': self.audit_names + } + + +def from_serialized_dict(dict): + return User(dict['user_id'], dict['audit_names']) diff --git a/src/files/user_file_storage.py b/src/files/user_file_storage.py index 6a533b99..4acaf78f 100644 --- a/src/files/user_file_storage.py +++ b/src/files/user_file_storage.py @@ -6,8 +6,8 @@ import shutil import threading -from utils import file_utils -from utils.date_utils import get_current_millis, datetime_now, ms_to_datetime +from utils import file_utils, date_utils +from utils.date_utils import 
get_current_millis, ms_to_datetime LOGGER = logging.getLogger('script_server.user_file_storage') @@ -55,7 +55,7 @@ def clean_results(): millis = int(timed_folder) folder_date = ms_to_datetime(millis) - now = datetime_now() + now = date_utils.now() if (now - folder_date) > datetime.timedelta(milliseconds=lifetime_ms): folder_path = os.path.join(parent_folder, user_folder, timed_folder) diff --git a/src/main.py b/src/main.py index ada497db..714dc943 100644 --- a/src/main.py +++ b/src/main.py @@ -18,6 +18,7 @@ from features.file_upload_feature import FileUploadFeature from files.user_file_storage import UserFileStorage from model import server_conf +from scheduling.schedule_service import ScheduleService from utils import tool_utils, file_utils from utils.tool_utils import InvalidWebBuildException from web import server @@ -122,11 +123,14 @@ def main(): executions_callback_feature = ExecutionsCallbackFeature(execution_service, server_config.callbacks_config) executions_callback_feature.start() + schedule_service = ScheduleService(config_service, execution_service, CONFIG_FOLDER) + server.init( server_config, server_config.authenticator, authorizer, execution_service, + schedule_service, execution_logging_service, config_service, alerts_service, diff --git a/src/model/external_model.py b/src/model/external_model.py index daf32a7d..2287e100 100644 --- a/src/model/external_model.py +++ b/src/model/external_model.py @@ -23,6 +23,7 @@ def config_to_external(config, id): 'id': id, 'name': config.name, 'description': config.description, + 'schedulable': config.schedulable, 'parameters': parameters } @@ -112,3 +113,13 @@ def server_conf_to_external(server_config, server_version): 'enableScriptTitles': server_config.enable_script_titles, 'version': server_version } + + +def parse_external_schedule(external_schedule): + return { + 'repeatable': external_schedule.get('repeatable'), + 'start_datetime': external_schedule.get('startDatetime'), + 'repeat_unit': 
external_schedule.get('repeatUnit'), + 'repeat_period': external_schedule.get('repeatPeriod'), + 'weekdays': external_schedule.get('weekDays') + } diff --git a/src/model/model_helper.py b/src/model/model_helper.py index 61fc5091..a12941cd 100644 --- a/src/model/model_helper.py +++ b/src/model/model_helper.py @@ -1,9 +1,11 @@ import logging import os import re +from datetime import datetime import utils.env_utils as env_utils from config.constants import FILE_TYPE_DIR, FILE_TYPE_FILE +from utils import date_utils from utils.string_utils import is_blank ENV_VAR_PREFIX = '$$' @@ -106,6 +108,20 @@ def read_bool_from_config(key, config_obj, *, default=None): raise Exception('"' + key + '" field should be true or false') +def read_datetime_from_config(key, config_obj, *, default=None): + value = config_obj.get(key) + if value is None: + return default + + if isinstance(value, datetime): + return value + + if isinstance(value, str): + return date_utils.parse_iso_datetime(value) + + raise InvalidValueTypeException('"' + key + '" field should be a datetime, but was ' + repr(value)) + + def read_bool(value): if isinstance(value, bool): return value @@ -191,7 +207,7 @@ def replace_auth_vars(text, username, audit_name): result = text if not result: return result - + if not username: username = '' if not audit_name: @@ -247,6 +263,9 @@ def __init__(self, param_name, validation_error) -> None: super().__init__(validation_error) self.param_name = param_name + def get_user_message(self): + return 'Invalid value for "' + self.param_name + '": ' + str(self) + class InvalidValueTypeException(Exception): def __init__(self, message) -> None: diff --git a/src/model/parameter_config.py b/src/model/parameter_config.py index c267e937..34b0525e 100644 --- a/src/model/parameter_config.py +++ b/src/model/parameter_config.py @@ -96,7 +96,7 @@ def _reload(self): self._reload_values() def _validate_config(self): - param_log_name = self._str_name() + param_log_name = self.str_name() if 
self.constant and not self.default: message = 'Constant should have default value specified' @@ -106,7 +106,7 @@ def _validate_config(self): if not self.file_dir: raise Exception('Parameter ' + param_log_name + ' has missing config file_dir') - def _str_name(self): + def str_name(self): names = (name for name in (self.name, self.param, self.description) if name) return next(names, 'unknown') diff --git a/src/model/script_config.py b/src/model/script_config.py index 1257c834..dfeb3b25 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -30,6 +30,7 @@ def __init__(self): 'working_directory', 'ansi_enabled', 'output_files', + 'schedulable', '_included_config') class ConfigModel: @@ -51,6 +52,7 @@ def __init__(self, self._username = username self._audit_name = audit_name + self.schedulable = False self.parameters = ObservableList() self.parameter_values = ObservableDict() @@ -63,6 +65,8 @@ def __init__(self, self._reload_config() + self.parameters.subscribe(self) + self._init_parameters(username, audit_name) if parameter_values is not None: @@ -163,6 +167,9 @@ def _reload_config(self): self.output_files = config.get('output_files', []) + if config.get('scheduling'): + self.schedulable = read_bool_from_config('enabled', config.get('scheduling'), default=False) + if not self.script_command: raise Exception('No script_path is specified for ' + self.name) @@ -204,6 +211,15 @@ def find_parameter(self, param_name): return parameter return None + def on_add(self, parameter, index): + if self.schedulable and parameter.secure: + LOGGER.warning( + 'Disabling schedulable functionality, because parameter ' + parameter.str_name() + ' is secure') + self.schedulable = False + + def on_remove(self, parameter): + pass + def _validate_parameter_configs(self): for parameter in self.parameters: parameter.validate_parameter_dependencies(self.parameters) diff --git a/src/scheduling/__init__.py b/src/scheduling/__init__.py new file mode 100644 index 00000000..e69de29b 
diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py new file mode 100644 index 00000000..35c3facb --- /dev/null +++ b/src/scheduling/schedule_config.py @@ -0,0 +1,153 @@ +from datetime import timezone, timedelta, datetime + +from model import model_helper +from utils import date_utils +from utils.string_utils import is_blank + +ALLOWED_WEEKDAYS = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] + + +def _read_start_datetime(incoming_schedule_config): + start_datetime = model_helper.read_datetime_from_config('start_datetime', incoming_schedule_config) + if start_datetime is None: + raise InvalidScheduleException('start_datetime is required') + return start_datetime + + +def _read_repeat_unit(incoming_schedule_config): + repeat_unit = incoming_schedule_config.get('repeat_unit') + if is_blank(repeat_unit): + raise InvalidScheduleException('repeat_unit is required for repeatable schedule') + + if repeat_unit.lower() not in ['hours', 'days', 'weeks', 'months']: + raise InvalidScheduleException('repeat_unit should be one of: hours, days, weeks, months') + + return repeat_unit.lower() + + +def _read_repeat_period(incoming_schedule_config): + period = model_helper.read_int_from_config('repeat_period', incoming_schedule_config, default=1) + if period <= 0: + raise InvalidScheduleException('repeat_period should be > 0') + return period + + +def read_repeatable_flag(incoming_schedule_config): + repeatable = model_helper.read_bool_from_config('repeatable', incoming_schedule_config) + if repeatable is None: + raise InvalidScheduleException('Missing "repeatable" field') + return repeatable + + +def read_weekdays(incoming_schedule_config): + weekdays = model_helper.read_list(incoming_schedule_config, 'weekdays') + if not weekdays: + raise InvalidScheduleException('At least one weekday should be specified') + weekdays = [day.lower().strip() for day in weekdays] + for day in weekdays: + if day not in ALLOWED_WEEKDAYS: + 
raise InvalidScheduleException('Unknown weekday: ' + day) + return sorted(weekdays, key=lambda x: ALLOWED_WEEKDAYS.index(x)) + + +def read_schedule_config(incoming_schedule_config): + repeatable = read_repeatable_flag(incoming_schedule_config) + start_datetime = _read_start_datetime(incoming_schedule_config) + + prepared_schedule_config = ScheduleConfig(repeatable, start_datetime) + if repeatable: + prepared_schedule_config.repeat_unit = _read_repeat_unit(incoming_schedule_config) + prepared_schedule_config.repeat_period = _read_repeat_period(incoming_schedule_config) + + if prepared_schedule_config.repeat_unit == 'weeks': + prepared_schedule_config.weekdays = read_weekdays(incoming_schedule_config) + + return prepared_schedule_config + + +class ScheduleConfig: + + def __init__(self, repeatable, start_datetime) -> None: + self.repeatable = repeatable + self.start_datetime = start_datetime # type: datetime + self.repeat_unit = None + self.repeat_period = None + self.weekdays = None + + def as_serializable_dict(self): + result = { + 'repeatable': self.repeatable, + 'start_datetime': date_utils.to_iso_string(self.start_datetime) + } + + if self.repeat_unit is not None: + result['repeat_unit'] = self.repeat_unit + + if self.repeat_period is not None: + result['repeat_period'] = self.repeat_period + + if self.weekdays is not None: + result['weekdays'] = self.weekdays + + return result + + def get_next_time(self): + if not self.repeatable: + return self.start_datetime + + if self.repeat_unit == 'hours': + next_time_func = lambda start, iteration_index: start + timedelta( + hours=self.repeat_period * iteration_index) + + get_initial_multiplier = lambda start: \ + ((now - start).seconds // 3600 + (now - start).days * 24) \ + // self.repeat_period + elif self.repeat_unit == 'days': + next_time_func = lambda start, iteration_index: start + timedelta(days=self.repeat_period * iteration_index) + get_initial_multiplier = lambda start: (now - start).days // self.repeat_period + 
elif self.repeat_unit == 'months': + next_time_func = lambda start, iteration_index: date_utils.add_months(start, + self.repeat_period * iteration_index) + get_initial_multiplier = lambda start: (now - start).days // 28 // self.repeat_period + elif self.repeat_unit == 'weeks': + start_weekday = self.start_datetime.weekday() + offset = 0 + for weekday in self.weekdays: + index = ALLOWED_WEEKDAYS.index(weekday) + if index < start_weekday: + offset += 1 + + def next_weekday(start: datetime, iteration_index): + weeks_multiplier = (iteration_index + offset) // len(self.weekdays) + next_weekday_index = (iteration_index + offset) % len(self.weekdays) + next_weekday_name = self.weekdays[next_weekday_index] + next_weekday = ALLOWED_WEEKDAYS.index(next_weekday_name) + + return start \ + + timedelta(weeks=self.repeat_period * weeks_multiplier) \ + + timedelta(days=(next_weekday - start.weekday())) + + next_time_func = next_weekday + + get_initial_multiplier = lambda start: (now - start).days // 7 // self.repeat_period * len( + self.weekdays) - 1 + else: + raise Exception('Unknown unit: ' + repr(self.repeat_unit)) + + now = date_utils.now(tz=timezone.utc) + max_iterations = 10000 + initial_multiplier = max(0, get_initial_multiplier(self.start_datetime)) + i = 0 + while True: + resolved_time = next_time_func(self.start_datetime, i + initial_multiplier) + if resolved_time >= now: + return resolved_time + + i += 1 + if i > max_iterations: + raise Exception('Endless loop in calc next time') + + +class InvalidScheduleException(Exception): + def __init__(self, message) -> None: + super().__init__(message) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py new file mode 100644 index 00000000..8bc352b1 --- /dev/null +++ b/src/scheduling/schedule_service.py @@ -0,0 +1,177 @@ +import json +import logging +import os +import sched +import threading +import time +from datetime import timedelta + +from auth.user import User +from config.config_service 
import ConfigService +from execution.execution_service import ExecutionService +from execution.id_generator import IdGenerator +from scheduling import scheduling_job +from scheduling.schedule_config import read_schedule_config, InvalidScheduleException +from scheduling.scheduling_job import SchedulingJob +from utils import file_utils, date_utils + +SCRIPT_NAME_KEY = 'script_name' +USER_KEY = 'user' +PARAM_VALUES_KEY = 'parameter_values' + +JOB_SCHEDULE_KEY = 'schedule' + +LOGGER = logging.getLogger('script_server.scheduling.schedule_service') + +_sleep = time.sleep + + +def restore_jobs(schedules_folder): + files = [file for file in os.listdir(schedules_folder) if file.endswith('.json')] + + job_dict = {} + ids = [] # list of ALL ids, including broken configs + + for file in files: + try: + content = file_utils.read_file(os.path.join(schedules_folder, file)) + job_json = json.loads(content) + ids.append(job_json['id']) + + job = scheduling_job.from_dict(job_json) + + job_dict[job.id] = job + except: + LOGGER.exception('Failed to parse schedule file: ' + file) + + return job_dict, ids + + +class ScheduleService: + + def __init__(self, + config_service: ConfigService, + execution_service: ExecutionService, + conf_folder): + self._schedules_folder = os.path.join(conf_folder, 'schedules') + file_utils.prepare_folder(self._schedules_folder) + + self._config_service = config_service + self._execution_service = execution_service + + (jobs, ids) = restore_jobs(self._schedules_folder) + self._scheduled_executions = jobs + self._id_generator = IdGenerator(ids) + self.stopped = False + + self.scheduler = sched.scheduler(timefunc=time.time) + self._start_scheduler() + + for job in jobs.values(): + self.schedule_job(job) + + def create_job(self, script_name, parameter_values, incoming_schedule_config, user: User): + if user is None: + raise InvalidUserException('User id is missing') + + config_model = self._config_service.load_config_model(script_name, user, parameter_values) + 
self.validate_script_config(config_model) + + schedule_config = read_schedule_config(incoming_schedule_config) + + if not schedule_config.repeatable and date_utils.is_past(schedule_config.start_datetime): + raise InvalidScheduleException('Start date should be in the future') + + id = self._id_generator.next_id() + + job = SchedulingJob(id, user, schedule_config, script_name, parameter_values) + + self.save_job(job) + + self.schedule_job(job) + + return id + + @staticmethod + def validate_script_config(config_model): + if not config_model.schedulable: + raise UnavailableScriptException(config_model.name + ' is not schedulable') + + for parameter in config_model.parameters: + if parameter.secure: + raise UnavailableScriptException( + 'Script contains secure parameters (' + parameter.str_name() + '), this is not supported') + + def schedule_job(self, job: SchedulingJob): + schedule = job.schedule + + if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): + return + + next_datetime = schedule.get_next_time() + LOGGER.info( + 'Scheduling ' + job.get_log_name() + ' at ' + next_datetime.astimezone(tz=None).strftime('%H:%M, %d %B %Y')) + + self.scheduler.enterabs(next_datetime.timestamp(), 1, self._execute_job, (job,)) + + def _execute_job(self, job: SchedulingJob): + LOGGER.info('Executing ' + job.get_log_name()) + + script_name = job.script_name + parameter_values = job.parameter_values + user = job.user + + try: + config = self._config_service.load_config_model(script_name, user, parameter_values) + self.validate_script_config(config) + + execution_id = self._execution_service.start_script(config, parameter_values, user.user_id, + user.audit_names) + LOGGER.info('Started script #' + str(execution_id) + ' for ' + job.get_log_name()) + except: + LOGGER.exception('Failed to execute ' + job.get_log_name()) + + self.schedule_job(job) + + def save_job(self, job: SchedulingJob): + user = job.user + script_name = job.script_name + + filename = 
file_utils.to_filename('%s_%s_%s.json' % (script_name, user.get_audit_name(), job.id)) + file_utils.write_file( + os.path.join(self._schedules_folder, filename), + json.dumps(job.as_serializable_dict(), indent=2)) + + def _start_scheduler(self): + def scheduler_loop(): + while not self.stopped: + try: + self.scheduler.run(blocking=False) + except: + LOGGER.exception('Failed to execute scheduled job') + + now = date_utils.now() + sleep_delta = timedelta(minutes=1) - timedelta(microseconds=now.microsecond, seconds=now.second) + _sleep(sleep_delta.total_seconds()) + + self.scheduling_thread = threading.Thread(daemon=True, target=scheduler_loop) + self.scheduling_thread.start() + + def _stop(self): + self.stopped = True + + def stopper(): + pass + + # just schedule the next execution to exit thread immediately + self.scheduler.enter(1, 0, stopper) + + +class InvalidUserException(Exception): + def __init__(self, message) -> None: + super().__init__(message) + + +class UnavailableScriptException(Exception): + def __init__(self, message) -> None: + super().__init__(message) diff --git a/src/scheduling/scheduling_job.py b/src/scheduling/scheduling_job.py new file mode 100644 index 00000000..cdccafba --- /dev/null +++ b/src/scheduling/scheduling_job.py @@ -0,0 +1,35 @@ +from auth import user +from auth.user import User +from scheduling import schedule_config +from scheduling.schedule_config import ScheduleConfig + + +class SchedulingJob: + def __init__(self, id, user, schedule_config, script_name, parameter_values) -> None: + self.id = str(id) + self.user = user # type: User + self.schedule = schedule_config # type: ScheduleConfig + self.script_name = script_name + self.parameter_values = parameter_values # type: dict + + def as_serializable_dict(self): + return { + 'id': self.id, + 'user': self.user.as_serializable_dict(), + 'schedule': self.schedule.as_serializable_dict(), + 'script_name': self.script_name, + 'parameter_values': self.parameter_values + } + + def 
get_log_name(self): + return 'Job#' + str(self.id) + '-' + self.script_name + + +def from_dict(job_as_dict): + id = job_as_dict['id'] + parsed_user = user.from_serialized_dict(job_as_dict['user']) + schedule = schedule_config.read_schedule_config(job_as_dict['schedule']) + script_name = job_as_dict['script_name'] + parameter_values = job_as_dict['parameter_values'] + + return SchedulingJob(id, parsed_user, schedule, script_name, parameter_values) diff --git a/src/tests/__init__.py b/src/tests/__init__.py old mode 100755 new mode 100644 diff --git a/src/tests/date_utils_test.py b/src/tests/date_utils_test.py index d4b8183c..a0af980c 100644 --- a/src/tests/date_utils_test.py +++ b/src/tests/date_utils_test.py @@ -1,5 +1,5 @@ import unittest -from datetime import datetime, timezone +from datetime import datetime, timezone, timedelta from utils import date_utils @@ -20,3 +20,168 @@ def test_astimezone_naive_before_dst(self): transformed_datetime = date_utils.astimezone(naive_datetime, timezone.utc) self.assertEqual(utc_datetime, transformed_datetime) + + +class TestParseIsoDatetime(unittest.TestCase): + def test_parse_correct_time(self): + parsed = date_utils.parse_iso_datetime('2020-07-10T15:30:59.123456Z') + expected = datetime(2020, 7, 10, 15, 30, 59, 123456, timezone.utc) + self.assertEqual(expected, parsed) + + def test_parse_wrong_time(self): + self.assertRaisesRegex( + ValueError, + 'does not match format', + date_utils.parse_iso_datetime, + '15:30:59 2020-07-10') + + +class TestToIsoString(unittest.TestCase): + def test_utc_time(self): + iso_string = date_utils.to_iso_string(datetime(2020, 7, 10, 15, 30, 59, 123456, timezone.utc)) + self.assertEqual('2020-07-10T15:30:59.123456Z', iso_string) + + def test_naive_time(self): + iso_string = date_utils.to_iso_string(datetime(2020, 7, 10, 15, 30, 59, 123456)) + self.assertEqual('2020-07-10T15:30:59.123456Z', iso_string) + + def test_local_time(self): + iso_string = date_utils.to_iso_string(datetime(2020, 7, 10, 15, 
30, 59, 123456, timezone(timedelta(hours=1)))) + self.assertEqual('2020-07-10T14:30:59.123456Z', iso_string) + + +class TestIsPast(unittest.TestCase): + def test_when_past_naive(self): + value = datetime(2020, 7, 10, 15, 30, 59, 123456) + + self.assertTrue(date_utils.is_past(value)) + + def test_when_past_utc(self): + value = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + + self.assertTrue(date_utils.is_past(value)) + + def test_when_future_naive(self): + value = datetime(2030, 7, 10, 15, 30, 59, 123456) + + self.assertFalse(date_utils.is_past(value)) + + def test_when_future_utc(self): + value = datetime(2030, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + + self.assertFalse(date_utils.is_past(value)) + + def test_when_now(self): + value = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + + date_utils._mocked_now = value + + self.assertFalse(date_utils.is_past(value)) + + def tearDown(self) -> None: + date_utils._mocked_now = None + + +class TestSecondsBetween(unittest.TestCase): + def test_small_positive_delta(self): + start = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + end = datetime(2020, 7, 10, 15, 33, 12, 123456, tzinfo=timezone.utc) + + seconds = date_utils.seconds_between(start, end) + self.assertEqual(133, seconds) + + def test_small_negative_delta(self): + start = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + end = datetime(2020, 7, 10, 15, 30, 13, 123456, tzinfo=timezone.utc) + + seconds = date_utils.seconds_between(start, end) + self.assertEqual(-46, seconds) + + def test_large_positive_delta(self): + start = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + end = datetime(2021, 2, 15, 17, 33, 12, 123456, tzinfo=timezone.utc) + + seconds = date_utils.seconds_between(start, end) + self.assertEqual(19015333, seconds) + + def test_large_negative_delta(self): + start = datetime(2020, 7, 10, 15, 30, 13, 123456, tzinfo=timezone.utc) + end = datetime(2019, 11, 29, 9, 30, 
59, 123456, tzinfo=timezone.utc) + + seconds = date_utils.seconds_between(start, end) + self.assertEqual(-19375154, seconds) + + def test_delta_with_microseconds(self): + start = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + end = datetime(2020, 7, 10, 15, 33, 12, 876543, tzinfo=timezone.utc) + + seconds = date_utils.seconds_between(start, end) + self.assertEqual(133.753087, seconds) + + +class TestAddMonths(unittest.TestCase): + def test_add_one_month(self): + original = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 1) + expected = datetime(2020, 8, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_4_months(self): + original = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 4) + expected = datetime(2020, 11, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_roll_next_year(self): + original = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 6) + expected = datetime(2021, 1, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_roll_multiple_years(self): + original = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 33) + expected = datetime(2023, 4, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_last_day_when_next_shorter(self): + original = datetime(2020, 7, 31, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 2) + expected = datetime(2020, 9, 30, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_last_day_when_next_same(self): + original = datetime(2020, 7, 31, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = 
date_utils.add_months(original, 1) + expected = datetime(2020, 8, 31, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_last_day_when_next_longer(self): + original = datetime(2020, 6, 30, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 2) + expected = datetime(2020, 8, 30, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_last_day_when_next_february(self): + original = datetime(2020, 7, 30, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 7) + expected = datetime(2021, 2, 28, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_add_months_to_last_day_when_next_leap_february(self): + original = datetime(2019, 7, 30, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, 7) + expected = datetime(2020, 2, 29, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_subtract_one_month(self): + original = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, -1) + expected = datetime(2020, 6, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) + + def test_subtract_months_to_prev_year(self): + original = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + added = date_utils.add_months(original, -10) + expected = datetime(2019, 9, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected, added) diff --git a/src/tests/external_model_test.py b/src/tests/external_model_test.py index 03c11a9c..f7e5ccd1 100644 --- a/src/tests/external_model_test.py +++ b/src/tests/external_model_test.py @@ -3,7 +3,8 @@ from datetime import datetime, timezone from execution.logging import HistoryEntry -from model.external_model import to_short_execution_log, to_long_execution_log, server_conf_to_external +from 
model.external_model import to_short_execution_log, to_long_execution_log, server_conf_to_external, \ + parse_external_schedule from model.server_conf import ServerConfig @@ -163,3 +164,43 @@ def test_config_with_none_values(self): self.assertIsNone(external_config.get('title')) self.assertIsNone(external_config.get('enableScriptTitles')) self.assertIsNone(external_config.get('version')) + + +class TestParseExternalSchedule(unittest.TestCase): + def test_parse_full_config(self): + parsed = parse_external_schedule( + {'repeatable': False, 'startDatetime': '2020-12-30', 'repeatUnit': 'days', 'repeatPeriod': 5, + 'weekDays': ['monday', 'Tuesday']}) + + self.assertDictEqual({ + 'repeatable': False, + 'start_datetime': '2020-12-30', + 'repeat_unit': 'days', + 'repeat_period': 5, + 'weekdays': ['monday', 'Tuesday']}, + parsed) + + def test_parse_partial_config(self): + parsed = parse_external_schedule( + {'repeatable': False, 'startDatetime': '2020-12-30'}) + + self.assertDictEqual({ + 'repeatable': False, + 'start_datetime': '2020-12-30', + 'repeat_unit': None, + 'repeat_period': None, + 'weekdays': None}, + parsed) + + def test_parse_unknown_field(self): + parsed = parse_external_schedule( + {'repeatable': False, + 'startDatetime': '2020-12-30', + 'anotherField': 'abc'}) + + self.assertDictEqual({ + 'repeatable': False, + 'start_datetime': '2020-12-30', + 'repeat_unit': None, + 'repeat_period': None, + 'weekdays': None}, parsed) diff --git a/src/tests/file_utils_test.py b/src/tests/file_utils_test.py new file mode 100644 index 00000000..5b5cac00 --- /dev/null +++ b/src/tests/file_utils_test.py @@ -0,0 +1,20 @@ +from unittest import TestCase + +from utils import os_utils, file_utils + + +class TestToFilename(TestCase): + def test_replace_special_characters_linux(self): + os_utils.set_linux() + + filename = file_utils.to_filename('!@#$%^&*()_+\|/?.<>,\'"') + self.assertEqual('!@#$%^&*()_+\\|_?.<>,\'"', filename) + + def test_replace_special_characters_windows(self): + 
os_utils.set_win() + + filename = file_utils.to_filename('!@#$%^&*()_+\|/?.<>,\'"') + self.assertEqual('!@#$%^&_()_+____.__,\'_', filename) + + def tearDown(self) -> None: + os_utils.reset_os() diff --git a/src/tests/model_helper_test.py b/src/tests/model_helper_test.py index f718b37e..ad231f0a 100644 --- a/src/tests/model_helper_test.py +++ b/src/tests/model_helper_test.py @@ -1,5 +1,6 @@ import os import unittest +from datetime import datetime, timezone from config.constants import FILE_TYPE_FILE, FILE_TYPE_DIR from model import model_helper @@ -400,3 +401,39 @@ def test_text_when_blank_to_none_and_blank_and_default(self): def test_text_when_int(self): self.assertRaisesRegex(InvalidValueTypeException, 'Invalid key1 value: string expected, but was: 5', read_str_from_config, {'key1': 5}, 'key1') + + +class TestReadDatetime(unittest.TestCase): + def test_datetime_value(self): + value = datetime.now() + actual_value = model_helper.read_datetime_from_config('p1', {'p1': value}) + self.assertEqual(value, actual_value) + + def test_string_value(self): + actual_value = model_helper.read_datetime_from_config('p1', {'p1': '2020-07-10T15:30:59.123456Z'}) + expected_value = datetime(2020, 7, 10, 15, 30, 59, 123456, tzinfo=timezone.utc) + self.assertEqual(expected_value, actual_value) + + def test_string_value_when_bad_format(self): + self.assertRaisesRegex( + ValueError, + 'does not match format', + model_helper.read_datetime_from_config, + 'p1', {'p1': '15:30:59 2020-07-10'}) + + def test_default_value_when_missing_key(self): + value = datetime.now() + actual_value = model_helper.read_datetime_from_config('p1', {'another_key': 'abc'}, default=value) + self.assertEqual(value, actual_value) + + def test_default_value_when_value_none(self): + value = datetime.now() + actual_value = model_helper.read_datetime_from_config('p1', {'p1': None}, default=value) + self.assertEqual(value, actual_value) + + def test_int_value(self): + self.assertRaisesRegex( + InvalidValueTypeException, 
+ 'should be a datetime', + model_helper.read_datetime_from_config, + 'p1', {'p1': 12345}) diff --git a/src/tests/scheduling/__init__.py b/src/tests/scheduling/__init__.py new file mode 100755 index 00000000..e69de29b diff --git a/src/tests/scheduling/schedule_config_test.py b/src/tests/scheduling/schedule_config_test.py new file mode 100644 index 00000000..e7499968 --- /dev/null +++ b/src/tests/scheduling/schedule_config_test.py @@ -0,0 +1,128 @@ +from unittest import TestCase + +from parameterized import parameterized + +from scheduling.schedule_config import ScheduleConfig +from utils import date_utils + + +def to_datetime(short_datetime_string): + dt_string = short_datetime_string + ':0.000000Z' + return date_utils.parse_iso_datetime(dt_string.replace(' ', 'T')) + + +class TestGetNextTime(TestCase): + @parameterized.expand([ + ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-19 16:13'), + ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'days', '2020-03-20 16:13'), + ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'), + ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'), + ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'), + ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'days', '2020-03-16 16:13'), + ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'days', '2020-03-19 16:13'), + ('2020-03-20 11:30', '2020-03-15 16:13', 2, 'days', '2020-03-21 16:13'), + ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'days', '2020-03-19 16:13'), + ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'days', '2020-03-20 16:13'), + ('2020-03-20 11:30', '2020-03-15 16:13', 24, 'days', '2020-04-08 16:13'), + ('2020-04-09 11:30', '2020-03-15 16:13', 24, 'days', '2020-05-02 16:13'), + ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-19 12:13'), + ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'hours', '2020-03-19 18:13'), + ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'), + ('2020-03-14 11:30', 
'2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'), + ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'), + ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'hours', '2020-03-15 17:13'), + # big difference between start and now + ('2023-08-29 16:14', '2020-03-15 16:13', 1, 'hours', '2023-08-29 17:13'), + ('2020-03-19 10:30', '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'), + ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'), + ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'hours', '2020-03-19 16:13'), + ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'hours', '2020-03-18 14:13'), + ('2020-03-20 11:30', '2020-03-15 16:13', 24, 'hours', '2020-03-20 16:13'), + ('2020-04-09 17:30', '2020-03-15 16:13', 24, 'hours', '2020-04-10 16:13'), + ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'), + ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'), + ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'), + ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'), + ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'), + ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'), + ('2020-04-01 16:11', '2020-03-31 16:13', 1, 'months', '2020-04-30 16:13'), + ('2021-01-31 20:00', '2021-01-31 16:13', 1, 'months', '2021-02-28 16:13'), # Roll to February + ('2020-01-31 20:00', '2020-01-31 16:13', 1, 'months', '2020-02-29 16:13'), # Roll to February leap year + ('2020-03-19 10:30', '2020-03-15 16:13', 2, 'months', '2020-05-15 16:13'), + ('2020-04-19 11:30', '2020-03-15 16:13', 2, 'months', '2020-05-15 16:13'), + ('2020-03-15 16:13', '2020-03-15 16:13', 2, 'months', '2020-03-15 16:13'), + ('2020-04-01 16:11', '2020-03-31 16:13', 2, 'months', '2020-05-31 16:13'), + ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'months', '2020-08-15 16:13'), + ('2020-08-18 11:30', '2020-03-15 16:13', 5, 'months', '2021-01-15 16:13'), + ('2021-01-18 
11:30', '2020-03-15 16:13', 5, 'months', '2021-06-15 16:13'), + ('2020-03-16 11:30', '2020-03-15 16:13', 13, 'months', '2021-04-15 16:13'), + ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']), + ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday']), + ('2020-03-16 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday']), + ('2020-03-16 16:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']), + ('2020-03-20 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']), + ('2020-04-04 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-06 16:13', ['monday', 'friday']), + ('2020-04-07 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'friday']), + ('2020-03-16 16:13', '2020-03-16 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday']), + ('2020-03-16 16:14', '2020-03-16 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']), + # Test for testing start date on different weekdays, now tuesday + ('2020-04-07 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-07 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-07 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-07 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-07 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-07 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-07 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), + # Test for testing start date on different weekdays, now thursday + ('2020-04-09 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-10 
16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-09 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-09 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-09 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-09 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-09 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-09 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), + # Test for testing start date on different weekdays, now saturday + ('2020-04-11 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-11 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-11 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-11 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-11 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-11 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-11 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + # Test for testing start date on different weekdays, now monday + ('2020-04-13 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-13 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-13 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-13 4:30', '2020-03-18 16:13', 1, 'weeks', 
'2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-13 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-13 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + ('2020-04-13 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), + # Test for testing start date on different weekdays, now wednesday, when larger interval + ('2020-09-16 1:30', '2020-03-14 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-09-16 2:30', '2020-03-15 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-09-16 3:30', '2020-03-16 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-09-16 4:30', '2020-03-17 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-09-16 5:30', '2020-03-18 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-09-16 6:30', '2020-03-19 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-09-16 7:30', '2020-03-20 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), + ('2020-03-16 16:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-18 16:13', ['wednesday']), + ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'weeks', '2020-03-23 16:13', ['monday', 'friday']), + ('2020-03-24 11:30', '2020-03-15 16:13', 2, 'weeks', '2020-03-27 16:13', ['monday', 'friday']), + ('2020-06-07 17:30', '2020-03-15 16:13', 2, 'weeks', '2020-06-15 16:13', ['monday', 'friday']), + ('2020-06-07 17:30', '2020-03-15 16:13', 2, 'weeks', '2020-06-16 16:13', ['tuesday', 'wednesday']), + ]) + def test_next_day_when_repeatable(self, now_dt, start, period, unit, expected, weekdays=None): + date_utils._mocked_now = to_datetime(now_dt) + + config = ScheduleConfig(True, to_datetime(start)) + config.repeat_period = period + config.repeat_unit = unit + config.weekdays = weekdays + + next_time = config.get_next_time() + 
self.assertEqual(to_datetime(expected), next_time) + + def tearDown(self) -> None: + super().tearDown() + + date_utils._mocked_now = None diff --git a/src/tests/scheduling/schedule_service_test.py b/src/tests/scheduling/schedule_service_test.py new file mode 100644 index 00000000..1ecff2b9 --- /dev/null +++ b/src/tests/scheduling/schedule_service_test.py @@ -0,0 +1,369 @@ +import json +import os +import time +from datetime import timedelta +from typing import Sequence +from unittest import TestCase +from unittest.mock import patch, ANY, MagicMock + +from auth.user import User +from scheduling import schedule_service +from scheduling.schedule_config import ScheduleConfig, InvalidScheduleException +from scheduling.schedule_service import ScheduleService, InvalidUserException, UnavailableScriptException +from scheduling.scheduling_job import SchedulingJob +from tests import test_utils +from utils import date_utils, audit_utils, file_utils + +mocked_now = date_utils.parse_iso_datetime('2020-07-24T12:30:59.000000Z') +mocked_now_epoch = mocked_now.timestamp() + + +class ScheduleServiceTestCase(TestCase): + def assert_schedule_calls(self, expected_job_time_pairs): + self.assertEqual(len(expected_job_time_pairs), len(self.scheduler_mock.enterabs.call_args_list)) + + for i, pair in enumerate(expected_job_time_pairs): + expected_time = date_utils.sec_to_datetime(pair[1]) + expected_job = pair[0] + + # the first item of call_args is actual arguments, passed to the method + args = self.scheduler_mock.enterabs.call_args_list[i][0] + + # we schedule job as enterabs(expected_time, priority, self._execute_job, (job,)) + # to get the job, we need to get the last arg, and extract the first parameter from it + schedule_method_args_tuple = args[3] + schedule_method_job_arg = schedule_method_args_tuple[0] + actual_time = date_utils.sec_to_datetime(args[0]) + + self.assertEqual(expected_time, actual_time) + self.assertDictEqual(expected_job.as_serializable_dict(), + 
schedule_method_job_arg.as_serializable_dict()) + + def mock_schedule_model_with_secure_param(self): + model = test_utils.create_config_model('some-name', parameters=[{'name': 'p1', 'secure': True}]) + model.schedulable = True + + self.config_service.load_config_model.side_effect = lambda a, b, c: model + + def setUp(self) -> None: + super().setUp() + + self.patcher = patch('sched.scheduler') + self.scheduler_mock = MagicMock() + self.patcher.start().return_value = self.scheduler_mock + + schedule_service._sleep = MagicMock() + schedule_service._sleep.side_effect = lambda x: time.sleep(0.001) + + self.unschedulable_scripts = set() + self.config_service = MagicMock() + self.config_service.load_config_model.side_effect = lambda name, b, c: test_utils.create_config_model( + name, + schedulable=name not in self.unschedulable_scripts) + + self.execution_service = MagicMock() + + self.schedule_service = ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + + date_utils._mocked_now = mocked_now + + test_utils.setup() + + def tearDown(self) -> None: + super().tearDown() + + test_utils.cleanup() + + date_utils._mocked_now = None + + self.schedule_service._stop() + self.schedule_service.scheduling_thread.join() + + schedule_service._sleep = time.sleep + + self.patcher.stop() + + +class TestScheduleServiceCreateJob(ScheduleServiceTestCase): + def test_create_job_when_single(self): + job_prototype = create_job() + job_id = self.call_create_job(job_prototype) + + self.assertEqual('1', job_id) + + job_prototype.id = job_id + self.verify_config_files([job_prototype]) + + def test_create_job_when_multiple(self): + jobs = [] + + for i in range(1, 3): + job_prototype = create_job( + user_id='user-' + str(i), + script_name='script-' + str(i), + repeatable=i % 2 == 1, + parameter_values={'p1': 'hi', 'p2': i}) + job_id = self.call_create_job(job_prototype) + + self.assertEqual(str(i), job_id) + + job_prototype.id = job_id + + jobs.append(job_prototype) 
+ + self.verify_config_files(jobs) + + def test_create_job_when_user_none(self): + self.assertRaisesRegex( + InvalidUserException, + 'User id is missing', + self.schedule_service.create_job, + 'abc', {}, {}, None) + + def test_create_job_when_not_schedulable(self): + job_prototype = create_job() + self.unschedulable_scripts.add('my_script_A') + + self.assertRaisesRegex( + UnavailableScriptException, + 'is not schedulable', + self.call_create_job, + job_prototype) + + def test_create_job_when_secure(self): + self.mock_schedule_model_with_secure_param() + + job_prototype = create_job() + + self.assertRaisesRegex( + UnavailableScriptException, + 'Script contains secure parameters', + self.call_create_job, + job_prototype) + + def test_create_job_when_non_repeatable_in_the_past(self): + job_prototype = create_job(repeatable=False, start_datetime=mocked_now - timedelta(seconds=1)) + + self.assertRaisesRegex( + InvalidScheduleException, + 'Start date should be in the future', + self.call_create_job, + job_prototype) + + def test_create_job_verify_scheduler_call_when_one_time(self): + job_prototype = create_job(id='1', repeatable=False, start_datetime=mocked_now + timedelta(seconds=97)) + self.call_create_job(job_prototype) + + self.assert_schedule_calls([(job_prototype, mocked_now_epoch + 97)]) + + def test_create_job_verify_timer_call_when_repeatable(self): + job_prototype = create_job(id='1', repeatable=True, start_datetime=mocked_now - timedelta(seconds=97)) + self.call_create_job(job_prototype) + + self.assert_schedule_calls([(job_prototype, mocked_now_epoch + 1468703)]) + + def call_create_job(self, job: SchedulingJob): + return self.schedule_service.create_job( + job.script_name, + job.parameter_values, + job.schedule.as_serializable_dict(), + job.user) + + def verify_config_files(self, expected_jobs: Sequence[SchedulingJob]): + expected_files = [get_job_filename(job) for job in expected_jobs] + + schedules_dir = os.path.join(test_utils.temp_folder, 'schedules') + 
test_utils.assert_dir_files(expected_files, schedules_dir, self) + + for job in expected_jobs: + job_path = os.path.join(schedules_dir, get_job_filename(job)) + content = file_utils.read_file(job_path) + restored_job = json.loads(content) + + self.assertEqual(restored_job, job.as_serializable_dict()) + + +class TestScheduleServiceInit(ScheduleServiceTestCase): + def test_no_config_folder(self): + test_utils.cleanup() + + schedule_service = ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assertEqual(schedule_service._scheduled_executions, {}) + self.assertEqual('1', schedule_service._id_generator.next_id()) + + def test_restore_multiple_configs(self): + job1 = create_job(id='11') + job2 = create_job(id=9) + job3 = create_job(id=3) + self.save_job(job1) + self.save_job(job2) + self.save_job(job3) + + schedule_service = ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assertSetEqual({'3', '9', '11'}, set(schedule_service._scheduled_executions.keys())) + self.assertEqual('12', schedule_service._id_generator.next_id()) + + def test_restore_configs_when_one_corrupted(self): + job1 = create_job(id='11', repeatable=None) + job2 = create_job(id=3) + self.save_job(job1) + self.save_job(job2) + + schedule_service = ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assertSetEqual({'3'}, set(schedule_service._scheduled_executions.keys())) + self.assertEqual('12', schedule_service._id_generator.next_id()) + + def test_schedule_on_restore_when_one_time(self): + job = create_job(id=3, repeatable=False, start_datetime=mocked_now + timedelta(minutes=3)) + self.save_job(job) + + ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assert_schedule_calls([(job, mocked_now_epoch + 180)]) + + def test_schedule_on_restore_when_one_time_in_past(self): + job = create_job(id=3, repeatable=False, start_datetime=mocked_now - 
timedelta(seconds=1)) + self.save_job(job) + + ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assert_schedule_calls([]) + + def test_schedule_on_restore_when_repeatable_in_future(self): + job = create_job(id=3, repeatable=True, start_datetime=mocked_now + timedelta(hours=3)) + self.save_job(job) + + ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assert_schedule_calls([(job, mocked_now_epoch + 1479600)]) + + def test_schedule_on_restore_when_repeatable_in_past(self): + job = create_job(id=3, repeatable=True, start_datetime=mocked_now + timedelta(days=2)) + self.save_job(job) + + ScheduleService(self.config_service, self.execution_service, test_utils.temp_folder) + self.assert_schedule_calls([(job, mocked_now_epoch + 1468800)]) + + def test_scheduler_runner(self): + original_runs_count = self.scheduler_mock.run.call_count + time.sleep(0.1) + step1_runs_count = self.scheduler_mock.run.call_count + + self.assertGreater(step1_runs_count, original_runs_count) + + time.sleep(0.1) + step2_runs_count = self.scheduler_mock.run.call_count + self.assertGreater(step2_runs_count, step1_runs_count) + + def test_scheduler_runner_when_stopped(self): + self.schedule_service._stop() + time.sleep(0.1) + original_runs_count = self.scheduler_mock.run.call_count + + time.sleep(0.1) + + final_runs_count = self.scheduler_mock.run.call_count + self.assertEqual(final_runs_count, original_runs_count) + + def save_job(self, job): + schedules_dir = os.path.join(test_utils.temp_folder, 'schedules') + path = os.path.join(schedules_dir, get_job_filename(job)) + content = json.dumps(job.as_serializable_dict()) + file_utils.write_file(path, content) + + +class TestScheduleServiceExecuteJob(ScheduleServiceTestCase): + def test_execute_simple_job(self): + job = create_job(id=1, repeatable=False, start_datetime=mocked_now - timedelta(seconds=1)) + + self.schedule_service._execute_job(job) + + 
self.execution_service.start_script.assert_called_once_with( + ANY, job.parameter_values, job.user.user_id, job.user.audit_names) + self.assert_schedule_calls([]) + + def test_execute_repeatable_job(self): + job = create_job(id=1, + repeatable=True, + start_datetime=mocked_now - timedelta(seconds=1), + repeat_unit='days', + repeat_period=1) + + self.schedule_service._execute_job(job) + + self.execution_service.start_script.assert_called_once_with( + ANY, job.parameter_values, job.user.user_id, job.user.audit_names) + self.assert_schedule_calls([(job, mocked_now_epoch + 86399)]) + + def test_execute_when_fails(self): + job = create_job(id=1, + repeatable=True, + start_datetime=mocked_now - timedelta(seconds=1), + repeat_unit='days', + repeat_period=1) + + self.execution_service.start_script.side_effect = Exception('Test exception') + self.schedule_service._execute_job(job) + + self.assert_schedule_calls([(job, mocked_now_epoch + 86399)]) + + def test_execute_when_not_schedulable(self): + job = create_job(id=1, + repeatable=True, + start_datetime=mocked_now - timedelta(seconds=1), + repeat_unit='days', + repeat_period=1) + + self.unschedulable_scripts.add(job.script_name) + + self.schedule_service._execute_job(job) + + self.execution_service.start_script.assert_not_called() + self.assert_schedule_calls([(job, mocked_now_epoch + 86399)]) + + def test_execute_when_has_secure_parameters(self): + job = create_job(id=1, + repeatable=True, + start_datetime=mocked_now - timedelta(seconds=1), + repeat_unit='days', + repeat_period=1) + + self.mock_schedule_model_with_secure_param() + + self.schedule_service._execute_job(job) + + self.execution_service.start_script.assert_not_called() + self.assert_schedule_calls([(job, mocked_now_epoch + 86399)]) + + +def create_job(id=None, + user_id='UserX', + script_name='my_script_A', + audit_names=None, + repeatable=True, + start_datetime=mocked_now + timedelta(seconds=5), + repeat_unit=None, + repeat_period=None, + weekdays=None, + 
parameter_values=None): + if audit_names is None: + audit_names = {audit_utils.HOSTNAME: 'my-host'} + + if repeatable and repeat_unit is None: + repeat_unit = 'weeks' + if repeatable and repeat_period is None: + repeat_period = 3 + + if weekdays is None and repeatable and repeat_unit == 'weeks': + weekdays = ['monday', 'wednesday'] + + if parameter_values is None: + parameter_values = {'p1': 987, 'param_2': ['hello', 'world']} + + schedule_config = ScheduleConfig(repeatable, start_datetime) + schedule_config.repeat_unit = repeat_unit + schedule_config.repeat_period = repeat_period + schedule_config.weekdays = weekdays + + return SchedulingJob(id, User(user_id, audit_names), schedule_config, script_name, parameter_values) + + +def get_job_filename(job): + return job.script_name + '_' + job.user.get_audit_name() + '_' + str(job.id) + '.json' diff --git a/src/tests/scheduling/scheduling_job_test.py b/src/tests/scheduling/scheduling_job_test.py new file mode 100644 index 00000000..c75a86f3 --- /dev/null +++ b/src/tests/scheduling/scheduling_job_test.py @@ -0,0 +1,36 @@ +import json +from datetime import datetime, timezone +from unittest import TestCase + +from auth.user import User +from scheduling.schedule_config import ScheduleConfig +from scheduling.scheduling_job import SchedulingJob, from_dict +from utils import audit_utils + + +class TestSchedulingJob(TestCase): + def test_serialize_deserialize(self): + user = User('user-X', {audit_utils.AUTH_USERNAME: 'user-X', audit_utils.HOSTNAME: 'localhost'}) + schedule_config = ScheduleConfig(True, start_datetime=datetime.now(tz=timezone.utc)) + schedule_config.repeat_unit = 'weeks' + schedule_config.repeat_period = 3 + schedule_config.weekdays = ['monday', 'wednesday'] + parameter_values = {'p1': 9, 'p2': ['A', 'C']} + + job = SchedulingJob(123, user, schedule_config, 'my_script', parameter_values) + + serialized = json.dumps(job.as_serializable_dict()) + restored_job = from_dict(json.loads(serialized)) + + 
self.assertEqual(job.id, restored_job.id) + self.assertEqual(job.script_name, restored_job.script_name) + self.assertEqual(job.parameter_values, restored_job.parameter_values) + + self.assertEqual(job.user.user_id, restored_job.user.user_id) + self.assertEqual(job.user.audit_names, restored_job.user.audit_names) + + self.assertEqual(job.schedule.repeatable, restored_job.schedule.repeatable) + self.assertEqual(job.schedule.start_datetime, restored_job.schedule.start_datetime) + self.assertEqual(job.schedule.repeat_period, restored_job.schedule.repeat_period) + self.assertEqual(job.schedule.repeat_unit, restored_job.schedule.repeat_unit) + self.assertEqual(job.schedule.weekdays, restored_job.schedule.weekdays) diff --git a/src/tests/script_config_test.py b/src/tests/script_config_test.py index 8d9eeded..eef59aa6 100644 --- a/src/tests/script_config_test.py +++ b/src/tests/script_config_test.py @@ -34,7 +34,8 @@ def test_create_full_config(self): 'working_directory': working_directory, 'requires_terminal': requires_terminal, 'bash_formatting': bash_formatting, - 'output_files': output_files}) + 'output_files': output_files, + 'scheduling': {'enabled': True}}) self.assertEqual(name, config_model.name) self.assertEqual(script_path, config_model.script_command) @@ -43,6 +44,7 @@ def test_create_full_config(self): self.assertEqual(requires_terminal, config_model.requires_terminal) self.assertEqual(bash_formatting, config_model.ansi_enabled) self.assertEqual(output_files, config_model.output_files) + self.assertTrue(config_model.schedulable) def test_create_with_parameter(self): config_model = _create_config_model('conf_p_1', parameters=[create_script_param_config('param1')]) @@ -805,6 +807,43 @@ def test_get_sorted_with_parameters(self): self.assertEqual(expected, config) +class SchedulableConfigTest(unittest.TestCase): + def test_create_with_schedulable_false(self): + config_model = _create_config_model('some-name', config={ + 'scheduling': {'enabled': False}}) + 
self.assertFalse(config_model.schedulable) + + def test_create_with_schedulable_default(self): + config_model = _create_config_model('some-name', config={}) + self.assertFalse(config_model.schedulable) + + def test_create_with_schedulable_true_and_secure_parameter(self): + config_model = _create_config_model('some-name', config={ + 'scheduling': {'enabled': True}, + 'parameters': [{'name': 'p1', 'secure': True}] + }) + self.assertFalse(config_model.schedulable) + + def test_create_with_schedulable_true_and_included_secure_parameter(self): + config_model = _create_config_model('some-name', config={ + 'scheduling': {'enabled': True}, + 'include': '${p1}', + 'parameters': [{'name': 'p1', 'secure': False}] + }) + another_path = test_utils.write_script_config( + {'parameters': [{'name': 'p2', 'secure': True}]}, + 'another_config') + + self.assertTrue(config_model.schedulable) + + config_model.set_param_value('p1', another_path) + + self.assertFalse(config_model.schedulable) + + def tearDown(self) -> None: + test_utils.cleanup() + + def _create_config_model(name, *, config=None, username=DEF_USERNAME, diff --git a/src/tests/test_utils.py b/src/tests/test_utils.py index b623ba14..887b89b4 100644 --- a/src/tests/test_utils.py +++ b/src/tests/test_utils.py @@ -4,6 +4,8 @@ import stat import threading import uuid +from copy import copy +from unittest.case import TestCase import utils.file_utils as file_utils import utils.os_utils as os_utils @@ -212,7 +214,8 @@ def create_config_model(name, *, parameter_values=None, script_command='ls', output_files=None, - requires_terminal=None): + requires_terminal=None, + schedulable=True): result_config = {} if config: @@ -232,6 +235,9 @@ def create_config_model(name, *, if requires_terminal is not None: result_config['requires_terminal'] = requires_terminal + if schedulable is not None: + result_config['scheduling'] = {'enabled': schedulable} + result_config['script_path'] = script_command return ConfigModel(result_config, path, 
username, audit_name, parameter_values=parameter_values) @@ -392,6 +398,13 @@ def get_argument(arg_name): return request_handler +def assert_dir_files(expected_files, dir_path, test_case: TestCase): + expected_files_sorted = sorted(copy(expected_files)) + actual_files = sorted(os.listdir(dir_path)) + + test_case.assertSequenceEqual(expected_files_sorted, actual_files) + + class _MockProcessWrapper(ProcessWrapper): def __init__(self, executor, command, working_directory, env_variables): super().__init__(command, working_directory, env_variables) diff --git a/src/tests/web/server_test.py b/src/tests/web/server_test.py index 94e6fae0..f392dd9c 100644 --- a/src/tests/web/server_test.py +++ b/src/tests/web/server_test.py @@ -112,6 +112,7 @@ def start_server(self, port, address): authorizer, None, None, + None, ConfigService(authorizer, self.conf_folder), None, None, diff --git a/src/utils/date_utils.py b/src/utils/date_utils.py index 3b14220f..16f74885 100644 --- a/src/utils/date_utils.py +++ b/src/utils/date_utils.py @@ -1,3 +1,4 @@ +import calendar import sys import time from datetime import datetime, timezone @@ -24,10 +25,6 @@ def sec_to_datetime(time_seconds): return datetime.fromtimestamp(time_seconds, tz=timezone.utc) -def datetime_now(): - return datetime.now(tz=timezone.utc) - - def astimezone(datetime_value, new_timezone): if (datetime_value.tzinfo is not None) or (sys.version_info >= (3, 6)): return datetime_value.astimezone(new_timezone) @@ -47,3 +44,41 @@ def days_to_ms(days): def ms_to_days(ms): return float(ms) / MS_IN_DAY + + +def parse_iso_datetime(date_str): + return datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=timezone.utc) + + +def to_iso_string(datetime_value: datetime): + if datetime_value.tzinfo is not None: + datetime_value = datetime_value.astimezone(timezone.utc) + + return datetime_value.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + + +def is_past(dt: datetime): + return now(tz=dt.tzinfo) > dt + + +def seconds_between(start: 
datetime, end: datetime): + delta = end - start + return delta.total_seconds() + + +def add_months(datetime_value: datetime, months): + month = datetime_value.month - 1 + months + year = datetime_value.year + month // 12 + month = month % 12 + 1 + day = min(datetime_value.day, calendar.monthrange(year, month)[1]) + return datetime_value.replace(year=year, month=month, day=day) + + +_mocked_now = None + + +def now(tz=timezone.utc): + if _mocked_now is not None: + return _mocked_now + + return datetime.now(tz) diff --git a/src/utils/file_utils.py b/src/utils/file_utils.py index bc258645..cf9c852b 100644 --- a/src/utils/file_utils.py +++ b/src/utils/file_utils.py @@ -172,9 +172,9 @@ def split_all(path): def to_filename(txt): if os_utils.is_win(): - return txt.replace(':', '-') + return re.sub('[<>:"/\\\\|?*]', '_', txt) - return txt + return txt.replace('/', '_') def create_unique_filename(preferred_path, retries=9999999): diff --git a/src/web/server.py b/src/web/server.py index 7a694e32..4fc75382 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -35,6 +35,7 @@ from model.parameter_config import WrongParameterUsageException from model.script_config import InvalidValueException, ParameterNotFoundException from model.server_conf import ServerConfig +from scheduling.schedule_service import ScheduleService, UnavailableScriptException, InvalidScheduleException from utils import audit_utils, tornado_utils, os_utils, env_utils from utils import file_utils as file_utils from utils.audit_utils import get_audit_name_from_request @@ -433,8 +434,7 @@ def prepare_download_url(self, file): @tornado.web.stream_request_body -class ScriptExecute(BaseRequestHandler): - +class StreamUploadRequestHandler(BaseRequestHandler): def __init__(self, application, request, **kwargs): super().__init__(application, request, **kwargs) @@ -457,6 +457,11 @@ def prepare(self): def data_received(self, chunk): self.form_reader.read(chunk) + +class ScriptExecute(StreamUploadRequestHandler): + def 
__init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + @inject_user def post(self, user): script_name = None @@ -800,6 +805,39 @@ def get(self, user, execution_id): self.write(json.dumps(long_log)) +@tornado.web.stream_request_body +class AddSchedule(StreamUploadRequestHandler): + + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + @inject_user + def post(self, user): + arguments = self.form_reader.values + execution_info = external_model.to_execution_info(arguments) + parameter_values = execution_info.param_values + + if self.form_reader.files: + for key, value in self.form_reader.files.items(): + parameter_values[key] = value.path + + schedule_config = json.loads(parameter_values['__schedule_config']) + del parameter_values['__schedule_config'] + + try: + id = self.application.schedule_service.create_job( + execution_info.script, + parameter_values, + external_model.parse_external_schedule(schedule_config), + user) + except (UnavailableScriptException, InvalidScheduleException) as e: + raise tornado.web.HTTPError(422, reason=str(e)) + except InvalidValueException as e: + raise tornado.web.HTTPError(422, reason=e.get_user_message()) + + self.write(json.dumps({'id': id})) + + def wrap_to_server_event(event_type, data): return json.dumps({ "event": event_type, @@ -863,6 +901,7 @@ def init(server_config: ServerConfig, authenticator, authorizer, execution_service: ExecutionService, + schedule_service: ScheduleService, execution_logging_service: ExecutionLoggingService, config_service: ConfigService, alerts_service: AlertsService, @@ -900,6 +939,7 @@ def init(server_config: ServerConfig, (r'/executions/status/(.*)', GetExecutionStatus), (r'/history/execution_log/short', GetShortHistoryEntriesHandler), (r'/history/execution_log/long/(.*)', GetLongHistoryEntryHandler), + (r'/schedule', AddSchedule), (r'/auth/info', AuthInfoHandler), (r'/result_files/(.*)', 
DownloadResultFile, @@ -934,6 +974,7 @@ def init(server_config: ServerConfig, application.file_download_feature = file_download_feature application.file_upload_feature = file_upload_feature application.execution_service = execution_service + application.schedule_service = schedule_service application.execution_logging_service = execution_logging_service application.config_service = config_service application.alerts_service = alerts_service diff --git a/web-src/src/assets/css/materializecss/material-datepicker.css b/web-src/src/assets/css/materializecss/material-datepicker.css new file mode 100644 index 00000000..aab0871f --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-datepicker.css @@ -0,0 +1,3 @@ +.input-field.inline .datepicker-container .select-dropdown { + margin-bottom: 0; +} \ No newline at end of file diff --git a/web-src/src/assets/css/materializecss/material-textfield.css b/web-src/src/assets/css/materializecss/material-textfield.css new file mode 100644 index 00000000..9a9791cd --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-textfield.css @@ -0,0 +1,15 @@ +.input-field:after { + content: attr(data-error); + color: #F44336; + font-size: 0.9em; + display: block; + position: absolute; + top: 1.7em; + left: 0.8em; +} + +.input-field input[type="text"]:invalid, +.input-field input[type="number"]:invalid { + border-bottom: 1px solid #e51c23; + box-shadow: 0 1px 0 0 #e51c23; +} diff --git a/web-src/src/common/components/PromisableButton.vue b/web-src/src/common/components/PromisableButton.vue index 5d2ae0c8..6ad508d7 100644 --- a/web-src/src/common/components/PromisableButton.vue +++ b/web-src/src/common/components/PromisableButton.vue @@ -1,23 +1,23 @@ \ No newline at end of file diff --git a/web-src/src/common/components/inputs/TimePicker.vue b/web-src/src/common/components/inputs/TimePicker.vue new file mode 100644 index 00000000..5c300b0a --- /dev/null +++ b/web-src/src/common/components/inputs/TimePicker.vue @@ -0,0 
+1,96 @@ + + + + + \ No newline at end of file diff --git a/web-src/src/common/components/textfield.vue b/web-src/src/common/components/textfield.vue index 07faceda..9fcded86 100644 --- a/web-src/src/common/components/textfield.vue +++ b/web-src/src/common/components/textfield.vue @@ -17,6 +17,7 @@ import {isBlankString, isEmptyString, isNull} from '@/common/utils/common'; export default { + name: 'Textfield', props: { 'value': [String, Number], 'config': Object, diff --git a/web-src/src/common/materializecss/color_variables.scss b/web-src/src/common/materializecss/color_variables.scss deleted file mode 100644 index d8a15503..00000000 --- a/web-src/src/common/materializecss/color_variables.scss +++ /dev/null @@ -1,31 +0,0 @@ -@import "materialize-css/sass/components/_color-variables.scss"; - -$light-blue: ( - "darken-1": #039be5 -); - -$green: ( - "base": #4CAF50 -); - -$grey: ( - "base": #9e9e9e, - "lighten-2": #e0e0e0, - "lighten-1": #bdbdbd -); - -$orange: ( - "accent-2": #ffab40 -); - - -// override materialize css colors, because there are too many of them, and they are not used -$colors: ( - "materialize-red": $materialize-red, - "red": $red, - "teal": $teal, - "orange": $orange, - "grey": $grey, - "green": $green, - "light-blue": $light-blue -); \ No newline at end of file diff --git a/web-src/src/common/materializecss/imports/datepicker.js b/web-src/src/common/materializecss/imports/datepicker.js new file mode 100644 index 00000000..b1193cb7 --- /dev/null +++ b/web-src/src/common/materializecss/imports/datepicker.js @@ -0,0 +1,8 @@ +// DO NOT TOUCH ORDER +import './global' +import './modal' +import './select' + +import 'materialize-css/js/datepicker'; +import 'materialize-css/sass/components/_datepicker.scss'; +import '@/assets/css/materializecss/material-datepicker.css' \ No newline at end of file diff --git a/web-src/src/common/materializecss/imports/modal.js b/web-src/src/common/materializecss/imports/modal.js index d304424d..8f772784 100644 --- 
a/web-src/src/common/materializecss/imports/modal.js +++ b/web-src/src/common/materializecss/imports/modal.js @@ -1,5 +1,6 @@ // DO NOT TOUCH ORDER import './global' +import 'materialize-css/js/anime.min'; import 'materialize-css/js/modal'; import 'materialize-css/sass/components/_modal.scss'; \ No newline at end of file diff --git a/web-src/src/common/style_imports.js b/web-src/src/common/style_imports.js index 362f2443..0bfbc4c4 100644 --- a/web-src/src/common/style_imports.js +++ b/web-src/src/common/style_imports.js @@ -1,10 +1,11 @@ import '@/common/materializecss/imports/global' import 'material-design-icons/iconfont/material-icons.css'; import 'typeface-roboto'; - // DO NOT TOUCH ORDER +import "materialize-css/sass/components/_normalize.scss"; import 'materialize-css/sass/components/_color-classes.scss'; import 'materialize-css/sass/components/_grid.scss'; import 'materialize-css/sass/components/_buttons.scss'; import 'materialize-css/sass/components/_waves.scss'; -import '@/assets/css/materializecss/material-buttons.css'; \ No newline at end of file +import '@/assets/css/materializecss/material-buttons.css'; +import '@/assets/css/materializecss/material-textfield.css'; \ No newline at end of file diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue new file mode 100644 index 00000000..8b82b3b3 --- /dev/null +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -0,0 +1,370 @@ + + + + + \ No newline at end of file diff --git a/web-src/src/main-app/components/schedule/ToggleDayButton.vue b/web-src/src/main-app/components/schedule/ToggleDayButton.vue new file mode 100644 index 00000000..df211db9 --- /dev/null +++ b/web-src/src/main-app/components/schedule/ToggleDayButton.vue @@ -0,0 +1,48 @@ + + + + + \ No newline at end of file diff --git a/web-src/src/main-app/components/schedule/schedulePanelFields.js b/web-src/src/main-app/components/schedule/schedulePanelFields.js 
new file mode 100644 index 00000000..f7031a0d --- /dev/null +++ b/web-src/src/main-app/components/schedule/schedulePanelFields.js @@ -0,0 +1,11 @@ +export const repeatPeriodField = { + required: true, + type: 'int', + min: 1, + max: 50 +}; + +export const repeatTimeUnitField = { + type: 'list', + values: ['hours', 'days', 'weeks', 'months'] +} diff --git a/web-src/src/main-app/components/scripts/ScheduleButton.vue b/web-src/src/main-app/components/scripts/ScheduleButton.vue new file mode 100644 index 00000000..3811622f --- /dev/null +++ b/web-src/src/main-app/components/scripts/ScheduleButton.vue @@ -0,0 +1,74 @@ + + + + + \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptViewScheduleHolder.vue b/web-src/src/main-app/components/scripts/ScriptViewScheduleHolder.vue new file mode 100644 index 00000000..807f847e --- /dev/null +++ b/web-src/src/main-app/components/scripts/ScriptViewScheduleHolder.vue @@ -0,0 +1,121 @@ + + + + + \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/script-parameters-view.vue b/web-src/src/main-app/components/scripts/script-parameters-view.vue index 701b1a09..6d1245f9 100644 --- a/web-src/src/main-app/components/scripts/script-parameters-view.vue +++ b/web-src/src/main-app/components/scripts/script-parameters-view.vue @@ -102,22 +102,6 @@ padding-left: 28px; } - .script-parameters-panel >>> input[type="text"]:invalid, - .script-parameters-panel >>> input[type="number"]:invalid { - border-bottom: 1px solid #e51c23; - box-shadow: 0 1px 0 0 #e51c23; - } - - .script-parameters-panel >>> .input-field:after { - content: attr(data-error); - color: #F44336; - font-size: 0.9rem; - display: block; - position: absolute; - top: 23px; - left: 0.75rem; - } - .script-parameters-panel >>> .input-field .select-wrapper + label { transform: scale(0.8); top: -18px; diff --git a/web-src/src/main-app/components/scripts/script-view.vue b/web-src/src/main-app/components/scripts/script-view.vue 
index f8aa9b88..d7ca335d 100644 --- a/web-src/src/main-app/components/scripts/script-view.vue +++ b/web-src/src/main-app/components/scripts/script-view.vue @@ -5,7 +5,7 @@
+
+
@@ -27,7 +29,7 @@
  • {{ error }}
  • - + @@ -53,11 +59,13 @@ import FileDownloadIcon from '@/assets/file_download.png' import LogPanel from '@/common/components/log_panel' import {deepCloneObject, forEachKeyValue, isEmptyObject, isEmptyString, isNull} from '@/common/utils/common'; + import ScheduleButton from '@/main-app/components/scripts/ScheduleButton'; import ScriptLoadingText from '@/main-app/components/scripts/ScriptLoadingText'; import marked from 'marked'; import {mapActions, mapState} from 'vuex' import {STATUS_DISCONNECTED, STATUS_ERROR, STATUS_EXECUTING, STATUS_FINISHED} from '../../store/scriptExecutor'; import ScriptParametersView from './script-parameters-view' + import ScriptViewScheduleHolder from "@/main-app/components/scripts/ScriptViewScheduleHolder"; export default { data: function () { @@ -67,7 +75,9 @@ errors: [], nextLogIndex: 0, lastInlineImages: {}, - downloadIcon: FileDownloadIcon + downloadIcon: FileDownloadIcon, + scheduleMode: false, + scriptConfigComponentsHeight: 0 } }, @@ -82,13 +92,16 @@ components: { ScriptLoadingText, LogPanel, - ScriptParametersView + ScriptParametersView, + ScheduleButton, + ScriptViewScheduleHolder }, computed: { ...mapState('scriptConfig', { scriptDescription: state => state.scriptConfig ? 
state.scriptConfig.description : '', - loading: 'loading' + loading: 'loading', + scriptConfig: 'scriptConfig' }), ...mapState('scriptSetup', { parameterErrors: 'errors' @@ -128,6 +141,28 @@ }, enableExecuteButton() { + if (this.scheduleMode) { + return false; + } + + if (this.hideExecutionControls) { + return false; + } + + if (this.loading) { + return false; + } + + if (isNull(this.currentExecutor)) { + return true; + } + + return this.currentExecutor.state.status === STATUS_FINISHED + || this.currentExecutor.state.status === STATUS_DISCONNECTED + || this.currentExecutor.state.status === STATUS_ERROR; + }, + + enableScheduleButton() { if (this.hideExecutionControls) { return false; } @@ -168,7 +203,7 @@ }, showLog() { - return !isNull(this.currentExecutor); + return !isNull(this.currentExecutor) && !this.scheduleMode; }, downloadableFiles() { @@ -209,6 +244,10 @@ killEnabledTimeout() { return isNull(this.currentExecutor) ? null : this.currentExecutor.state.killTimeoutSec; + }, + + schedulable() { + return this.scriptConfig && this.scriptConfig.schedulable; } }, @@ -223,7 +262,7 @@ } }, - executeScript: function () { + validatePreExecution: function () { this.errors = []; const errors = this.parameterErrors; @@ -231,12 +270,29 @@ forEachKeyValue(errors, (paramName, error) => { this.errors.push(paramName + ': ' + error); }); + return false; + } + + return true; + }, + + executeScript: function () { + if (!this.validatePreExecution()) { return; } this.startExecution(); }, + openSchedule: function () { + if (!this.validatePreExecution()) { + return; + } + + this.$refs.scheduleHolder.open(); + this.scheduleMode = true; + }, + ...mapActions('executions', { startExecution: 'startExecution' }), @@ -332,6 +388,25 @@ this.lastInlineImages = deepCloneObject(newValue); } + }, + + scriptConfig: { + immediate: true, + handler() { + this.$nextTick(() => { + // 200 is a rough height for headers,buttons, description, etc. 
+ const otherElemsHeight = 200; + + if (isNull(this.$refs.parametersView)) { + this.scriptConfigComponentsHeight = otherElemsHeight; + return; + } + + const paramHeight = this.$refs.parametersView.$el.clientHeight; + + this.scriptConfigComponentsHeight = paramHeight + otherElemsHeight; + }) + } } } } @@ -366,21 +441,26 @@ } .actions-panel { + margin-top: 8px; display: flex; } - .button-execute { - flex: 6 1 5em; + .actions-panel > .button-gap { + flex: 3 1 1px; + } - margin-right: 0; - margin-top: 6px; + .button-execute { + flex: 4 1 312px; } .button-stop { - flex: 1 0 5em; + margin-left: 16px; + flex: 1 1 104px; + } - margin-left: 12px; - margin-top: 6px; + .schedule-button { + margin-left: 32px; + flex: 1 0 auto; } .script-input-panel { @@ -409,11 +489,16 @@ overflow-y: auto; flex: 1; - margin: 17px 12px 7px; + margin: 20px 0 8px; + } + + .validation-panel .header { + padding-left: 0; } .validation-errors-list { - margin-left: 17px; + margin-left: 12px; + margin-top: 8px; } .validation-errors-list li { diff --git a/web-src/src/main-app/store/index.js b/web-src/src/main-app/store/index.js index f9a30e2c..b8d62f23 100644 --- a/web-src/src/main-app/store/index.js +++ b/web-src/src/main-app/store/index.js @@ -5,6 +5,7 @@ import get from 'lodash/get'; import Vue from 'vue' import Vuex from 'vuex' import authModule from './auth'; +import scheduleModule from './scriptSchedule'; import pageModule from './page'; import scriptConfigModule from './scriptConfig'; @@ -25,7 +26,8 @@ const store = new Vuex.Store({ executions: scriptExecutionManagerModule, auth: authModule, history: historyModule(), - page: pageModule + page: pageModule, + scriptSchedule: scheduleModule }, actions: { init({dispatch}) { diff --git a/web-src/src/main-app/store/mainStoreHelper.js b/web-src/src/main-app/store/mainStoreHelper.js new file mode 100644 index 00000000..600d0fc1 --- /dev/null +++ b/web-src/src/main-app/store/mainStoreHelper.js @@ -0,0 +1,18 @@ +import {forEachKeyValue, isNull} from 
"@/common/utils/common"; + +export function parametersToFormData(parameterValues) { + const formData = new FormData(); + + forEachKeyValue(parameterValues, function (parameter, value) { + if (Array.isArray(value)) { + for (let i = 0; i < value.length; i++) { + const valueElement = value[i]; + formData.append(parameter, valueElement); + } + } else if (!isNull(value)) { + formData.append(parameter, value); + } + }); + + return formData; +} \ No newline at end of file diff --git a/web-src/src/main-app/store/scriptExecutionManager.js b/web-src/src/main-app/store/scriptExecutionManager.js index e25030cb..efff076e 100644 --- a/web-src/src/main-app/store/scriptExecutionManager.js +++ b/web-src/src/main-app/store/scriptExecutionManager.js @@ -3,6 +3,7 @@ import axios from 'axios'; import clone from 'lodash/clone'; import get from 'lodash/get'; import scriptExecutor, {STATUS_EXECUTING, STATUS_FINISHED, STATUS_INITIALIZING} from './scriptExecutor'; +import {parametersToFormData} from "@/main-app/store/mainStoreHelper"; export const axiosInstance = axios.create(); @@ -125,20 +126,9 @@ export default { const parameterValues = clone(rootState.scriptSetup.parameterValues); const scriptName = rootState.scriptConfig.scriptConfig.name; - var formData = new FormData(); + const formData = parametersToFormData(parameterValues); formData.append('__script_name', scriptName); - forEachKeyValue(parameterValues, function (parameter, value) { - if (Array.isArray(value)) { - for (let i = 0; i < value.length; i++) { - const valueElement = value[i]; - formData.append(parameter, valueElement); - } - } else if (!isNull(value)) { - formData.append(parameter, value); - } - }); - const executor = scriptExecutor(null, scriptName, parameterValues); store.registerModule(['executions', 'temp'], executor); store.dispatch('executions/temp/setInitialising'); diff --git a/web-src/src/main-app/store/scriptSchedule.js b/web-src/src/main-app/store/scriptSchedule.js new file mode 100644 index 
00000000..b03bf0c7 --- /dev/null +++ b/web-src/src/main-app/store/scriptSchedule.js @@ -0,0 +1,28 @@ +import axios from 'axios'; +import clone from 'lodash/clone'; +import {parametersToFormData} from "@/main-app/store/mainStoreHelper"; + +export const axiosInstance = axios.create(); + +export default { + state: {}, + namespaced: true, + actions: { + schedule({state, commit, dispatch, rootState}, {scheduleSetup}) { + const parameterValues = clone(rootState.scriptSetup.parameterValues); + const scriptName = rootState.scriptConfig.scriptConfig.name; + + const formData = parametersToFormData(parameterValues); + formData.append('__script_name', scriptName); + formData.append('__schedule_config', JSON.stringify(scheduleSetup)) + + return axiosInstance.post('schedule', formData) + .catch(e => { + if (e.response.status === 422) { + e.userMessage = e.response.data; + } + throw e; + }); + }, + } +} diff --git a/web-src/tests/unit/combobox_test.js b/web-src/tests/unit/combobox_test.js index 55eeea1c..faf57c0b 100644 --- a/web-src/tests/unit/combobox_test.js +++ b/web-src/tests/unit/combobox_test.js @@ -34,20 +34,25 @@ describe('Test ComboBox', function () { comboBox.destroy(); }); - function assertListElements(expectedTexts, searchHeader = false) { + function assertListElements(expectedTexts, searchHeader = false, showHeader = true) { const listChildren = comboBox.findAll('li'); - expect(listChildren).toHaveLength(expectedTexts.length + 1); + + const extraChildrenCount = showHeader ? 
1 : 0; + + expect(listChildren).toHaveLength(expectedTexts.length + extraChildrenCount); const headerText = listChildren.at(0).text(); if (!searchHeader) { - expect(headerText).toBe('Choose your option'); + if (showHeader) { + expect(headerText).toBe('Choose your option'); + } } else { expect(headerText.trim()).toBe('Search'); } for (let i = 0; i < expectedTexts.length; i++) { const value = expectedTexts[i]; - expect(listChildren.at(i + 1).text()).toBe(value); + expect(listChildren.at(i + extraChildrenCount).text()).toBe(value); } } @@ -123,6 +128,13 @@ describe('Test ComboBox', function () { assertListElements(values); }); + + it('Test hide header', async function () { + comboBox.setProps({showHeader: false}) + await vueTicks(); + + assertListElements(['Value A', 'Value B', 'Value C'], false, false); + }); }); describe('Test values', function () { diff --git a/web-src/tests/unit/common/components/inputs/TimePicker_test.js b/web-src/tests/unit/common/components/inputs/TimePicker_test.js new file mode 100644 index 00000000..552d78da --- /dev/null +++ b/web-src/tests/unit/common/components/inputs/TimePicker_test.js @@ -0,0 +1,138 @@ +'use strict'; + +import {mount} from '@vue/test-utils'; +import TimePicker from "@/common/components/inputs/TimePicker"; +import {vueTicks, wrapVModel} from "../../../test_utils"; + +describe('Test TimePicker', function () { + let timepicker; + + before(function () { + + }); + beforeEach(async function () { + timepicker = mount(TimePicker, { + propsData: { + label: 'Test picker', + value: '15:30', + required: true + } + }); + timepicker.vm.$parent.$forceUpdate(); + wrapVModel(timepicker); + + await vueTicks(); + }); + + afterEach(async function () { + await vueTicks(); + timepicker.destroy(); + }); + + after(function () { + }); + + describe('Test config', function () { + + it('Test initial props', function () { + expect(timepicker.find('label').text()).toBe('Test picker') + expect(timepicker.find('input').element.value).toBe('15:30') + 
expect(timepicker.vm.value).toBe('15:30') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test user changes time to 19:30', async function () { + timepicker.find('input').setValue('19:30'); + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('19:30') + expect(timepicker.vm.value).toBe('19:30') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test user changes time to 23:59', async function () { + timepicker.find('input').setValue('23:59'); + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('23:59') + expect(timepicker.vm.value).toBe('23:59') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test user changes time to 24:00', async function () { + timepicker.find('input').setValue('24:00'); + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('24:00') + expect(timepicker.vm.value).toBe('15:30') + expect(timepicker.vm.error).toBe('Format HH:MM') + expect(timepicker.currentError).toBe('Format HH:MM') + }); + + it('Test user changes time to 9:45', async function () { + timepicker.find('input').setValue('9:45'); + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('9:45') + expect(timepicker.vm.value).toBe('9:45') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test user changes time to 2:10', async function () { + timepicker.find('input').setValue('2:10'); + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('2:10') + expect(timepicker.vm.value).toBe('2:10') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test user changes time to 09:10', async function () { + timepicker.find('input').setValue('09:10'); + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('09:10') + expect(timepicker.vm.value).toBe('09:10') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test user changes time to 09:60', async function () { + timepicker.find('input').setValue('09:60'); + + await vueTicks(); + 
+ expect(timepicker.find('input').element.value).toBe('09:60') + expect(timepicker.vm.value).toBe('15:30') + expect(timepicker.vm.error).toBe('Format HH:MM') + expect(timepicker.currentError).toBe('Format HH:MM') + }); + + it('Test system changes time to 16:01', async function () { + timepicker.setProps({'value': '16:01'}) + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('16:01') + expect(timepicker.vm.value).toBe('16:01') + expect(timepicker.vm.error).toBeEmpty() + }); + + it('Test system changes time to 31:01', async function () { + timepicker.setProps({'value': '31:01'}) + + await vueTicks(); + + expect(timepicker.find('input').element.value).toBe('31:01') + expect(timepicker.vm.value).toBe('31:01') + expect(timepicker.vm.error).toBe('Format HH:MM') + expect(timepicker.currentError).toBe('Format HH:MM') + }); + + }); +}); \ No newline at end of file diff --git a/web-src/vue.config.js b/web-src/vue.config.js index 57fb01c5..581adc21 100644 --- a/web-src/vue.config.js +++ b/web-src/vue.config.js @@ -44,7 +44,6 @@ module.exports = { scss: { prependData: '@import "./src/assets/css/color_variables.scss"; ' + '@import "materialize-css/sass/components/_variables.scss"; ' - + '@import "materialize-css/sass/components/_normalize.scss"; ' + '@import "materialize-css/sass/components/_global.scss"; ' + '@import "materialize-css/sass/components/_typography.scss"; ' } From 63d02ef7e3114f3f2ab797047a6fb11d1791b675 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sun, 9 Aug 2020 13:46:06 +0200 Subject: [PATCH 026/398] #305 added groups logging for OAuth --- src/auth/auth_abstract_oauth.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/auth/auth_abstract_oauth.py b/src/auth/auth_abstract_oauth.py index 71f5c00f..81210085 100644 --- a/src/auth/auth_abstract_oauth.py +++ b/src/auth/auth_abstract_oauth.py @@ -107,6 +107,7 @@ async def authenticate(self, request_handler): if self.group_support: user_groups = await 
self.fetch_user_groups(access_token) + LOGGER.info('Loaded groups for ' + user_email + ': ' + str(user_groups)) user_state.groups = user_groups now = time.time() @@ -255,6 +256,7 @@ async def _do_update_user_auth_async(self, username, user_state, access_token): if self.group_support: try: user_groups = await self.fetch_user_groups(access_token) + LOGGER.info('Updated groups for ' + username + ': ' + str(user_groups)) user_state.groups = user_groups except AuthFailureError: LOGGER.error('Failed to fetch user %s groups', username) From 61e89c3703263ca55a2da03f5cb7fd330f038b58 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 15 Aug 2020 14:09:38 +0200 Subject: [PATCH 027/398] fixed wrong default values --- .../common/components/PromisableButton.vue | 35 ++++++++++--------- .../common/components/inputs/DatePicker.vue | 29 ++++++++------- 2 files changed, 35 insertions(+), 29 deletions(-) diff --git a/web-src/src/common/components/PromisableButton.vue b/web-src/src/common/components/PromisableButton.vue index 6ad508d7..ade143f6 100644 --- a/web-src/src/common/components/PromisableButton.vue +++ b/web-src/src/common/components/PromisableButton.vue @@ -21,23 +21,26 @@ \ No newline at end of file From 249e2066881340f34e010a7f31cea7dbafd7b857 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 26 Sep 2020 14:04:20 +0200 Subject: [PATCH 034/398] #342 fixed not showing scripts in admin panel, if they are not allowed --- src/config/config_service.py | 10 +++++--- src/tests/config_service_test.py | 28 ++++++++++++++++++++--- src/web/server.py | 4 +++- web-src/src/admin/store/scripts-module.js | 2 +- 4 files changed, 36 insertions(+), 8 deletions(-) diff --git a/src/config/config_service.py b/src/config/config_service.py index 6cdc83cd..9ceb1c5a 100644 --- a/src/config/config_service.py +++ b/src/config/config_service.py @@ -92,7 +92,11 @@ def _save_config(self, config, path): config_json = json.dumps(sorted_config, indent=2) file_utils.write_file(path, config_json) - def 
list_configs(self, user): + def list_configs(self, user, mode=None): + edit_mode = mode == 'edit' + if edit_mode: + self._check_admin_access(user) + conf_service = self def load_script(path, content): @@ -103,7 +107,7 @@ def load_script(path, content): if short_config is None: return None - if not conf_service._can_access_script(user, short_config): + if (not edit_mode) and (not conf_service._can_access_script(user, short_config)): return None return short_config @@ -194,7 +198,7 @@ def _can_access_script(self, user, short_config): def _check_admin_access(self, user): if not self._authorizer.is_admin(user.user_id): - raise AdminAccessRequiredException('Access to script is prohibited for ' + str(user)) + raise AdminAccessRequiredException('Admin access to scripts is prohibited for ' + str(user)) class ConfigNotAllowedException(Exception): diff --git a/src/tests/config_service_test.py b/src/tests/config_service_test.py index a23767e7..29ddc9ef 100644 --- a/src/tests/config_service_test.py +++ b/src/tests/config_service_test.py @@ -126,6 +126,27 @@ def test_list_configs_when_none_allowed(self): self.assert_list_config_names(self.user1, []) + def test_list_configs_when_edit_mode_and_admin(self): + _create_script_config_file('a1', allowed_users=['adm_user']) + _create_script_config_file('c2', allowed_users=['adm_user']) + + self.assert_list_config_names(self.admin_user, ['a1', 'c2'], mode='edit') + + def test_list_configs_when_edit_mode_and_admin_without_allowance(self): + _create_script_config_file('a1', allowed_users=['user1']) + _create_script_config_file('c2', allowed_users=['adm_user']) + + self.assert_list_config_names(self.admin_user, ['a1', 'c2'], mode='edit') + + def test_list_configs_when_edit_mode_and_non_admin(self): + _create_script_config_file('a1', allowed_users=['user1']) + _create_script_config_file('c2', allowed_users=['user1']) + + self.assertRaises(AdminAccessRequiredException, + self.config_service.list_configs, + self.user1, + 'edit') + def 
test_load_config_when_user_allowed(self): _create_script_config_file('my_script', allowed_users=['ABC', 'user1', 'qwerty']) @@ -138,8 +159,8 @@ def test_load_config_when_user_not_allowed(self): self.assertRaises(ConfigNotAllowedException, self.config_service.load_config_model, 'my_script', self.user1) - def assert_list_config_names(self, user, expected_names): - configs = self.config_service.list_configs(user) + def assert_list_config_names(self, user, expected_names, mode=None): + configs = self.config_service.list_configs(user, mode) conf_names = [config.name for config in configs] self.assertCountEqual(expected_names, conf_names) @@ -151,8 +172,9 @@ def setUp(self): super().setUp() test_utils.setup() - authorizer = Authorizer([], [], [], EmptyGroupProvider()) + authorizer = Authorizer([], ['adm_user'], [], EmptyGroupProvider()) self.user1 = User('user1', {}) + self.admin_user = User('adm_user', {}) self.config_service = ConfigService(authorizer, test_utils.temp_folder) diff --git a/src/web/server.py b/src/web/server.py index a2e7314d..682385d3 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -209,7 +209,9 @@ class GetScripts(BaseRequestHandler): @check_authorization @inject_user def get(self, user): - configs = self.application.config_service.list_configs(user) + mode = self.get_query_argument('mode', default=None) + + configs = self.application.config_service.list_configs(user, mode) scripts = [{'name': conf.name, 'group': conf.group} for conf in configs] diff --git a/web-src/src/admin/store/scripts-module.js b/web-src/src/admin/store/scripts-module.js index d8f44d56..02b70da6 100644 --- a/web-src/src/admin/store/scripts-module.js +++ b/web-src/src/admin/store/scripts-module.js @@ -13,7 +13,7 @@ export default { init({commit}) { commit('SET_LOADING', true); - axiosInstance.get('scripts').then(({data}) => { + axiosInstance.get('scripts', {params: {mode: 'edit'}}).then(({data}) => { const {scripts} = data; let scriptNames = scripts.map(s => s.name); 
scriptNames.sort(function (name1, name2) { From bcc8de680b3556e1003e9a75963a68cb0fe41b01 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sun, 27 Sep 2020 12:39:42 +0200 Subject: [PATCH 035/398] #337 fixed LDAP authentication when user contains parentheses --- samples/ldap/bootstrap.ldif | 37 ++++++++++++++++++++++++++++ samples/ldap/start-ldap-docker.sh | 17 +++++++++++++ src/auth/auth_ldap.py | 40 +++++++++++++++++++++---------- src/tests/auth_ldap_test.py | 34 ++++++++++++++++++++++++++ 4 files changed, 116 insertions(+), 12 deletions(-) create mode 100644 samples/ldap/bootstrap.ldif create mode 100755 samples/ldap/start-ldap-docker.sh diff --git a/samples/ldap/bootstrap.ldif b/samples/ldap/bootstrap.ldif new file mode 100644 index 00000000..2484adb6 --- /dev/null +++ b/samples/ldap/bootstrap.ldif @@ -0,0 +1,37 @@ +dn: ou=People,dc=script-server,dc=net +objectClass: organizationalUnit +ou: People + +dn: uid=user1,ou=People,dc=script-server,dc=net +objectClass: inetOrgPerson +objectClass: posixAccount +cn: John Smith +sn: Smith +uid: user1 +uidNumber: 1000 +gidNumber: 1000 +homeDirectory: /home/user1 +userPassword: qwerty + +dn: uid=user with space,ou=People,dc=script-server,dc=net +objectClass: inetOrgPerson +cn: user with space +sn: Uws +uid: user with space +userPassword: 123 456 + +dn: uid=user (with brackets),ou=People,dc=script-server,dc=net +objectClass: inetOrgPerson +cn: user with brackets +sn: UwB +uid: user (with brackets) +userPassword: 666 + +dn: cn=all_users,dc=script-server,dc=net +objectClass: posixGroup +cn: all_users +description: All users group +gidNumber: 10000 +memberUid: user1 +memberUid: user with space +memberUid: user (with brackets) diff --git a/samples/ldap/start-ldap-docker.sh b/samples/ldap/start-ldap-docker.sh new file mode 100755 index 00000000..bcd8ea08 --- /dev/null +++ b/samples/ldap/start-ldap-docker.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +docker stop script-server-ldap +docker rm script-server-ldap + +set -e + +docker run \ +--name 
script-server-ldap \ +--env LDAP_ORGANISATION="Script server" \ +--env LDAP_DOMAIN="script-server.net" \ +--env LDAP_ADMIN_PASSWORD="admin_passw" \ +--volume "$PWD"/bootstrap.ldif:/container/service/slapd/assets/config/bootstrap/ldif/50-bootstrap.ldif \ +--detach \ +osixia/openldap:1.4.0 \ +--copy-service \ +--loglevel debug \ No newline at end of file diff --git a/src/auth/auth_ldap.py b/src/auth/auth_ldap.py index 5b645ed9..de717791 100644 --- a/src/auth/auth_ldap.py +++ b/src/auth/auth_ldap.py @@ -5,6 +5,7 @@ from ldap3 import Connection, SIMPLE from ldap3.core.exceptions import LDAPAttributeError +from ldap3.utils.conv import escape_filter_chars from auth import auth_base from model import model_helper @@ -38,19 +39,21 @@ def _resolve_base_dn(full_username): return '' -def _search(dn, search_filter, attributes, connection): - success = connection.search(dn, search_filter, attributes=attributes) +def _search(dn, search_request, attributes, connection): + search_string = search_request.as_search_string() + + success = connection.search(dn, search_string, attributes=attributes) if not success: if connection.last_error: LOGGER.warning('ldap search failed: ' + connection.last_error - + '. dn:' + dn + ', filter: ' + search_filter) + + '. 
dn:' + dn + ', filter: ' + search_string) return None return connection.entries -def _load_multiple_entries_values(dn, search_filter, attribute_name, connection): - entries = _search(dn, search_filter, [attribute_name], connection) +def _load_multiple_entries_values(dn, search_request, attribute_name, connection): + entries = _search(dn, search_request, [attribute_name], connection) if entries is None: return [] @@ -174,12 +177,13 @@ def _fetch_user_groups(self, user_dn, user_uid, connection): result = set() - result.update(_load_multiple_entries_values(base_dn, '(member=%s)' % user_dn, 'cn', connection)) + result.update( + _load_multiple_entries_values(base_dn, SearchRequest('(member=%s)', user_dn), 'cn', connection)) if user_uid: result.update(_load_multiple_entries_values( base_dn, - '(&(objectClass=posixGroup)(memberUid=%s))' % user_uid, + SearchRequest('(&(objectClass=posixGroup)(memberUid=%s))', user_uid), 'cn', connection)) @@ -191,23 +195,23 @@ def _get_user_ids(self, full_username, connection): username_lower = full_username.lower() if ',dc=' in username_lower: base_dn = username_lower - search_filter = '(objectClass=*)' + search_request = SearchRequest('(objectClass=*)') elif '@' in full_username: - search_filter = '(userPrincipalName=%s)' % full_username + search_request = SearchRequest('(userPrincipalName=%s)', full_username) elif '\\' in full_username: username_index = full_username.rfind('\\') + 1 username = full_username[username_index:] - search_filter = '(sAMAccountName=%s)' % username + search_request = SearchRequest('(sAMAccountName=%s)', username) else: LOGGER.warning('Unsupported username pattern for ' + full_username) return full_username, None - entries = _search(base_dn, search_filter, ['uid'], connection) + entries = _search(base_dn, search_request, ['uid'], connection) if not entries: return full_username, None if len(entries) > 1: - LOGGER.warning('More than one user found by filter: ' + search_filter) + LOGGER.warning('More than one user 
found by filter: ' + str(search_request)) return full_username, None entry = entries[0] @@ -225,3 +229,15 @@ def _set_user_groups(self, user, groups): new_groups_content = json.dumps(self._user_groups, indent=2) file_utils.write_file(self._groups_file, new_groups_content) + + +class SearchRequest: + def __init__(self, template, *variables) -> None: + escaped_vars = [escape_filter_chars(var) for var in variables] + self.search_string = template % tuple(escaped_vars) + + def as_search_string(self): + return self.search_string + + def __str__(self) -> str: + return self.as_search_string() diff --git a/src/tests/auth_ldap_test.py b/src/tests/auth_ldap_test.py index 21bb7c13..bb81275e 100644 --- a/src/tests/auth_ldap_test.py +++ b/src/tests/auth_ldap_test.py @@ -143,6 +143,24 @@ def test_load_multiple_groups_by_uid_when_dn_template(self): groups = self.auth_and_get_groups('user1', auth_wrapper) self.assertCountEqual(['group1', 'group2', 'group3'], groups) + def test_load_multiple_groups_by_uid_when_not_all_match(self): + auth_wrapper = _LdapAuthenticatorMockWrapper('cn=$username,cn=Users,dc=ldap,dc=test', 'dc=ldap,dc=test') + auth_wrapper.add_posix_user('user1', '1234', 'uid_X') + auth_wrapper.add_posix_group('group1', ['uid_X']) + auth_wrapper.add_posix_group('group2', ['uid_123']) + auth_wrapper.add_posix_group('group3', ['uid_X']) + + groups = self.auth_and_get_groups('user1', auth_wrapper) + self.assertCountEqual(['group1', 'group3'], groups) + + def test_load_single_group_by_uid_when_dn_has_parenthesss(self): + auth_wrapper = _LdapAuthenticatorMockWrapper('cn=$username,cn=Users,dc=ldap,dc=test', 'dc=ldap,dc=test') + auth_wrapper.add_posix_user('user (1)', '1234', 'uid (X)') + auth_wrapper.add_posix_group('group1', ['uid (X)']) + + groups = self.auth_and_get_groups('user (1)', auth_wrapper) + self.assertCountEqual(['group1'], groups) + def test_load_multiple_groups_by_member_and_uid_when_dn_template(self): auth_wrapper = 
_LdapAuthenticatorMockWrapper('cn=$username,cn=Users,dc=ldap,dc=test', 'dc=ldap,dc=test') auth_wrapper.add_posix_user('user1', '1234', 'uid_X') @@ -169,6 +187,14 @@ def test_load_single_group_by_member_when_sam_account_template(self): groups = self.auth_and_get_groups('user1', auth_wrapper) self.assertEqual(['group1'], groups) + def test_load_single_group_by_member_when_sam_account_template_with_parentheses(self): + auth_wrapper = _LdapAuthenticatorMockWrapper('some_domain\\$username', 'dc=some_domain,dc=test') + auth_wrapper.add_user('User (Noname)', '1234', sAMAccountName='user (1)') + auth_wrapper.add_group('group1', ['User (Noname)']) + + groups = self.auth_and_get_groups('user (1)', auth_wrapper) + self.assertEqual(['group1'], groups) + def test_load_single_group_by_uid_when_sam_account_template(self): auth_wrapper = _LdapAuthenticatorMockWrapper('some_domain\\$username', 'dc=some_domain,dc=test') auth_wrapper.add_posix_user('User Noname', '1234', 'uid_X', sAMAccountName='user1') @@ -185,6 +211,14 @@ def test_load_single_group_by_member_when_user_principal_template(self): groups = self.auth_and_get_groups('user1', auth_wrapper) self.assertEqual(['group1'], groups) + def test_load_single_group_by_member_when_user_principal_template_with_parentheses(self): + auth_wrapper = _LdapAuthenticatorMockWrapper('$username@buggy.net', 'dc=buggy,dc=net') + auth_wrapper.add_user('User (Noname)', '1234', userPrincipalName='user (1)@buggy.net') + auth_wrapper.add_group('group1', ['User (Noname)']) + + groups = self.auth_and_get_groups('user (1)', auth_wrapper) + self.assertEqual(['group1'], groups) + def test_cannot_load_group_when_same_principal_names(self): auth_wrapper = _LdapAuthenticatorMockWrapper('$username@buggy.net', 'dc=buggy,dc=net') auth_wrapper.add_user('user1', '1234', userPrincipalName='userX@buggy.net') From 4cec0c48b5db49927103a9e7225616896e267f7a Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 10 Oct 2020 20:03:15 +0200 Subject: [PATCH 036/398] #331 
added support for ip4 subnets in trusted_ips --- src/auth/identification.py | 9 ++++---- src/execution/logging.py | 4 ++-- src/model/__init__.py | 0 src/model/server_conf.py | 8 ++++--- src/model/trusted_ips.py | 19 ++++++++++++++++ src/tests/ip_idenfication_test.py | 35 +++++++++++++++-------------- src/tests/model/__init__.py | 0 src/tests/model/test_trusted_ips.py | 31 +++++++++++++++++++++++++ src/web/server.py | 2 +- 9 files changed, 81 insertions(+), 27 deletions(-) create mode 100644 src/model/__init__.py create mode 100644 src/model/trusted_ips.py create mode 100644 src/tests/model/__init__.py create mode 100644 src/tests/model/test_trusted_ips.py diff --git a/src/auth/identification.py b/src/auth/identification.py index 9fd64a4e..cba542a1 100644 --- a/src/auth/identification.py +++ b/src/auth/identification.py @@ -4,6 +4,7 @@ import tornado.websocket +from model.trusted_ips import TrustedIpValidator from utils import tornado_utils, date_utils, audit_utils from utils.date_utils import days_to_ms @@ -39,13 +40,13 @@ class IpBasedIdentification(Identification): COOKIE_KEY = 'client_id_token' EMPTY_TOKEN = (None, None) - def __init__(self, trusted_ips, user_header_name) -> None: - self._trusted_ips = set(trusted_ips) + def __init__(self, ip_validator: TrustedIpValidator, user_header_name) -> None: + self._ip_validator = ip_validator self._user_header_name = user_header_name def identify(self, request_handler): remote_ip = request_handler.request.remote_ip - new_trusted = remote_ip in self._trusted_ips + new_trusted = self._ip_validator.is_trusted(remote_ip) if new_trusted: if request_handler.get_cookie(self.COOKIE_KEY): @@ -77,7 +78,7 @@ def identify(self, request_handler): def identify_for_audit(self, request_handler): remote_ip = request_handler.request.remote_ip - if (remote_ip in self._trusted_ips) and (self._user_header_name): + if self._ip_validator.is_trusted(remote_ip) and (self._user_header_name): return 
request_handler.request.headers.get(self._user_header_name, None) return None diff --git a/src/execution/logging.py b/src/execution/logging.py index 7d00f2c6..b8b6a3ff 100644 --- a/src/execution/logging.py +++ b/src/execution/logging.py @@ -190,8 +190,8 @@ def find_history_entry(self, execution_id, user_id): LOGGER.warning('find_history_entry: cannot parse file for %s', execution_id) elif not self._can_access_entry(entry, user_id): - message = 'User ' + user_id + ' has not access to execution #' + str(execution_id) - LOGGER.warning('%s. Original user: %s', message, execution_id) + message = 'User ' + user_id + ' has no access to execution #' + str(execution_id) + LOGGER.warning('%s. Original user: %s', message, entry.user_id) raise AccessProhibitedException(message) return entry diff --git a/src/model/__init__.py b/src/model/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/model/server_conf.py b/src/model/server_conf.py index 88b2b2e7..c6b4eb54 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -6,6 +6,7 @@ from auth.authorization import ANY_USER from model import model_helper from model.model_helper import read_list, read_int_from_config, read_bool_from_config +from model.trusted_ips import TrustedIpValidator from utils.string_utils import strip LOGGER = logging.getLogger('server_conf') @@ -25,7 +26,7 @@ def __init__(self) -> None: self.admin_config = None self.title = None self.enable_script_titles = None - self.trusted_ips = [] + self.ip_validator = TrustedIpValidator([]) self.user_groups = None self.admin_users = [] self.full_history_users = [] @@ -113,11 +114,11 @@ def from_json(conf_path, temp_folder): def_admins = def_trusted_ips if access_config: - config.trusted_ips = strip(read_list(access_config, 'trusted_ips', default=def_trusted_ips)) + trusted_ips = strip(read_list(access_config, 'trusted_ips', default=def_trusted_ips)) admin_users = _parse_admin_users(access_config, default_admins=def_admins) 
full_history_users = _parse_history_users(access_config) else: - config.trusted_ips = def_trusted_ips + trusted_ips = def_trusted_ips admin_users = def_admins full_history_users = [] @@ -129,6 +130,7 @@ def from_json(conf_path, temp_folder): config.admin_users = admin_users config.full_history_users = full_history_users config.user_header_name = user_header_name + config.ip_validator = TrustedIpValidator(trusted_ips) config.max_request_size_mb = read_int_from_config('max_request_size', json_object, default=10) diff --git a/src/model/trusted_ips.py b/src/model/trusted_ips.py new file mode 100644 index 00000000..d4147abf --- /dev/null +++ b/src/model/trusted_ips.py @@ -0,0 +1,19 @@ +import ipaddress + + +class TrustedIpValidator: + def __init__(self, trusted_ips) -> None: + self._simple_ips = {ip for ip in trusted_ips if '/' not in ip} + self._networks = [ipaddress.ip_network(ip) for ip in trusted_ips if '/' in ip] + + def is_trusted(self, ip): + if ip in self._simple_ips: + return True + + if self._networks: + address = ipaddress.ip_address(ip) + for network in self._networks: + if address in network: + return True + + return False diff --git a/src/tests/ip_idenfication_test.py b/src/tests/ip_idenfication_test.py index 82958b72..52f66aed 100644 --- a/src/tests/ip_idenfication_test.py +++ b/src/tests/ip_idenfication_test.py @@ -2,6 +2,7 @@ from auth.identification import IpBasedIdentification from auth.tornado_auth import TornadoAuth +from model.trusted_ips import TrustedIpValidator from tests.test_utils import mock_object from utils import date_utils @@ -13,7 +14,7 @@ def mock_request_handler(ip=None, x_forwarded_for=None, x_real_ip=None, saved_to handler_mock.application = mock_object() handler_mock.application.auth = TornadoAuth(None) - handler_mock.application.identification = IpBasedIdentification(['127.0.0.1'], user_header_name) + handler_mock.application.identification = IpBasedIdentification(TrustedIpValidator(['127.0.0.1']), user_header_name) 
handler_mock.request = mock_object() handler_mock.request.headers = {} @@ -55,22 +56,22 @@ def clear_cookie(key): class IpIdentificationTest(unittest.TestCase): def test_localhost_ip_trusted_identification(self): - identification = IpBasedIdentification(['127.0.0.1'], None) + identification = IpBasedIdentification(TrustedIpValidator(['127.0.0.1']), None) id = identification.identify(mock_request_handler(ip='127.0.0.1')) self.assertEqual('127.0.0.1', id) def test_some_ip_trusted_identification(self): - identification = IpBasedIdentification(['192.168.21.13'], None) + identification = IpBasedIdentification(TrustedIpValidator(['192.168.21.13']), None) id = identification.identify(mock_request_handler(ip='192.168.21.13')) self.assertEqual('192.168.21.13', id) def test_ip_untrusted_identification(self): - identification = IpBasedIdentification([], None) + identification = IpBasedIdentification(TrustedIpValidator([]), None) id = identification.identify(mock_request_handler(ip='192.168.21.13')) self.assertNotEqual('192.168.21.13', id) def test_ip_untrusted_identification_for_different_connections(self): - identification = IpBasedIdentification([], None) + identification = IpBasedIdentification(TrustedIpValidator([]), None) ids = set() for _ in range(0, 100): @@ -79,7 +80,7 @@ def test_ip_untrusted_identification_for_different_connections(self): self.assertEqual(100, len(ids)) def test_ip_untrusted_identification_same_connection(self): - identification = IpBasedIdentification([], None) + identification = IpBasedIdentification(TrustedIpValidator([]), None) request_handler = mock_request_handler(ip='192.168.21.13') id1 = identification.identify(request_handler) @@ -87,14 +88,14 @@ def test_ip_untrusted_identification_same_connection(self): self.assertEqual(id1, id2) def test_proxied_ip_behind_trusted(self): - identification = IpBasedIdentification(['127.0.0.1'], None) + identification = IpBasedIdentification(TrustedIpValidator(['127.0.0.1']), None) request_handler = 
mock_request_handler(ip='127.0.0.1', x_forwarded_for='192.168.21.13') id = identification.identify(request_handler) self.assertEqual('192.168.21.13', id) def test_proxied_ip_behind_untrusted(self): - identification = IpBasedIdentification([], None) + identification = IpBasedIdentification(TrustedIpValidator([]), None) request_handler = mock_request_handler(ip='127.0.0.1', x_forwarded_for='192.168.21.13') id = identification.identify(request_handler) @@ -104,9 +105,9 @@ def test_proxied_ip_behind_untrusted(self): def test_change_to_trusted(self): request_handler = mock_request_handler(ip='192.168.21.13') - old_id = IpBasedIdentification([], None).identify(request_handler) + old_id = IpBasedIdentification(TrustedIpValidator([]), None).identify(request_handler) - trusted_identification = IpBasedIdentification(['192.168.21.13'], None) + trusted_identification = IpBasedIdentification(TrustedIpValidator(['192.168.21.13']), None) new_id = trusted_identification.identify(request_handler) self.assertNotEqual(old_id, new_id) @@ -116,10 +117,10 @@ def test_change_to_trusted(self): def test_change_to_untrusted(self): request_handler = mock_request_handler(ip='192.168.21.13') - trusted_identification = IpBasedIdentification(['192.168.21.13'], None) + trusted_identification = IpBasedIdentification(TrustedIpValidator(['192.168.21.13']), None) old_id = trusted_identification.identify(request_handler) - new_id = IpBasedIdentification([], None).identify(request_handler) + new_id = IpBasedIdentification(TrustedIpValidator([]), None).identify(request_handler) self.assertNotEqual(old_id, new_id) self.assertNotEqual(new_id, '192.168.21.13') @@ -128,7 +129,7 @@ def test_change_to_untrusted(self): def test_no_cookie_change_for_same_user(self): request_handler = mock_request_handler(ip='192.168.21.13') - identification = IpBasedIdentification([], None) + identification = IpBasedIdentification(TrustedIpValidator([]), None) identification.identify(request_handler) cookie1 = 
request_handler.get_cookie(COOKIE_KEY) @@ -140,7 +141,7 @@ def test_no_cookie_change_for_same_user(self): def test_refresh_old_cookie_with_same_id(self): request_handler = mock_request_handler(ip='192.168.21.13') - identification = IpBasedIdentification([], None) + identification = IpBasedIdentification(TrustedIpValidator([]), None) id = '1234567' token_expiry = str(date_utils.get_current_millis() + date_utils.days_to_ms(2)) @@ -157,7 +158,7 @@ def test_broken_token_structure(self): request_handler = mock_request_handler(ip='192.168.21.13') request_handler.set_secure_cookie(COOKIE_KEY, 'something') - IpBasedIdentification([], None).identify(request_handler) + IpBasedIdentification(TrustedIpValidator([]), None).identify(request_handler) new_token = request_handler.get_cookie(COOKIE_KEY) @@ -167,7 +168,7 @@ def test_broken_token_timestamp(self): request_handler = mock_request_handler(ip='192.168.21.13') request_handler.set_secure_cookie(COOKIE_KEY, 'something&hello') - id = IpBasedIdentification([], None).identify(request_handler) + id = IpBasedIdentification(TrustedIpValidator([]), None).identify(request_handler) new_token = request_handler.get_cookie(COOKIE_KEY) @@ -178,7 +179,7 @@ def test_old_token_timestamp(self): request_handler = mock_request_handler(ip='192.168.21.13') request_handler.set_secure_cookie(COOKIE_KEY, 'something&100000') - id = IpBasedIdentification([], None).identify(request_handler) + id = IpBasedIdentification(TrustedIpValidator([]), None).identify(request_handler) new_token = request_handler.get_cookie(COOKIE_KEY) diff --git a/src/tests/model/__init__.py b/src/tests/model/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/tests/model/test_trusted_ips.py b/src/tests/model/test_trusted_ips.py new file mode 100644 index 00000000..d94000c5 --- /dev/null +++ b/src/tests/model/test_trusted_ips.py @@ -0,0 +1,31 @@ +from unittest import TestCase + +from parameterized import parameterized + +from model.trusted_ips import 
TrustedIpValidator + + +class TestTrustedIpValidator(TestCase): + @parameterized.expand([ + (['192.168.0.15'], '192.168.0.15', True), + (['192.168.0.0'], '192.168.0.15', False), + (['192.168.0.1'], '192.168.0.15', False), + (['127.0.0.1'], '127.0.0.1', True), + (['::1'], '::1', True), + (['192.168.0.15/32'], '192.168.0.15', True), + (['192.168.0.16/32'], '192.168.0.15', False), + (['192.168.0.14/31'], '192.168.0.15', True), + (['192.168.0.16/31'], '192.168.0.15', False), + (['192.168.0.0/28'], '192.168.0.15', True), + (['192.168.0.0/28'], '192.168.0.15', True), + (['192.168.0.0/28'], '192.168.0.16', False), + (['192.168.0.16/28'], '192.168.0.15', False), + (['192.168.0.16/28'], '192.168.0.16', True), + (['192.168.0.0/24'], '192.168.0.16', True), + (['192.168.0.0/16'], '192.168.32.127', True), + ]) + def test_is_trusted(self, configured_ips, user_ip, expected_result): + validator = TrustedIpValidator(configured_ips) + trusted = validator.is_trusted(user_ip) + self.assertEqual(expected_result, trusted, user_ip + ' is trusted=' + str(trusted) + + ' but should be ' + str(expected_result) + ' for ' + str(configured_ips)) diff --git a/src/web/server.py b/src/web/server.py index 682385d3..c7707e85 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -931,7 +931,7 @@ def init(server_config: ServerConfig, if auth.is_enabled(): identification = AuthBasedIdentification(auth) else: - identification = IpBasedIdentification(server_config.trusted_ips, server_config.user_header_name) + identification = IpBasedIdentification(server_config.ip_validator, server_config.user_header_name) downloads_folder = file_download_feature.get_result_files_folder() From 5accf80335987836c30cbe3b73210cd4969ad261 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sun, 1 Nov 2020 13:03:57 +0100 Subject: [PATCH 037/398] #342 added admin_users to script configuration --- src/config/config_service.py | 16 ++++- src/model/script_config.py | 17 ++++- src/tests/config_service_test.py | 49 +++++++++++++ 
src/web/server.py | 11 ++- .../scripts-config/ScriptConfigForm.vue | 72 ++++++++++++++----- .../scripts-config/script-fields.js | 3 + web-src/tests/unit/admin/ScriptConfig_test.js | 24 +++++++ 7 files changed, 172 insertions(+), 20 deletions(-) diff --git a/src/config/config_service.py b/src/config/config_service.py index 9ceb1c5a..07dce49b 100644 --- a/src/config/config_service.py +++ b/src/config/config_service.py @@ -49,6 +49,9 @@ def load_config(self, name, user): if config_object.get('name') is None: config_object['name'] = short_config.name + if not self._can_edit_script(user, short_config): + raise ConfigNotAllowedException(str(user) + ' has no admin access to ' + short_config.name) + return {'config': config_object, 'filename': os.path.basename(path)} def create_config(self, user, config): @@ -84,6 +87,9 @@ def update_config(self, user, config, filename): if (found_config_path is not None) and (os.path.basename(found_config_path) != filename): raise InvalidConfigException('Another script found with the same name: ' + name) + if (short_config is not None) and not self._can_edit_script(user, short_config): + raise ConfigNotAllowedException(str(user) + ' is not allowed to modify ' + short_config.name) + LOGGER.info('Updating script config "' + name + '" in ' + original_file_path) self._save_config(config, original_file_path) @@ -107,6 +113,9 @@ def load_script(path, content): if short_config is None: return None + if edit_mode and (not conf_service._can_edit_script(user, short_config)): + return None + if (not edit_mode) and (not conf_service._can_access_script(user, short_config)): return None @@ -196,14 +205,17 @@ def _load_script_config(self, path, content_or_json_dict, user, parameter_values def _can_access_script(self, user, short_config): return self._authorizer.is_allowed(user.user_id, short_config.allowed_users) + def _can_edit_script(self, user, short_config): + return self._authorizer.is_allowed(user.user_id, short_config.admin_users) + def 
_check_admin_access(self, user): if not self._authorizer.is_admin(user.user_id): raise AdminAccessRequiredException('Admin access to scripts is prohibited for ' + str(user)) class ConfigNotAllowedException(Exception): - def __init__(self): - pass + def __init__(self, message=None): + super().__init__(message) class AdminAccessRequiredException(Exception): diff --git a/src/model/script_config.py b/src/model/script_config.py index dfeb3b25..fd16a947 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -20,6 +20,7 @@ class ShortConfig(object): def __init__(self): self.name = None self.allowed_users = [] + self.admin_users = [] self.group = None @@ -256,6 +257,7 @@ def read_short(file_path, json_object): config.name = _read_name(file_path, json_object) config.allowed_users = json_object.get('allowed_users') + config.admin_users = json_object.get('admin_users') config.group = read_str_from_config(json_object, 'group', blank_to_none=True) hidden = read_bool_from_config('hidden', json_object, default=False) @@ -267,6 +269,11 @@ def read_short(file_path, json_object): elif (config.allowed_users == '*') or ('*' in config.allowed_users): config.allowed_users = ANY_USER + if config.admin_users is None: + config.admin_users = ANY_USER + elif (config.admin_users == '*') or ('*' in config.admin_users): + config.admin_users = ANY_USER + return config @@ -352,7 +359,15 @@ def get(self): def get_sorted_config(config): - key_order = ['name', 'script_path', 'working_directory', 'hidden', 'description', 'allowed_users', 'include', + key_order = ['name', 'script_path', + 'working_directory', + 'hidden', + 'description', + 'group', + 'allowed_users', + 'admin_users', + 'schedulable', + 'include', 'output_files', 'requires_terminal', 'bash_formatting', 'parameters'] def get_order(key): diff --git a/src/tests/config_service_test.py b/src/tests/config_service_test.py index 29ddc9ef..dfae4d14 100644 --- a/src/tests/config_service_test.py +++ 
b/src/tests/config_service_test.py @@ -138,6 +138,12 @@ def test_list_configs_when_edit_mode_and_admin_without_allowance(self): self.assert_list_config_names(self.admin_user, ['a1', 'c2'], mode='edit') + def test_list_configs_when_edit_mode_and_admin_not_in_admin_users(self): + _create_script_config_file('a1', admin_users=['user1']) + _create_script_config_file('c2', admin_users=['adm_user']) + + self.assert_list_config_names(self.admin_user, ['c2'], mode='edit') + def test_list_configs_when_edit_mode_and_non_admin(self): _create_script_config_file('a1', allowed_users=['user1']) _create_script_config_file('c2', allowed_users=['user1']) @@ -266,6 +272,14 @@ def test_insert_sorted_values(self): ('requires_terminal', False), ('parameters', [{'name': 'param1'}])])) + def test_create_config_with_admin_users(self): + config = _prepare_script_config_object('conf1', + description='My wonderful test config', + admin_users=['another_user']) + self.config_service.create_config(self.admin_user, config) + + _validate_config(self, 'conf1.json', config) + class ConfigServiceUpdateConfigTest(unittest.TestCase): @@ -375,6 +389,32 @@ def test_update_sorted_values(self): ('parameters', [{'name': 'param1'}])]) _validate_config(self, 'confX.json', body) + def test_update_config_allowed_admin_user(self): + config = _prepare_script_config_object('Conf X', + description='My wonderful test config', + admin_users=['admin_user']) + self.config_service.update_config(self.admin_user, config, 'confX.json') + + new_config = _prepare_script_config_object('Conf X', + description='New desc') + self.config_service.update_config(self.admin_user, new_config, 'confX.json') + + _validate_config(self, 'confX.json', new_config) + + def test_update_config_different_admin_user(self): + config = _prepare_script_config_object('Conf X', + description='My wonderful test config', + admin_users=['another_user']) + self.config_service.update_config(self.admin_user, config, 'confX.json') + + new_config = 
_prepare_script_config_object('Conf X', + description='New desc', + admin_users=['admin_user']) + self.assertRaisesRegex(ConfigNotAllowedException, 'is not allowed to modify', + self.config_service.update_config, self.admin_user, new_config, 'confX.json') + + _validate_config(self, 'confX.json', config) + class ConfigServiceLoadConfigForAdminTest(unittest.TestCase): def setUp(self): @@ -413,6 +453,15 @@ def test_load_config_when_not_exists(self): config = self.config_service.load_config('ConfX', self.admin_user) self.assertIsNone(config) + def test_load_config_when_script_has_admin_users(self): + _create_script_config_file('ConfX', admin_users=['admin_user']) + config = self.config_service.load_config('ConfX', self.admin_user) + self.assertEqual(config['filename'], 'ConfX.json') + + def test_load_config_when_script_has_different_admin_users(self): + _create_script_config_file('ConfX', admin_users=['admin_user2']) + self.assertRaises(ConfigNotAllowedException, self.config_service.load_config, 'ConfX', self.admin_user) + def _create_script_config_file(filename, *, name=None, **kwargs): conf_folder = os.path.join(test_utils.temp_folder, 'runners') diff --git a/src/web/server.py b/src/web/server.py index c7707e85..684f7b03 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -241,13 +241,22 @@ def put(self, user): self.application.config_service.update_config(user, config, filename) except (InvalidConfigException, InvalidFileException) as e: raise tornado.web.HTTPError(422, str(e)) + except ConfigNotAllowedException: + LOGGER.warning('Admin access to the script "' + config['name'] + '" is denied for ' + user.get_audit_name()) + respond_error(self, 403, 'Access to the script is denied') + return class AdminGetScriptEndpoint(BaseRequestHandler): @requires_admin_rights @inject_user def get(self, user, script_name): - config = self.application.config_service.load_config(script_name, user) + try: + config = self.application.config_service.load_config(script_name, user) 
+ except ConfigNotAllowedException: + LOGGER.warning('Admin access to the script "' + script_name + '" is denied for ' + user.get_audit_name()) + respond_error(self, 403, 'Access to the script is denied') + return if config is None: raise tornado.web.HTTPError(404, str('Failed to find config for name: ' + script_name)) diff --git a/web-src/src/admin/components/scripts-config/ScriptConfigForm.vue b/web-src/src/admin/components/scripts-config/ScriptConfigForm.vue index d6c27d49..4f3578bc 100644 --- a/web-src/src/admin/components/scripts-config/ScriptConfigForm.vue +++ b/web-src/src/admin/components/scripts-config/ScriptConfigForm.vue @@ -21,6 +21,16 @@ v-model="allowAllUsers"/> +
    +
    + + +
    + + +
    +
    u !== '*'); - this.allowAllUsers = isNull(config['allowed_users']) || allowedUsers.includes('*'); + this.updateAccessFieldInVm(config, + 'allowedUsers', + 'allowAllUsers', + 'allowed_users') + + this.updateAccessFieldInVm(config, + 'adminUsers', + 'allowAllAdmins', + 'admin_users') } }, allowAllUsers() { @@ -133,25 +150,48 @@ }, allowedUsers() { this.updateAllowedUsers(); + }, + allowAllAdmins() { + this.updateAdminUsers(); + }, + adminUsers() { + this.updateAdminUsers(); } }, methods: { updateAllowedUsers() { - if (this.allowAllUsers) { - if (isEmptyArray(this.allowedUsers)) { - this.$delete(this.value, 'allowed_users'); - } else { - if (this.allowedUsers.includes('*')) { - this.value['allowed_users'] = this.allowedUsers; + this.updateAccessFieldInValue(this.allowAllUsers, 'allowedUsers', 'allowed_users'); + }, + updateAdminUsers() { + this.updateAccessFieldInValue(this.allowAllAdmins, 'adminUsers', 'admin_users'); + }, + updateAccessFieldInValue(allowAll, vmPropertyName, valuePropertyName) { + const newValue = this[vmPropertyName]; + + if (isEmptyArray(newValue)) { + this.$delete(this.value, valuePropertyName); + } else { + if (allowAll) { + if (newValue.includes('*')) { + this.value[valuePropertyName] = newValue; } else { - this.value['allowed_users'] = [...this.allowedUsers, '*']; + this.value[valuePropertyName] = [...newValue, '*']; } + } else { + this.value[valuePropertyName] = newValue; } - } else { - this.value['allowed_users'] = this.allowedUsers; } - } + }, + updateAccessFieldInVm(config, vmPropertyName, vmAllowAllPropertyName, valuePropertyName) { + let users = get(config, valuePropertyName); + if (isNull(users)) { + users = []; + } + this[vmPropertyName] = users.filter(u => u !== '*'); + this[vmAllowAllPropertyName] = isNull(config[valuePropertyName]) || users.includes('*'); + }, + } } diff --git a/web-src/src/admin/components/scripts-config/script-fields.js b/web-src/src/admin/components/scripts-config/script-fields.js index ecab72c9..3aa9b1da 
100644 --- a/web-src/src/admin/components/scripts-config/script-fields.js +++ b/web-src/src/admin/components/scripts-config/script-fields.js @@ -19,6 +19,9 @@ export const workDirField = { export const allowAllField = { name: 'Allow all' }; +export const allowAllAdminsField = { + name: 'Any admin' +}; export const bashFormattingField = { name: 'Bash formatting', description: 'Enable ANSI escape sequences for text formatting and cursor moves' diff --git a/web-src/tests/unit/admin/ScriptConfig_test.js b/web-src/tests/unit/admin/ScriptConfig_test.js index 37155504..8aaaefac 100644 --- a/web-src/tests/unit/admin/ScriptConfig_test.js +++ b/web-src/tests/unit/admin/ScriptConfig_test.js @@ -100,4 +100,28 @@ describe('Test ScriptConfig', function () { }); }); + describe('Test edit allowed_users', function () { + it('Test edit allowed_users manually', async function () { + await _setValueByUser('Allow all', false); + await _setValueByUser('Allowed users', ['user A', 'user B']); + + expect(store.state.scriptConfig.scriptConfig.allowed_users).toEqual(['user A', 'user B']) + }); + }); + + describe('Test edit admin_users', function () { + it('Test edit admin_users manually', async function () { + await _setValueByUser('Any admin', false); + await _setValueByUser('Admin users', ['user A', 'user B']); + + expect(store.state.scriptConfig.scriptConfig.admin_users).toEqual(['user A', 'user B']) + }); + + it('Test set any admin = false without any user, manually', async function () { + await _setValueByUser('Any admin', false); + + expect(store.state.scriptConfig.scriptConfig.admin_users).toBeNil() + }); + }); + }); \ No newline at end of file From 6379e615dae050ee5d8617379dddc674ba6a5212 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Mon, 2 Nov 2020 14:11:37 +0100 Subject: [PATCH 038/398] preparing 1.16 release --- web-src/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web-src/package.json b/web-src/package.json index a92d9d7f..e40db3b4 100644 --- 
a/web-src/package.json +++ b/web-src/package.json @@ -1,6 +1,6 @@ { "name": "script-server", - "version": "1.15.0", + "version": "1.16.0", "private": true, "dependencies": { "axios": "^0.19.2", From 85f923c3c055a89ea6fc8be9327021f96bb92b64 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Wed, 18 Nov 2020 22:47:36 +0100 Subject: [PATCH 039/398] #367 made user authorization case-insensitive --- src/auth/authorization.py | 45 ++++++++++++++++++++++++--------- src/tests/authorization_test.py | 20 +++++++++++++++ 2 files changed, 53 insertions(+), 12 deletions(-) diff --git a/src/auth/authorization.py b/src/auth/authorization.py index 16831273..d8c0bf46 100644 --- a/src/auth/authorization.py +++ b/src/auth/authorization.py @@ -5,31 +5,52 @@ GROUP_PREFIX = '@' +def _normalize_user(user): + if user: + return user.lower() + return user + + +def _normalize_users(allowed_users): + if isinstance(allowed_users, list): + if ANY_USER in allowed_users: + return ANY_USER + + return [_normalize_user(user) for user in allowed_users] + + return allowed_users + + class Authorizer: def __init__(self, app_allowed_users, admin_users, full_history_users, groups_provider): - self._app_allowed_users = app_allowed_users - self._admin_users = admin_users - self._full_history_users = full_history_users + self._app_allowed_users = _normalize_users(app_allowed_users) + self._admin_users = _normalize_users(admin_users) + self._full_history_users = _normalize_users(full_history_users) self._groups_provider = groups_provider def is_allowed_in_app(self, user_id): - return self.is_allowed(user_id, self._app_allowed_users) + return self._is_allowed_internal(user_id, self._app_allowed_users) def is_admin(self, user_id): - return self.is_allowed(user_id, self._admin_users) + return self._is_allowed_internal(user_id, self._admin_users) def has_full_history_access(self, user_id): - return self.is_admin(user_id) or self.is_allowed(user_id, self._full_history_users) + return self.is_admin(user_id) or 
self._is_allowed_internal(user_id, self._full_history_users) def is_allowed(self, user_id, allowed_users): - if not allowed_users: + normalized_users = _normalize_users(allowed_users) + + return self._is_allowed_internal(user_id, normalized_users) + + def _is_allowed_internal(self, user_id, normalized_allowed_users): + if not normalized_allowed_users: return False - if (allowed_users == ANY_USER) or (ANY_USER in allowed_users): + if normalized_allowed_users == ANY_USER: return True - if user_id in allowed_users: + if _normalize_user(user_id) in normalized_allowed_users: return True user_groups = self._groups_provider.get_groups(user_id) @@ -37,7 +58,7 @@ def is_allowed(self, user_id, allowed_users): return False for group in user_groups: - if (GROUP_PREFIX + group) in allowed_users: + if _normalize_user(GROUP_PREFIX + group) in normalized_allowed_users: return True return False @@ -92,10 +113,10 @@ def __init__(self, groups) -> None: if member.startswith(GROUP_PREFIX): self._lazy_group_parents[member[1:]].append(group) else: - self._user_groups[member].append(group) + self._user_groups[_normalize_user(member)].append(group) def get_groups(self, user, known_groups=None): - user_groups = set(self._user_groups[user]) + user_groups = set(self._user_groups[_normalize_user(user)]) if known_groups: for known_group in known_groups: diff --git a/src/tests/authorization_test.py b/src/tests/authorization_test.py index eec1f8ce..04a9c4ba 100644 --- a/src/tests/authorization_test.py +++ b/src/tests/authorization_test.py @@ -9,6 +9,9 @@ class TestIsAllowed(unittest.TestCase): def test_allowed_from_single_user(self): self.assertTrue(self.authorizer.is_allowed('user1', ['user1'])) + def test_allowed_from_single_user_ignore_case(self): + self.assertTrue(self.authorizer.is_allowed('USer1', ['usER1'])) + def test_not_allowed_from_single_user(self): self.assertFalse(self.authorizer.is_allowed('user1', ['user2'])) @@ -22,6 +25,10 @@ def test_allowed_from_single_group(self): 
self.user_groups['user1'] = ['group1'] self.assertTrue(self.authorizer.is_allowed('user1', ['@group1'])) + def test_allowed_from_single_group_ignore_case(self): + self.user_groups['user1'] = ['Group1'] + self.assertTrue(self.authorizer.is_allowed('user1', ['@groUP1'])) + def test_not_allowed_from_single_group_invalid_name_in_provider(self): self.user_groups['user1'] = ['@group1'] self.assertFalse(self.authorizer.is_allowed('user1', ['@group1'])) @@ -66,6 +73,9 @@ class TestIsAllowedInApp(unittest.TestCase): def test_single_user_allowed(self): self.assertAllowed('user1', ['user1'], True) + def test_single_user_allowed_ignore_case(self): + self.assertAllowed('User1', ['uSEr1'], True) + def test_multiple_users_allowed(self): self.assertAllowed('user2', ['user1', 'user2', 'user3'], True) @@ -98,6 +108,9 @@ class TestIsAdmin(unittest.TestCase): def test_single_admin_allowed(self): self.assertAdmin('admin1', ['admin1'], True) + def test_single_admin_allowed_ignore_case(self): + self.assertAdmin('adMin1', ['AdmiN1'], True) + def test_multiple_admins_allowed(self): self.assertAdmin('admin2', ['admin1', 'admin2', 'admin3'], True) @@ -130,6 +143,9 @@ class TestHistoryAccess(unittest.TestCase): def test_user_in_the_list(self): self.assert_has_access('user1', [], ['user1'], True) + def test_user_in_the_list_ignore_case(self): + self.assert_has_access('useR1', [], ['UsEr1'], True) + def test_any_user_allowed(self): self.assert_has_access('user2', [], [ANY_USER], True) @@ -163,6 +179,10 @@ def test_single_user_in_single_group(self): provider = PreconfiguredGroupProvider({'group1': ['user1']}) self.assertCountEqual(provider.get_groups('user1'), ['group1']) + def test_single_user_in_single_group_ignore_case(self): + provider = PreconfiguredGroupProvider({'group1': ['USER1']}) + self.assertCountEqual(provider.get_groups('User1'), ['group1']) + def test_two_users_in_different_groups(self): provider = PreconfiguredGroupProvider( {'group1': ['user1'], From 
a199e29908885eca25d0d8b3bb4556605c65ee2d Mon Sep 17 00:00:00 2001 From: Danny Rehelis Date: Fri, 11 Dec 2020 13:24:21 +0200 Subject: [PATCH 040/398] review fixes --- src/model/external_model.py | 1 + src/model/parameter_config.py | 7 ++++++- .../components/scripts-config/ParameterConfigForm.vue | 4 ++++ .../components/scripts-config/parameter-fields.js | 5 +++++ web-src/src/common/components/textfield.vue | 10 ++++++++-- 5 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/model/external_model.py b/src/model/external_model.py index 2287e100..51da173f 100644 --- a/src/model/external_model.py +++ b/src/model/external_model.py @@ -41,6 +41,7 @@ def parameter_to_external(parameter): 'type': parameter.type, 'min': parameter.min, 'max': parameter.max, + 'max_length': parameter.max_length, 'values': parameter.values, 'secure': parameter.secure, 'fileRecursive': parameter.file_recursive, diff --git a/src/model/parameter_config.py b/src/model/parameter_config.py index 34b0525e..6caf2db5 100644 --- a/src/model/parameter_config.py +++ b/src/model/parameter_config.py @@ -27,6 +27,7 @@ 'type', 'min', 'max', + 'max_length', 'constant', '_values_provider', 'values', @@ -71,6 +72,7 @@ def _reload(self): self.required = read_bool_from_config('required', config, default=False) self.min = config.get('min') self.max = config.get('max') + self.max_length = config.get('max_length') self.secure = read_bool_from_config('secure', config, default=False) self.separator = config.get('separator', ',') self.multiple_arguments = read_bool_from_config('multiple_arguments', config, default=False) @@ -277,6 +279,9 @@ def validate_value(self, value, *, ignore_required=False): return None if self.type == 'text': + if (not is_empty(self.max_length)) and (len(value) > int(self.max_length)): + return 'is longer than allowed char length (' \ + + str(len(value)) + ' > ' + str(self.max_length) + ')' return None if self.type == 'file_upload': @@ -291,7 +296,7 @@ def validate_value(self, value, 
*, ignore_required=False): int_value = int(value) if (not is_empty(self.max)) and (int_value > int(self.max)): - return 'is greater than allowed value (' \ + return 'is longer than allowed value (' \ + value_string + ' > ' + str(self.max) + ')' if (not is_empty(self.min)) and (int_value < int(self.min)): diff --git a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue index 83c7aa44..9eff9623 100644 --- a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue +++ b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue @@ -134,6 +134,7 @@ secure: 'secure', min: 'min', max: 'max', + max_length: 'max_length', multipleArguments: 'multiple_arguments', sameArgParam: 'same_arg_param', separator: 'separator', @@ -169,6 +170,7 @@ description: null, min: null, max: null, + max_length: null, allowedValues: null, allowedValuesScript: null, allowedValuesFromScript: null, @@ -193,6 +195,7 @@ descriptionField, minField, maxField: Object.assign({}, maxField), + maxLengthField, allowedValuesScriptField, allowedValuesFromScriptField, defaultValueField: Object.assign({}, defaultValueField), @@ -221,6 +224,7 @@ this.required = get(config, 'required', false); this.min = config['min']; this.max = config['max']; + this.max_length = config['max_length']; this.constant = !!get(config, 'constant', false); this.secure = !!get(config, 'secure', false); this.multipleArguments = !!get(config, 'multiple_arguments', false); diff --git a/web-src/src/admin/components/scripts-config/parameter-fields.js b/web-src/src/admin/components/scripts-config/parameter-fields.js index 087a9351..b82a7a86 100644 --- a/web-src/src/admin/components/scripts-config/parameter-fields.js +++ b/web-src/src/admin/components/scripts-config/parameter-fields.js @@ -67,6 +67,11 @@ export const allowedValuesScriptField = { required: true }; +export const maxLengthField = { + name: 'Max Characters', + type: 'int' +}; 
+ export const allowedValuesFromScriptField = { name: 'Load from script', withoutValue: true, diff --git a/web-src/src/common/components/textfield.vue b/web-src/src/common/components/textfield.vue index 9fcded86..83a8f341 100644 --- a/web-src/src/common/components/textfield.vue +++ b/web-src/src/common/components/textfield.vue @@ -121,7 +121,7 @@ } if (!empty) { - var typeError = getValidByTypeError(value, this.config.type, this.config.min, this.config.max); + var typeError = getValidByTypeError(value, this.config.type, this.config.min, this.config.max, this.config.max_length); if (!isEmptyString(typeError)) { return typeError; } @@ -149,7 +149,13 @@ } } - function getValidByTypeError(value, type, min, max) { + function getValidByTypeError(value, type, min, max, max_length) { + if (type === 'text') { + if (value.length > max_length) { + return 'Max chars allowed: ' + max_length + } + } + if (type === 'int') { const isInteger = /^(((-?[1-9])(\d*))|0)$/.test(value); if (!isInteger) { From 00c455ebda5f1ab1374fffc4baf783d3e5a0f221 Mon Sep 17 00:00:00 2001 From: Danny Rehelis Date: Sat, 19 Dec 2020 13:43:45 +0200 Subject: [PATCH 041/398] check if max_length is set to apply validation --- web-src/src/common/components/textfield.vue | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/web-src/src/common/components/textfield.vue b/web-src/src/common/components/textfield.vue index 83a8f341..2800f582 100644 --- a/web-src/src/common/components/textfield.vue +++ b/web-src/src/common/components/textfield.vue @@ -151,8 +151,10 @@ function getValidByTypeError(value, type, min, max, max_length) { if (type === 'text') { - if (value.length > max_length) { - return 'Max chars allowed: ' + max_length + if (max_length) { + if (value.length > max_length) { + return 'Max chars allowed: ' + max_length + } } } From ee0ff7c520f547c3679eecfe571e60a6701d91b9 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 19 Dec 2020 20:24:23 +0100 Subject: [PATCH 042/398] #375 fixed 
missing import --- .../scripts-config/ParameterConfigForm.vue | 79 ++++++++++--------- 1 file changed, 40 insertions(+), 39 deletions(-) diff --git a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue index 9eff9623..1ec412cb 100644 --- a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue +++ b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue @@ -71,47 +71,48 @@ diff --git a/web-src/src/admin/AdminApp.vue b/web-src/src/admin/AdminApp.vue index f2ca7fcf..3c381d42 100644 --- a/web-src/src/admin/AdminApp.vue +++ b/web-src/src/admin/AdminApp.vue @@ -1,147 +1,139 @@ - - \ No newline at end of file diff --git a/web-src/src/admin/components/scripts-config/ParamListItem.vue b/web-src/src/admin/components/scripts-config/ParamListItem.vue index 651b9df7..84f35266 100644 --- a/web-src/src/admin/components/scripts-config/ParamListItem.vue +++ b/web-src/src/admin/components/scripts-config/ParamListItem.vue @@ -1,50 +1,50 @@ \ No newline at end of file diff --git a/web-src/src/admin/components/scripts-config/ScriptParamList.vue b/web-src/src/admin/components/scripts-config/ScriptParamList.vue index 1198929b..441b16eb 100644 --- a/web-src/src/admin/components/scripts-config/ScriptParamList.vue +++ b/web-src/src/admin/components/scripts-config/ScriptParamList.vue @@ -13,32 +13,32 @@ \ No newline at end of file diff --git a/web-src/src/assets/css/admin.css b/web-src/src/assets/css/admin.css deleted file mode 100644 index 3572eef9..00000000 --- a/web-src/src/assets/css/admin.css +++ /dev/null @@ -1,13 +0,0 @@ -.input-field:after { - content: attr(data-error); - color: #F44336; - position: absolute; - left: 0.9rem; - bottom: -0.9rem; - font-size: 0.9rem; -} - -#toast-container { - right: 5%; - left: unset; -} diff --git a/web-src/src/assets/css/color_variables.scss b/web-src/src/assets/css/color_variables.scss index d8a15503..e3fc2945 100644 --- 
a/web-src/src/assets/css/color_variables.scss +++ b/web-src/src/assets/css/color_variables.scss @@ -21,11 +21,70 @@ $orange: ( // override materialize css colors, because there are too many of them, and they are not used $colors: ( - "materialize-red": $materialize-red, - "red": $red, - "teal": $teal, - "orange": $orange, - "grey": $grey, - "green": $green, - "light-blue": $light-blue -); \ No newline at end of file + "materialize-red": $materialize-red, + "red": $red, + "teal": $teal, + "orange": $orange, + "grey": $grey, + "green": $green, + "light-blue": $light-blue +); + +$bg-color: var(--background-color); +$bg-hover-color-opaque: var(--hover-color); +$bg-focus-color-opaque: var(--focus-color); +$bg-hover-color-solid: var(--hover-color); +$bg-focus-color-solid: var(--focus-color); +$secondary-color: var(--primary-color); +$secondary-color-when-hovered-solid: var(--primary-color-raised-hover-solid); +$secondary-color-when-focused-solid: var(--primary-color-raised-focus-solid); +$button-raised-color: var(--font-on-primary-color-main); +$button-disabled-background: var(--background-color-disabled); +$button-disabled-color: var(--font-color-disabled); +$button-flat-color: var(--font-color-main); +$button-flat-disabled-color: var(--font-color-disabled); +$button-floating-color: var(--font-on-primary-color-main); +$card-bg-color: var(--background-color); +$datepicker-weekday-bg: #00FF00; +$datepicker-calendar-header-color: var(--font-color-medium); +$datepicker-selected-outfocus: #00FF00; +$datepicker-day-focus: var(--focus-color); +$datepicker-year: var(--font-on-primary-color-medium); +$datepicker-disabled-day-color: var(--font-color-disabled); + +$switch-track-checked-bg: var(--primary-color-dark-color); +$collection-active-color: var(--primary-color); +$collection-active-bg-color: var(--focus-color); +$collection-bg-color: transparent; +$collection-hover-bg-color: var(--hover-color); +$collection-border-color: var(--separator-color); +$progress-bar-track-color: 
transparent; +$off-black: var(--font-color-main); + +$chip-selected-color: var(--primary-color); +$chip-bg-color: var(--background-color-high-emphasis); +$chip-border-color: var(--font-color-medium); + +$select-background: var(--background-color-level-16dp); +$select-focus: transparent; +$select-option-hover: var(--hover-color); +$select-option-focus: var(--focus-color); +$select-option-selected: var(--focus-color); +$select-disabled-color: var(--font-color-disabled); + +$collapsible-header-color: var(--background-color-level-8dp); +$collapsible-border-color: var(--separator-color); + +$input-border-color: var(--font-color-medium); +$input-disabled-color: var(--font-color-disabled); +$input-disabled-solid-color: var(--font-color-disabled); +$placeholder-text-color: var(--font-color-medium); + +$radio-empty-color: var(--font-color-medium) !default; + +$navbar-font-color: var(--font-color-main); + +$table-striped-color: var(--background-color-slight-emphasis); + +$card-link-color: var(--primary-color); +$card-link-color-light: var(--primary-color-raised-hover-solid); diff --git a/web-src/src/assets/css/index.css b/web-src/src/assets/css/index.css index 4d278d3e..268cfda9 100644 --- a/web-src/src/assets/css/index.css +++ b/web-src/src/assets/css/index.css @@ -9,7 +9,7 @@ html { } body { - background: #eee; + background: var(--surface-color); } /* materialized styles */ @@ -47,25 +47,6 @@ h6.header { padding: 0.5rem; } -input:not([type]), -input[type=text]:not(.browser-default), -input[type=password]:not(.browser-default), -input[type=email]:not(.browser-default), -input[type=url]:not(.browser-default), -input[type=time]:not(.browser-default), -input[type=date]:not(.browser-default), -input[type=datetime]:not(.browsertele-default), -input[type=datetime-local]:not(.browser-default), -input[type=tel]:not(.browser-default), -input[type=number]:not(.browser-default), -input[type=search]:not(.browser-default), -textarea.materialize-textarea { - color: rgba(0, 0, 0, 0.87); 
-} - -input[type=checkbox]:not(.browser-default) + span { - color: #9e9e9e; -} #login-body { height: 100vh; @@ -74,13 +55,12 @@ input[type=checkbox]:not(.browser-default) + span { #login-panel { width: 300px; margin: auto; + background-color: var(--background-color-level-4dp); } #login-panel .card-image { height: 50px; - background: url('../titleBackground_login.jpg') no-repeat; - background-size: cover; - background-position-x: 28%; + background: var(--login-header-background); } #login-panel .login-form .input-field { @@ -124,72 +104,53 @@ input[type=checkbox]:not(.browser-default) + span { margin-top: 16px; } -#login-google_oauth-button { +.oauth-button { height: 40px; width: 188px; - padding-left: 34px; - margin: auto; - margin-top: 34px; display: block; + margin-top: 34px; + margin-left: auto; + margin-right: auto; + padding-left: 36px; font-size: 14px; font-weight: 500; - color: #757575; + color: var(--font-color-main); border-radius: 2px; - box-shadow: 0 1px 3px -1px #202020; + box-shadow: var(--shadow-4dp); border: none; - background-image: url('../g-logo-plain.png'); - background-color: white; - background-position-y: 50%; - background-position-x: -4px; - background-size: 48px; background-repeat: no-repeat; + background-size: contain; + background-color: var(--background-color-level-4dp); + + outline: 0; +} + +.oauth-button:hover, +.oauth-button:active { + box-shadow: var(--shadow-6dp); } -#login-google_oauth-button:active { - background-color: #EEE; - background-image: url('../g-logo-plain-pressed.png'); +.oauth-button:active, +.oauth-button:focus { + background-color: var(--focus-color-solid); } -#login-google_oauth-button[disabled] { - color: #B0B0B0; +.oauth-button[disabled] { + color: var(--font-color-disabled); } +#login-google_oauth-button { + background-image: url('../g-logo-plain.png'); +} #login-panel .login-gitlab .login-info-label { margin-top: 16px; } #login-gitlab-button { - height: 40px; - width: 188px; - padding-left: 34px; - margin: auto; 
- margin-top: 34px; - display: block; - - font-size: 14px; - font-weight: 500; - color: #757575; - - border-radius: 2px; - box-shadow: 0 1px 3px -1px #202020; - border: none; - background-image: url('../gitlab-icon-rgb.png'); - background-color: white; - background-position-y: 50%; - background-position-x: -4px; - background-size: 48px; - background-repeat: no-repeat; -} - -#login-gitlab-button:active { - background-color: #EEE; -} - -#login-gitlab-button[disabled] { - color: #B0B0B0; + background-position-x: 6px; } diff --git a/web-src/src/assets/css/materializecss/material-buttons.css b/web-src/src/assets/css/materializecss/material-buttons.css index 47c2e64b..0d2073ee 100644 --- a/web-src/src/assets/css/materializecss/material-buttons.css +++ b/web-src/src/assets/css/materializecss/material-buttons.css @@ -1,3 +1,69 @@ .btn-flat:hover { - background-color: rgba(0, 0, 0, 0.1); -} \ No newline at end of file + background-color: var(--hover-color); +} + +.primary-color-dark .btn-flat { + color: var(--font-on-primary-color-dark-main) +} + +.primary-color-dark .btn-flat:hover { + background-color: var(--primary-color-dark-when-hovered); +} + +.primary-color-dark .btn-flat:focus { + background-color: var(--primary-color-dark-when-focused); +} + +.primary-color-light .btn-flat { + color: var(--font-on-primary-color-light--main) +} + +.primary-color-light .btn-flat:hover { + background-color: var(--primary-color-light-when-hovered); +} + +.primary-color-light .btn-flat:focus { + background-color: var(--primary-color-light-when-focused); +} + +.btn-icon-flat { + color: var(--font-color-medium); + border-radius: 50%; + cursor: pointer; + display: inline-block; + width: 40px; + height: 40px; + line-height: 40px; + outline: none; +} + +.btn-icon-flat i { + font-size: 1.6rem; + line-height: inherit; +} + +.btn-icon-flat.btn-small { + width: 36px; + height: 36px; + line-height: 36px; + padding: 0; + background-color: transparent; + box-shadow: none; +} + 
+.btn-icon-flat.btn-small i { + font-size: 1.45rem; +} + +.btn-icon-flat:hover { + background-color: var(--hover-color); +} + +.btn-icon-flat:focus { + background-color: var(--focus-color); +} + +.btn-icon-flat[disabled], +.btn-icon-flat:disabled { + pointer-events: none; +} diff --git a/web-src/src/assets/css/materializecss/material-chips.css b/web-src/src/assets/css/materializecss/material-chips.css new file mode 100644 index 00000000..dc055112 --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-chips.css @@ -0,0 +1,11 @@ +.chips-list .chips input:not(.browser-default).input { + color: var(--font-color-main) +} + +.chips-list .chip { + color: var(--font-color-medium) +} + +.chips-list .chip:focus { + color: var(--font-on-primary-color-main); +} diff --git a/web-src/src/assets/css/materializecss/material-datepicker.css b/web-src/src/assets/css/materializecss/material-datepicker.css index aab0871f..0148ff4b 100644 --- a/web-src/src/assets/css/materializecss/material-datepicker.css +++ b/web-src/src/assets/css/materializecss/material-datepicker.css @@ -1,3 +1,21 @@ .input-field.inline .datepicker-container .select-dropdown { margin-bottom: 0; -} \ No newline at end of file +} + +.datepicker-date-display { + color: var(--font-on-primary-color-main); +} + +.datepicker-table td.is-selected { + color: var(--font-on-primary-color-main); +} + +.datepicker-controls button:hover, +.datepicker-day-button:hover { + background-color: var(--hover-color); +} + +.datepicker-controls button:focus, +.datepicker-day-button:focus { + background-color: var(--focus-color); +} diff --git a/web-src/src/assets/css/materializecss/material-dropdown.css b/web-src/src/assets/css/materializecss/material-dropdown.css index 8124268f..155a1571 100644 --- a/web-src/src/assets/css/materializecss/material-dropdown.css +++ b/web-src/src/assets/css/materializecss/material-dropdown.css @@ -2,3 +2,19 @@ -webkit-backface-visibility: hidden; backface-visibility: hidden; } + +.select-wrapper 
.caret { + fill: var(--font-color-main) +} + +.select-dropdown.dropdown-content.multiple-select-dropdown li.selected { + background-color: transparent; +} + +.select-dropdown.dropdown-content.multiple-select-dropdown li:hover:not(.disabled) { + background-color: var(--hover-color); +} + +.select-dropdown.dropdown-content.multiple-select-dropdown li:focus:not(.disabled) { + background-color: var(--hover-color); +} \ No newline at end of file diff --git a/web-src/src/assets/css/materializecss/material-global.css b/web-src/src/assets/css/materializecss/material-global.css new file mode 100644 index 00000000..0f7bbe43 --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-global.css @@ -0,0 +1,12 @@ +.primary-color-dark .breadcrumb, +.primary-color-dark .breadcrumb:before { + color: var(--font-on-primary-color-dark-medium) +} + +.primary-color-dark .breadcrumb:last-child { + color: var(--font-on-primary-color-dark-main) +} + +.script-server table.striped > tbody > tr:hover { + background-color: var(--hover-color); +} diff --git a/web-src/src/assets/css/materializecss/material-modal.css b/web-src/src/assets/css/materializecss/material-modal.css new file mode 100644 index 00000000..b07173c7 --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-modal.css @@ -0,0 +1,12 @@ +.modal { + background-color: var(--background-color); +} + +.modal .card { + background-color: var(--background-color-level-4dp); +} + +.modal .card-action { + background: none; + border-top-color: var(--separator-color); +} diff --git a/web-src/src/assets/css/materializecss/material-select.css b/web-src/src/assets/css/materializecss/material-select.css new file mode 100644 index 00000000..aa6447e6 --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-select.css @@ -0,0 +1,5 @@ +.select-dropdown.dropdown-content li.disabled, +.select-dropdown.dropdown-content li.disabled > span, +.select-dropdown.dropdown-content li.optgroup { + background-color: transparent; +} \ No 
newline at end of file diff --git a/web-src/src/assets/css/materializecss/material-tabs.css b/web-src/src/assets/css/materializecss/material-tabs.css new file mode 100644 index 00000000..dafbae0f --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-tabs.css @@ -0,0 +1,44 @@ +.tabs .tab a, +.tabs .tab a:focus, +.tabs .tab a:hover { + color: var(--font-on-primary-color-medium) +} + +.tabs .tab a.active, +.tabs .tab a.active:hover, +.tabs .tab a.active:focus { + color: var(--font-on-primary-color-main) +} + +.tabs .indicator { + background-color: var(--font-on-primary-color-main); +} + +.tabs .tab a:hover { + background-color: var(--primary-color-when-hovered); +} + +.tabs .tab a:focus, +.tabs .tab a.active:focus { + background-color: var(--primary-color-when-focused); +} + +.primary-color-dark .tabs .tab a { + color: var(--font-on-primary-color-dark-medium) +} + +.primary-color-dark .tabs .tab a.active { + color: var(--font-on-primary-color-dark-main) +} + +.primary-color-dark .tabs .indicator { + background-color: var(--font-on-primary-color-dark-main); +} + +.primary-color-dark .tabs .tab a:hover { + background-color: var(--primary-color-dark-when-hovered); +} + +.primary-color-dark .tabs .tab a:focus { + background-color: var(--primary-color-dark-when-focused); +} diff --git a/web-src/src/assets/css/materializecss/material-textfield.css b/web-src/src/assets/css/materializecss/material-textfield.css index 9c5a8b1a..854e97a7 100644 --- a/web-src/src/assets/css/materializecss/material-textfield.css +++ b/web-src/src/assets/css/materializecss/material-textfield.css @@ -13,3 +13,19 @@ border-bottom: 1px solid #e51c23; box-shadow: 0 1px 0 0 #e51c23; } + +input:not([type]), +input[type=text]:not(.browser-default), +input[type=password]:not(.browser-default), +input[type=email]:not(.browser-default), +input[type=url]:not(.browser-default), +input[type=time]:not(.browser-default), +input[type=date]:not(.browser-default), 
+input[type=datetime]:not(.browsertele-default), +input[type=datetime-local]:not(.browser-default), +input[type=tel]:not(.browser-default), +input[type=number]:not(.browser-default), +input[type=search]:not(.browser-default), +textarea.materialize-textarea { + color: var(--font-color-main); +} diff --git a/web-src/src/assets/css/materializecss/material-waves.css b/web-src/src/assets/css/materializecss/material-waves.css new file mode 100644 index 00000000..97e88e81 --- /dev/null +++ b/web-src/src/assets/css/materializecss/material-waves.css @@ -0,0 +1,7 @@ +.waves-effect .waves-ripple { + background-color: var(--focus-color); +} + +.primary-color-dark .waves-effect .waves-ripple { + background-color: var(--primary-color-dark-when-focused); +} diff --git a/web-src/src/assets/css/shared.css b/web-src/src/assets/css/shared.css new file mode 100644 index 00000000..d1d0ba77 --- /dev/null +++ b/web-src/src/assets/css/shared.css @@ -0,0 +1,86 @@ +.primary-color { + background-color: var(--primary-color) !important; +} + +.primary-color-dark { + background-color: var(--primary-color-dark-color) !important; +} + +.primary-color-light { + background-color: var(--primary-color-light-color) !important; + color: var(--font-on-primary-color-light--main); +} + +.primary-color-text { + color: var(--primary-color) !important; +} + +.shadow-8dp { + position: relative; +} + +.shadow-8dp:before { + content: ""; + position: absolute; + top: 0; + bottom: 0; + left: 0; + right: 0; + z-index: -1; + box-shadow: var(--shadow-8dp); +} + +:root { + + /* https://gist.github.com/serglo/f9f0be9a66fd6755a0bda85f9c64e85f */ + --shadow-4dp: 0 4px 5px 0 rgba(0, 0, 0, 0.14), 0 1px 10px 0 rgba(0, 0, 0, 0.12), 0 2px 4px -1px rgba(0, 0, 0, 0.20); + --shadow-6dp: 0 6px 10px 0 rgba(0, 0, 0, 0.14), 0 1px 18px 0 rgba(0, 0, 0, 0.12), 0 3px 5px -1px rgba(0, 0, 0, 0.20); + --shadow-8dp: 0 5px 5px -3px rgba(0, 0, 0, .2), 0 8px 10px 1px rgba(0, 0, 0, .14), 0 3px 14px 2px rgba(0, 0, 0, .12); + + --hover-color: 
rgba(0, 0, 0, 0.04); + --focus-color: rgba(0, 0, 0, 0.12); + --focus-color-solid: #E0E0E0; + + --font-color-main: rgba(0, 0, 0, 0.87); + --font-color-medium: rgba(0, 0, 0, 0.56); + --font-color-disabled: rgba(0, 0, 0, 0.38); + + --primary-color: #26a69a; + --primary-color-raised-hover-solid: #30B0A4; /* raised buttons */ + --primary-color-raised-focus-solid: #44C4B8; + --primary-color-when-focused: rgba(255, 255, 255, 0.12); + --primary-color-when-hovered: rgba(255, 255, 255, 0.04); + --font-on-primary-color-main: rgba(255, 255, 255, 0.87); + --font-on-primary-color-medium: rgba(255, 255, 255, 0.60); + + --primary-color-dark-color: #00796B; + --primary-color-dark-when-focused: rgba(255, 255, 255, 0.12); + --primary-color-dark-when-hovered: rgba(255, 255, 255, 0.04); + --font-on-primary-color-dark-main: rgba(255, 255, 255, 0.87); + --font-on-primary-color-dark-medium: rgba(255, 255, 255, 0.60); + + --primary-color-light-color: #e0f2f1; + --primary-color-light-when-hovered: rgba(0, 0, 0, 0.04); + --primary-color-light-when-focused: rgba(0, 0, 0, 0.12); + --font-on-primary-color-light--main: rgba(0, 0, 0, 0.87); + + --surface-color: #EEEEEE; /* surface color and log panel color */ + + --background-color: #FFFFFF; + --background-color-slight-emphasis: rgba(0, 0, 0, 0.025); /* stripes in table */ + --background-color-high-emphasis: rgba(0, 0, 0, 0.06); /* chips, toggle-day buttons */ + --background-color-level-4dp: var(--background-color); + --background-color-level-8dp: var(--background-color); + --background-color-level-16dp: var(--background-color); + --background-color-disabled: rgba(0, 0, 0, 0.12); /* disabled button */ + + --script-header-background: url('../titleBackground_small.jpg') center left / cover no-repeat; + --login-header-background: url('../titleBackground_login.jpg') center / cover no-repeat; + + --separator-color: #DDDDDD; /* borders between components */ + + --outline-color: rgba(0, 0, 0, 0.22); /* outlined buttons */ + --outline-color-disabled: 
var(--background-color-disabled); + +} + diff --git a/web-src/src/assets/file_download.png b/web-src/src/assets/file_download.png deleted file mode 100644 index 2ffcd1ceb6b8b8cece79fc7bd4af2db8d1156c13..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 301 zcmeAS@N?(olHy`uVBq!ia0vp^GC-`v!3HFKYIk}9DVAa<&kznEsNqQI0P;BtJR*x3 z7^K&OFk_t5M34^264!{5;QX|b^2DN4hVt@qz0ADq;^f4FRK5J7^x5xhq=1T6dAc}; zcyzwKzE`l>fT!)DzvQtqQ@)(!+Tu4|bb-0^MxM+f8I~N+gx^J$d)L-hS^XE7ap!{n zoq4H`-#V`R+MO2B$76EAf%&w%?~B`=A66}%sLrsWN$BUts`_*4)q!7{84tYD(rnV( zz&k_s@1e4TIWPFcSp63Z>Tt@r2}^LV3S<**ObTh}bQBX!(lY4MGBDlYIQ7QEm=}Av q=j!uC+Dm5EyQlyD?;cY6f%kh_n8V@NWw(H?X7F_Nb6Mw<&;$Tl$88G$ diff --git a/web-src/src/assets/g-logo-plain-pressed.png b/web-src/src/assets/g-logo-plain-pressed.png deleted file mode 100644 index 99c31aa05ffe3ca87fcdf213f8fa1f40a6eb878c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1502 zcmbW1`#aMM9LK-IZ0<9YTQww3a!EZ&a%*#aCKkEh)|`;*v}GMTa@pL8+$Q&mQ4%7H zB@?;j9+p@xDV;JR4(gb*{)Ilz^ZLA==l#>?m(M#D>tG`#t}G4!fRvrBrIWz8UqD3# zd$EvgB#V0O{0Sb@X{wjM+PkdXTY2q-F16oe3x-32SiqL4fka=?7LU;qHX zA$FGMFWq>@EA|ORxFCA>$pI}hVXPVM_&ppG+q{W>H>!;|eH$W_21RJ#>fFt-at4~3 zB9dd!^T-r4m|u#;MDrL7D=B_V1g)HTFAouR9m7O=DOR>LSIvZzncO?AX0QP?c^YLN z@XoY@S=!p#zRYJVm$eh=Tbr8%Sex1?A{&5P{0-9VjU}OT9D?e%Zg}OG9Y=}h zOrq+Hpx)t!yyCt;Q4M`B)PhqJQPSQ?Z-J3X+vhXDWOt+h*%iG#UFwr)bq2CneFLN16Py~+c77PIxI;#ew-!6g;E)coV<8f197+31Q1wAc^w zdYDCw&D0CZheU{<-dXyp9~XjUwu5e3y&#r(Z=2A@={A0yLE6m`FWPpi2HCmwEa1VK zWcS)!C;OOI1+CosGS3>S|H3egsZR2tUEtcl=ySqetttsIgr#{`lXG=ier_Op9(Bo7 z3V;QQ;wZ1AoC~(2sNEXSD{m+p*X32YJD|~U^DP};t8q~o9n1^}1e*#4;BZDez#YJj zZQH#4x}HV0D3ME^-^jVR^__0g5@?IFxY>4qB1E%5FKy=0CAz(=Cr8TU{ACut6{E9B z1K_qQRkex3t!eg!v!$4Nb8Cf#6rsxIet8(KQHyQ6lZ?j#3SqY)Ro>+jjqV@(Pk^s7 zJZ?ii?h~DQ@s6VrXRMjpwoOYY$%*9l<#iwQ6FAK(jUS{}q|#1X`PZO1sxjqC%@CX_ zG*O8!jJ}oe%vZMWrahb*#DjOe$2490yqn|1L{u70 z#Yucx7!fX_@nX^&V^!UzjFN6eJ9O2H^yqmC4?ne(ZaRS$#$Obi2lb>2zRN{rfI?rS 
z-~E}qT@{n&XEs3!9gJPB7M11vJYxRAiip1i0FrMpSa}j2vzFT#`?OnDAMDUKxp9}V zw%WSi5}uSc*PXIG1kc_FvV1^{yvdqBYYqAmQ?4(|jvU)fXZX)QW2LrD3^y#>h*8Pz z>Sdl4FE!Moz)*YkRLOV^Pnz;@i9E8FSj#*bJX6b;ULm?H?#*>1o>4s8uF)O&NPqmy z9w2&FVbi0FW2ENx`u^2(bA}v`l7g&IFU^AiDoM>Sz65EZXy17DylgcbJ(%J>bb??^&Ae-wzCT1DSy zUMNf9pPIh5Y=bdW8wxIYQi4epKmoh9aXewxYB1BG;Fi>lSOx&Fo>p4c?sWYBlp$a;?%& z^vF0BoYJ$OIH+q(HLdyQhYJ09UC=5^D55W!KWYI3Ry0GNb3?5jPRA$Yg_@i^BBPYr o#6Y<_+ptNAzIHjk+f{P=!k--$=3_QF(Z6ci&dR~^F$$OPKR|Vx-2eap diff --git a/web-src/src/assets/github.png b/web-src/src/assets/github.png deleted file mode 100644 index 135350ed91f3e2563240f03cefd87240c2547081..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1353 zcmV-P1-AN$P)41lCDJ zK~zY`l~!#`Q)d)D_q&B#+S&sBq+Kb52(_^DBPV6MvjlSw~eewU_1Q9 zN2Td$K>&+Zo>NuYG4mwUIo#M^le~YC zg;iSe{=EcNx&AVi7Tr{tGMKn?cVL#l>h(F)Ri(cPqm#c6H}=;6T$9WPyanrC z+Xz$l9tg{U+nG8#chk$dh`%*~zgYPzf#6$6r{w@?c6KK3-(ztA0Ni410tMvs(AZvp zYZ3qeRcz`c3Jsf!3ckJ|OvXDvy01%>V$J`)=7ow}(1BDl3bA36M=l}#k5uIiyis?05 z#mV@iv9{Jw<^xN8eKlb+ys=sW!eZQFsjGi028Gfud?vz9Rg0LTBqw5vH{gpcG-wDibe^p=wnDx@b*>?3%8| zWI$=1VYYZPo(aynIMt6lLbr?W|>PzP$`}qz_oPMJQZA&lHAy8!KK7>5Siw? 
zvZSk{H%akiKPtx9o0w6%vCva%pH;#6_Ni1~;2kl_K3)l2oSYg+^#);SJ>1QP~Ze<)YSq&qwUxc2Gi18;P3D!Pkl$qE%kNPcwX*ys?2#5K@bE0 zK#>(W{dlNJN~ErbTUu@bfSi@RLC+|%Eb;L~HyUd75A#FeZpv+|EA-UbgQrfPQ)Yxw zrP$;L043aV_PWfAp$s?x0D>Z@!V4qemX@3I!L5wfrE*&t{@G~PczfFj4gio2MvfD< zf*m;}_R9v}XWN8{=y$oVl>g&uJ@cl_rNg*U|LOvDsHw8~Yli)MDbj7bq{uQq6Mo(R z0Dxxt2iNyxKVdd*rp=a1JSI&|KltlyDa!r<3dk~-USz`&EDPMTkJPwa-FV@;*SY?u zpJqD4>^TTEH(%v?1BXbbxhBu)ypD;|)bKao?io7z)iyB_>seuTyYfm)ZV^SM_qo2n z!O)rJUlt4;008#(?YnAc$sp;p9Kh{`pXXL=&}b?)KE?E!4{)7keU>!YWPmQKc%*x8 zd!xG2{WGE>4P0+vPw4wIKdm_QO&-QZS@TblFPJ|>P!wCXlghHhKO60u2|sU`+c1|U zm>r`YrU=n#c4B(E8q?7gs9II<|FtC|#gqL~EOCR6C%PBAO=Rp}qrwm;9QZ^>00000 LNkvXXu0mjfvu}XZ diff --git a/web-src/src/assets/gitlab-icon-rgb.png b/web-src/src/assets/gitlab-icon-rgb.png index 21a02db58782252d22ed6d2c913b4cdb3d32a76d..40058c715aa6b207c5ae86d7113856200a2454c9 100644 GIT binary patch literal 9914 zcmeHtcU;rSw{}8rf)o*?hmM3KlmMYaXi}v&m5>6VC6LfXihvYBk)kv~=?Y3mic~3z zD@YSiq^StfR8&NeH=w)gzIX5MzWe#S_rC=KnVIK2=gf1?%rM_i5-iOP4>28M0ssJq zjE#_1l)tX~F9tfw@A+#%DgXd$U8s!%*$NdTO!Oz<@ZMNqatIMCj19))0D$1B)hy>s z{-#67y(T6|6d`(Gj(IVd{&XAU+GOQPlkE2s=!hnhQ;vDIJ@khekd|vZ3+v0@do9di z%`Vxar*AJxetEGl8@sjW{$g?VV|bg^r|_iq_LgRD7@g@1|E}usG!obz2d6IDhDD3`B)V7*s;vnCgfa}Genv@6<__q^4V>Ew0W z;9OyGrZc|2UZS#5#JGm5d>ENa=GFL2qndd-?A>sf(0OBeQMNXXtqvZACyF0sYoEZe zpH;ax978f}?VacC&Y=79;Of=ye4PVl7GkK=%{5<-j-=$Ci=$F9%dYnrE4g{9&wV^P zY7; zbXztvS3nkxf#x4hiW__pKJX$agrvG>7?LEEMapb$2a%pweMx$}d#y1akmVX(Ogg%MK3yhdxAYPT*9FOGcc8Znj4Db{TnH$+B8qxRLkL z#+u`uX{nb--Z?0f$Em(mC5^3c?Uq2TY3-K5boIKgH|WGZZGO`)xS9QCxil_$tzIZk zpVwsC$>OzuZtl*#h%K!!GvRN8T+6&IvNf?{_E{#)Ahk?EqV7@SPp_Jlbt+60o}b`a z6Kr`{a$HK>?EA2z!uTBhnEtaVw3)!>BP9>2-j98blD^G+jraV-hK)K@eZT?9yf%-C zMZGcz=_?GmI7jZL0S)?Bun4X%q>^D5-i9R(t^Uy=^Z5oy0LK|kRApJ(yk=M-zrRZg z+2auJ67q_K_R>nbSLS4(hxj8@&Te|DkxH zYr@sQwIbyvr@7p+^hX`fcGrql%HsB%4^MRK5G|C}Jp=TF7mtBPPGre+OCYsgiK`fS z7nrzJUOQBy%zf{bS=VYsZhqvbKwA%W@*XeSo{rbm>ZxHR`MUmRj+!4BYWR%Wt+w8t zdeeV-0Dt=w25A)AmD@7Yt^mEsv&cH4S@2AAnhjcAbJOxdiTPQs@&FmGTU7Qe+BTxO 
zWU~6GBV1NNP*dr71LT30l&c!gtT>>^jBhFY+10DU1?ZP4t@lqNrJ5Wj#kr4i-AHnZ z60bZv-Lv>8p-!}rD<#qI_-ubxX2M%t-*J9ZlX+s6NL#FXl0xN$iwGknLpB*)o~X#V z%LK6UyGl%!jsNW&nTwo{4fvgV^uAyPeYT0A?7=kdbC=KFekE9!&kVp|&fi@O<`foE z|47f&F`3jfd2E8aEHr03r*5T4L2v?bhVEJ5<*Ii)=bzPq&%G>~h1xln^B?v2zU;o5 zvmK?Pt<9DdkPVvVc-w6Q35dnfAT>5^(Np&iL*gRM7|y*NOC42|nCLojGO~{8)d->QKFWNe}0-B~fYMrBX9rwthEO?`Zs%GyCQyL8=JEry%^BC{p<=0%QNa-TvMTsPN3A6+P zlEWLM{N2H{;KQ_>))AGFJKg5hb=jNmswR1vE2|i}qdx#d9j;$dmdHKp7hJ^b)F2&7 zH@zHN#W!#vuZ*!QV>u$hHTGHuJ}MPAmf(po%aLf3^&=Ua6=O*q%A8!emTHc^o1`oa zn0gBG3P#unm^@V6Nm4BPz@spyDdf@s)wHql*w8z@rH;_p-r{b#&Y+DOk~{jB5@zDNVKZ@a%s5%y%PUr zrTPLZ#3yU~rNCxG$v71}jW>PTVeWX}qMa(>YB8Y$qgO~|4x9;yU!K65Ug01?(^Dlr z>s@_($m!VT*|$x66OY|&R7610hGGN3N1%*l7tridO#IFg&QOmFhWPH1UPKFQ_ZFD5 z4emLATn|wLr1lqum8+{+b|+4$n$5Hdt!{8-X~&w$V8C*z5xi_pN{j-puH91njj0|s0{LQFrHw4_QM*D(4nR?*jT`p(W`fiJP zK7|-oRi3L~O~W|d@Zz$#rl+0ct5osyYMrc-S!+BoC+(vI3vmzAnF>Yc;N2^_M=`l0 zbdMUCcR@?=(%br?>GGDO-`a573{#3*Zb;>di>kOq(CiZ#oTHSHx4q5x>LT8rASH2{ zhoZ~%BcW{dxI{x`G>bM6<7dZcC!UD$lf|oWBLr?=E1vCaxU*VqL_~z5<<#yA%cl@5 z)EJ9e)tTajKa^h#6YzDdZ>YB$&psOu~<upDhi8hOUfL|{iTWO0ve+ulE+8&Ry}cSwh>DaJ$HZ%;)k zZG212G20SRkdua7=!N;|_$+ut38nIE)Fjk$J%pGXY02{7xprQKLwPA^VrL$f(=e)2 z8z_DRDR#TEDB1k!YI|U7-t$di2ioz6CBQKLSL0+jRf=6>uM8lgYAnp^&1f_5P+zu+ zPpnl*4IQwvlk1xh-HHS9bJ(_rFR)QZyjLja;0x_?eI2u20VH?{w;X z#7@t<+OEFA)vuU#4#&BYzA|(|c2)CI@0wak3v;v*Xh-CcMR4+({l3-|n_ws_{{jz+Y+d_89>O&{Z4iGD6!z|i)NODf4J zqM_>9hcyVp;Qj9r1$(cxZ?&{UP#t^NIVaa^P!0e9WAJ)QPGq+fQ}GuBSPR4!a6El#Yb=OhuVvgJ^LxTo@Nc~*uY zn(eTpp``v%>Gu)FH}o&k_w8=2wOwji2SgNh*(j*cm1Sd#(ilZ@oR8?OMv^YV&L-O#T{B%{ka(t;WIPasTUrdHr>{hM{ZiqHP zs7bD6r?=^CRld8yMbpshD!Q^b7Xt`aYlOYnWCCQVn)wjDNQ0>v$}=^r?YcgKE1WD(PK=#5(Y* zTE}df9PbrTT90jJ+S%QUZPN6cGgKO0zJ$u~TulJ)vP=LqKR;EW{-e`hf`fwG347P^ zpT1DH$ooHV;N7$Z0N9f7l;;T!W~K-hg|h4#Xdh2603 zcwY_BdP_S<7?05a*(;er&4_wf54=&RKh`?b+y))$g;vFYv^1I2gAo)0A1oOq9PH!m zOF{%|fDU*Ol=u7H5RmWzgzTjOaxk+L)+6|1g%#x#<)C2wV0@rFNRvre-5-NPSRoC5 zQczkNAP+K`h=4$Xf`a6N6yym0?hu%&swxC350RG#QxITMh%Xrx4E807?o<5WKw?Q~ 
ze>{rh>y7oH1VW-P!~TLN<8gl`=r8lxANenVP~82&{}=S{dL6`apeq7N zKnLtQHAZTH_Txoh2xvS8aqw2b4GTx(peV2+Tp0~kgsI}ds>%v*FcyV{LzOTXI1Y;W zh055ML`M0dvHMgMaydMO2Zh5Z%PY94fYBIvMX(|c3J0U$l%@&{qlj{YxuMWlw_hkM z{PC2Pfb#w|s(mU9g$fRXyJ65ODqt*HnG%v4S^=yI#Za0!7zPJ}!QrYX>;ct&9uPW~ z#u^}bIq07~mfk2bj^OX30W!t=1_b{(V1xI;T9Z-xqQT%$WtconRZ&$Hs;mT+{}W_~ z^(Rq^e4i5rm6QK5f70^mpO4TSIB=`?@5&=gJLiuBL+$l9c z$unhX9pqV9>Y)0h|B4Rsz*1`DI249}LP0;$E36LL-%x*_p8EclV`hf;G3_elL{&2KS5o&!YUF%zuUbu+}3GLnw>ZgKQDx`ybu^4e$qpDISgW zB@zBp=)XdK$nwj%Ly7rEALTrw9FLGc&c~l+vA=Tu7r%a%+y9~m3iZEE{*k`_%Jr{Y z|44y<1pc?W{*~(=De#ZL|5n%kH@TSpykTQ~DgWpNQSQ%a&O6sBcW*j36GJ3mZ~wdC zerXz|hmmOHNCE(uIrm>cKz1$#P(BHe<9u-x z!_^ltvh*U7HZxY1xu&1${1F;AcJtZLyop3udcgk@nqpd`cEoOKt#j_@r!}GY)wVems@{ z*iFHcV@^{@?0_SDxuEP-1^^>l4}y!`(hE%r;?%f1&KL3gxlW{$^S59H^}8uXG!CWB zZ-`8@*LwX68jTe)aE2i5rko3h3d>EdOy}HTxu<0i$NkN-YdY9Pfwj4*_tZdLo6DIZ z)}W-KX`dA|q1JnJRR62DM0mPZR$ukhB@L49!u*-!rq-?CtL3%tENhQmd*bMt?K^o@ zFC)4pYg5-%(0aMQRonmO^D3|McZ~*j!Z>=Em2Z>*MSFs{Ef-oPRG)|s7i^iDmz|6h zgSwm6mTEIpfgw+S6ORzT=j1yo{4`Q3zSw2cA(=aCu;1#w%83Qr5*p|O(SkYRa7booW~HjYFFDUIZnG9RmKeyu>DRee$ZlFXM&_me)KimVJ^ zmEW7cd6`F#>#gz7g5aff%lcb>r|Pt*FbYejT|;{E-jwMs_MGptq_;{_7v!^6y5;|L z{UL6+@q!4|t(nmHTe^#LD@uc|%c_BbJ#1y^I1&$yQ5ea>c0;}BMz8<~xp3GdSmjP* ziS&mAGT<&hdaOCV!h|sxMEs{Kuyt1BOl-#bf0a?DluPCkz^jAZ_#h(C>kV@EFC@ihC&?j9u>buL5GIT}JR#wp;bgV`t_ z2e^>b!Nyftbm}{#Yqt84=14$z3bH)f$0pZjmuh7g92RlgXKuhGv>7WzX9u{O=`(!7 zlpXDNRO4hZ{&>bY8D;17+v&A4Y|dL8K-#BJKr5rE-1Z0i*)Kb0i{@UrYPQ=o)_HPy z8cp-K+nYWc%pHZz*!H=_Ev0taA#fO^if5|?lLqe$J8!FK3!d>7pt0NX+0kYwK&r4- z0Iv@W>|}pn6JCKc(@*GySZI=u z^6q)L8V_9*G6JSEM7+(sPaDi~k4BqqsC><($ffQi^J;4UkTq9MXBo@(EIu zsBQbYVKhT#k?Y*8PCEf0>M{0v%s@(zB({hjB1h{7T xPSUeG^%Wl--d)T;P0V-D{cm=35sw|VlUue_538N1DaQ}MSl=9ZN7ps_{{U@VeEt9c delta 1163 zcmZ{k2UF8`0ET~o45%e+q7p@sRHmRtK(GaDQkEFD5iPR}4U~-poBq=>0v-q+AWMcJ zmJP=UEl5J-LLHbzY|h8YTO;g=)Hn-;h21}}K7Z{Ne%I)<>2w2LtVP^Bh>S&mMzi7SkxUD^i!0+`vQO7UNc+Zn zp_O{G4gax{h`I^rzG?$;vq;6-ypTn~AZ0VMe!Rg$XyDq%t0mB_w8CME#*|Zx1=!U7g2c-dC 
zmVKg4UcbxEGw$f^VZFH|rIZ2~mK)DdXn9;{UeT6grBHdt^t3l+Lz|5+mR^zsu57*Z zcE4uGyz&p3#kBD^Bzq1zu$W{d-0TyBR8CNc z%2cq{y^gI`rbXBdXPilwnbMW$aIy5K`c&Aq>eVRH(y5fQbWK>Q3T&M&;h%v!ov8Yk zHCBVksGc`NLbEe0bz0qg-wlxUQ9_9{3dlH|`huijUn7C~M?;ab;Z;EaqW2=`XmQTM zKpyoH;x!FAuc>b+-lKva&_%6vR;~wZ=qw3M-5#0k=fYbps$q>ewz_eQuACNtU0g)) zTkaa;yGQZc&8&x^dsMaPX$Zsr_-HY0OBCEt44t#=wCU#yXuqo4(svw&9J`5J^kNQ4 zaa);w3nC)|7spPL-v?8-&yG;ZWPAoCc8y{QfLb(ZCZBTch+I9alQhoRum=`pGMCGZkh8BS^|oeJVrG)h;f}M}VpIzHVXkx0KliZoRwn z+?$~*oYB`m`ZSlf-Uy4Rg4h#?3V2#7wJQjn^(VKoq3FEO2M}?KCQY&D_7n~9f#{oN z={+aoMuxH;yG!v!-CqOHxaHMSYq!?=GLPw&?KoZXNo!MLJn1aOtgz%=8iX&gSJ(?& YjIF5;OOpNe9|brQFB9O+7XyF)9|e{ip#T5? diff --git a/web-src/src/assets/logout.png b/web-src/src/assets/logout.png deleted file mode 100644 index f28bd62eb163e17936e145c34d6dee49323da5f1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 483 zcmV<90UZ8`P)~0!KhsXf^NB!6i@v+Q5xN16M!?DBIZa1VXDdU=er+ z_KoyY2WoxxfDfQ1w5lg{uw(JWNdMb)21fdQ;53$FB*Bu!UXFzcm$)ZKwzYQmp05@;0xFWw9u-QQh5j10{TF6+~kKoJD>{` zflqrxJ-|c;+OV=zZ1Qy@{d*^Z$ftJ!)KXLmfToeYN!r~cjq8B47g~+1VgR(`nolgV Z{Q`~jWfvgn1rY!M002ovPDHLkV1o3z#<~Ci diff --git a/web-src/src/common/components/AppLayout.vue b/web-src/src/common/components/AppLayout.vue index 2e36a1c7..ae7ebdef 100644 --- a/web-src/src/common/components/AppLayout.vue +++ b/web-src/src/common/components/AppLayout.vue @@ -1,226 +1,215 @@ - - \ No newline at end of file diff --git a/web-src/src/common/components/ComboboxSearch.vue b/web-src/src/common/components/ComboboxSearch.vue index 0b419d32..a2d54565 100644 --- a/web-src/src/common/components/ComboboxSearch.vue +++ b/web-src/src/common/components/ComboboxSearch.vue @@ -1,199 +1,188 @@ +function disableDropdownCloseOnInput(comboboxWrapper) { + comboboxWrapper.dropdown.options.closeOnClick = false; - \ No newline at end of file diff --git a/web-src/src/common/components/PromisableButton.vue b/web-src/src/common/components/PromisableButton.vue index ade143f6..210e653c 100644 
--- a/web-src/src/common/components/PromisableButton.vue +++ b/web-src/src/common/components/PromisableButton.vue @@ -1,23 +1,23 @@ \ No newline at end of file diff --git a/web-src/src/common/components/combobox.vue b/web-src/src/common/components/combobox.vue index 0e89e3df..e53b6601 100644 --- a/web-src/src/common/components/combobox.vue +++ b/web-src/src/common/components/combobox.vue @@ -1,288 +1,289 @@ diff --git a/web-src/src/common/components/file_dialog.vue b/web-src/src/common/components/file_dialog.vue index b57744e6..d25cd86f 100644 --- a/web-src/src/common/components/file_dialog.vue +++ b/web-src/src/common/components/file_dialog.vue @@ -1,438 +1,433 @@ - - diff --git a/web-src/src/common/components/file_upload.vue b/web-src/src/common/components/file_upload.vue index d912397d..6a442088 100644 --- a/web-src/src/common/components/file_upload.vue +++ b/web-src/src/common/components/file_upload.vue @@ -1,17 +1,17 @@ \ No newline at end of file diff --git a/web-src/src/common/components/inputs/DatePicker.vue b/web-src/src/common/components/inputs/DatePicker.vue index b079e212..f7b7cb00 100644 --- a/web-src/src/common/components/inputs/DatePicker.vue +++ b/web-src/src/common/components/inputs/DatePicker.vue @@ -10,7 +10,7 @@ \ No newline at end of file diff --git a/web-src/src/common/materializecss/imports/chips.js b/web-src/src/common/materializecss/imports/chips.js index 74ef0b9b..b2722882 100644 --- a/web-src/src/common/materializecss/imports/chips.js +++ b/web-src/src/common/materializecss/imports/chips.js @@ -2,4 +2,5 @@ import './global' import 'materialize-css/js/chips'; -import 'materialize-css/sass/components/_chips.scss'; \ No newline at end of file +import 'materialize-css/sass/components/_chips.scss'; +import '@/assets/css/materializecss/material-chips.css'; \ No newline at end of file diff --git a/web-src/src/common/materializecss/imports/modal.js b/web-src/src/common/materializecss/imports/modal.js index 8f772784..d3b53462 100644 --- 
a/web-src/src/common/materializecss/imports/modal.js +++ b/web-src/src/common/materializecss/imports/modal.js @@ -3,4 +3,5 @@ import './global' import 'materialize-css/js/anime.min'; import 'materialize-css/js/modal'; -import 'materialize-css/sass/components/_modal.scss'; \ No newline at end of file +import 'materialize-css/sass/components/_modal.scss'; +import '@/assets/css/materializecss/material-modal.css'; \ No newline at end of file diff --git a/web-src/src/common/materializecss/imports/select.js b/web-src/src/common/materializecss/imports/select.js index fed86161..04f76299 100644 --- a/web-src/src/common/materializecss/imports/select.js +++ b/web-src/src/common/materializecss/imports/select.js @@ -3,4 +3,5 @@ import './global' import 'materialize-css/js/select'; import 'materialize-css/sass/components/forms/_forms.scss'; -import './dropdown' \ No newline at end of file +import './dropdown' +import '@/assets/css/materializecss/material-select.css'; \ No newline at end of file diff --git a/web-src/src/common/materializecss/imports/tabs.js b/web-src/src/common/materializecss/imports/tabs.js index 45203884..07f441b8 100644 --- a/web-src/src/common/materializecss/imports/tabs.js +++ b/web-src/src/common/materializecss/imports/tabs.js @@ -3,4 +3,5 @@ import './global' import 'materialize-css/js/anime.min'; import 'materialize-css/js/tabs'; -import 'materialize-css/sass/components/_tabs.scss'; \ No newline at end of file +import 'materialize-css/sass/components/_tabs.scss'; +import '@/assets/css/materializecss/material-tabs.css'; \ No newline at end of file diff --git a/web-src/src/common/style_imports.js b/web-src/src/common/style_imports.js index 0bfbc4c4..60028770 100644 --- a/web-src/src/common/style_imports.js +++ b/web-src/src/common/style_imports.js @@ -7,5 +7,8 @@ import 'materialize-css/sass/components/_color-classes.scss'; import 'materialize-css/sass/components/_grid.scss'; import 'materialize-css/sass/components/_buttons.scss'; import 
'materialize-css/sass/components/_waves.scss'; +import '@/assets/css/materializecss/material-global.css'; import '@/assets/css/materializecss/material-buttons.css'; -import '@/assets/css/materializecss/material-textfield.css'; \ No newline at end of file +import '@/assets/css/materializecss/material-textfield.css'; +import '@/assets/css/materializecss/material-waves.css'; +import '@/assets/css/shared.css'; \ No newline at end of file diff --git a/web-src/src/common/utils/common.js b/web-src/src/common/utils/common.js index 0f0a856e..d2a6ccfb 100644 --- a/web-src/src/common/utils/common.js +++ b/web-src/src/common/utils/common.js @@ -666,4 +666,25 @@ export function trimTextNodes(el) { node.data = node.data.trim(); } } +} + +export function getElementsByTagNameRecursive(parent, tag) { + const tagLower = tag.toLowerCase(); + + const result = []; + + const queue = []; + queue.push(...parent.childNodes); + + while (!isEmptyArray(queue)) { + const next = queue.shift() + + if (next.tagName && (next.tagName.toLowerCase() === tagLower)) { + result.push(next); + } + + queue.push(...next.childNodes); + } + + return result; } \ No newline at end of file diff --git a/web-src/src/main-app/components/AppWelcomePanel.vue b/web-src/src/main-app/components/AppWelcomePanel.vue index 0fe61f7f..e250eca8 100644 --- a/web-src/src/main-app/components/AppWelcomePanel.vue +++ b/web-src/src/main-app/components/AppWelcomePanel.vue @@ -11,29 +11,29 @@ @@ -50,17 +50,17 @@ .main-content-header h3, .main-content-header .breadcrumb:before { - color: rgba(0, 0, 0, 0.87); - line-height: 1.3em; - font-size: 1.3em; + color: var(--font-color-main); + line-height: 1.3em; + font-size: 1.3em; } .main-content-header .execution-breadcrumb { - color: rgba(0, 0, 0, 0.87); - flex: 1 1 0; - min-width: 0; - overflow: hidden; - white-space: nowrap; - text-overflow: ellipsis; + color: var(--font-color-main); + flex: 1 1 0; + min-width: 0; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; } \ 
No newline at end of file diff --git a/web-src/src/main-app/components/history/AppHistoryPanel.vue b/web-src/src/main-app/components/history/AppHistoryPanel.vue index 343bd14d..f1ce8f79 100644 --- a/web-src/src/main-app/components/history/AppHistoryPanel.vue +++ b/web-src/src/main-app/components/history/AppHistoryPanel.vue @@ -5,30 +5,30 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ExecutionInstanceTabs.vue b/web-src/src/main-app/components/scripts/ExecutionInstanceTabs.vue index 5acec72e..7af81408 100644 --- a/web-src/src/main-app/components/scripts/ExecutionInstanceTabs.vue +++ b/web-src/src/main-app/components/scripts/ExecutionInstanceTabs.vue @@ -1,225 +1,226 @@ - - \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/MainAppContent.vue b/web-src/src/main-app/components/scripts/MainAppContent.vue index c3fb9c64..43ee0380 100644 --- a/web-src/src/main-app/components/scripts/MainAppContent.vue +++ b/web-src/src/main-app/components/scripts/MainAppContent.vue @@ -19,32 +19,32 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptLoadingText.vue b/web-src/src/main-app/components/scripts/ScriptLoadingText.vue index 42f4ce65..74d2c9c6 100644 --- a/web-src/src/main-app/components/scripts/ScriptLoadingText.vue +++ b/web-src/src/main-app/components/scripts/ScriptLoadingText.vue @@ -5,28 +5,28 @@ + }, - \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/script-view.vue b/web-src/src/main-app/components/scripts/script-view.vue index a95538dd..1fa49ba9 100644 --- a/web-src/src/main-app/components/scripts/script-view.vue +++ b/web-src/src/main-app/components/scripts/script-view.vue @@ -1,547 +1,547 @@ - - diff --git a/web-src/vue.config.js b/web-src/vue.config.js index 581adc21..f502c1c3 100644 --- a/web-src/vue.config.js +++ b/web-src/vue.config.js @@ -42,7 +42,7 @@ module.exports = { css: { loaderOptions: { scss: { - prependData: '@import 
"./src/assets/css/color_variables.scss"; ' + additionalData: '@import "./src/assets/css/color_variables.scss"; ' + '@import "materialize-css/sass/components/_variables.scss"; ' + '@import "materialize-css/sass/components/_global.scss"; ' + '@import "materialize-css/sass/components/_typography.scss"; ' From 2574e55386e35dc893091702ac0f84f5d31ea540 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 15:08:08 +0100 Subject: [PATCH 046/398] #324 fixed tests --- src/tests/web/server_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tests/web/server_test.py b/src/tests/web/server_test.py index f392dd9c..0761d7ee 100644 --- a/src/tests/web/server_test.py +++ b/src/tests/web/server_test.py @@ -119,6 +119,7 @@ def start_server(self, port, address): file_download_feature, 'cookie_secret', None, + self.conf_folder, start_server=False) self.start_loop() From 32ca66c18641a10025b10afc638902bf112df81f Mon Sep 17 00:00:00 2001 From: Danny Rehelis Date: Fri, 25 Dec 2020 16:21:49 +0200 Subject: [PATCH 047/398] Apply suggestions from code review Co-authored-by: Iaroslav Shepilov --- .../admin/components/scripts-config/ParameterConfigForm.vue | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue index 9335753e..4bfea44c 100644 --- a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue +++ b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue @@ -67,8 +67,8 @@ title="Allowed file extensions" v-model="fileExtensions"/>
    -
    - +
    +
    @@ -358,4 +358,4 @@ export default { .parameter-config-form >>> .row { margin-bottom: 0; } - \ No newline at end of file + From 9246997d52b7b936004a0109b7a8749224e307c6 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 15:23:39 +0100 Subject: [PATCH 048/398] #324 made travis to pick custom materialize-css dependency --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8b69a74f..d5732184 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,12 +21,13 @@ env: before_install: - sudo apt-get -y install python3-pip python3-setuptools apache2-utils install: -- sudo pip3 install -r requirements.txt + - sudo pip3 install -r requirements.txt - sudo pip3 install ldap3 parameterized bcrypt - sudo pip3 install requests --upgrade - sudo pip3 install pyasn1 --upgrade - cd web-src - npm install +- npm update - cd .. before_script: - cd src From c48ccc8f58add2b8332caac213a4220349aadf3c Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 15:50:58 +0100 Subject: [PATCH 049/398] fixed travis file format --- .travis.yml | 100 ++++++++++++++++++++++++++-------------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/.travis.yml b/.travis.yml index d5732184..04f08aa6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,68 +3,68 @@ sudo: required dist: xenial language: node_js node_js: -- '11' + - '11' cache: directories: - - web-src/node_modules + - web-src/node_modules addons: chrome: stable env: global: - - OWNER=${TRAVIS_REPO_SLUG%/*} - # GITHUB_TOKEN - - secure: 
GtWCiMPNz3MDDTvXqaVsgZBvlxYRZuTY6sUEhWTL37zJZHgRLlxTCTD27hHZp2P4B6G6KGV/0iEvMYxqzo6jMrzcEnt7TlGetteqI18dhV1dIQGH6uy8Y/PktkK2g2FuJeGV3FRK4+a21v6zzSuUaFa26k97mPapa4LS83XXj7rc13ll23HhhtObVF/a1n3U0Xwe4FkdoxbKlecJUnNujESxFk4xQl4N1tFv15fldDlFq0XWs26eEp3LU/n7wkMzbg9WEqPvDeaYlvir9VpvcYWpVvf8Uz+wpvW1jnE2R6bK1TDv5BNzQwbf2JtN284yZ8I1nwZtOJkc1RUr5wUFxCD0p8tc30sAlKemI385v2t1ccoBYMwH8LHFIUoSXoolHZBsnZeADqpo3a0d8hVFWv4AcxC27Q6SfMCltl0+ogAoQ7wVfkRT++044p415Ar4raEqIkqTm64FaRNMS0v2y5mS2634PMSGMRnK+NBrWz1yFKxsiuPKypIWygtrJ4pyiL2yPBZupnCluEgqva3q6AmMDlNuSUmcTEnCRGB01U2if6/oSEgISH1VK2lsRSxuoG5dFFuezwv90YENh/7pw0/hgge7EOse6OzDsU3uNRWcTuXF7eEBhjM1wBHWHlaAwV9vfHMZX4sHWP4R4CZhPjVEPRz6HPEg1tFCs2EvkuI= - # DOCKER_USER - - secure: 0AUrT+5xg+JGjAeZQfeCyVFHYw04YDSzCpeQILtt7Ca00rNJ4yshvCx+zEsyh0aMEO6+fLmGo4KCWC1tHKqKdO9ByIBBO1vbsQ83kDqC7GR1xorK8abN708NArxLdqylPRrrYK9Gr0VLk8t17DIfQdoP2QryJ2mDdsthzpliZOJ7c58LxIkBlog0uLabrX/d/m8ZEpgpqQalCUBmImc31tKBDprl5CVLk8ONLRVwdQ8WcYQaTNpOiIfx0OWp5iX+P9gMUyBTw8aFMlmwfZpXbDyGlwckDdIkKfWTUPq8FXNPLjyPbJ2zraID/kkNEw6J+x91w/F7VydhOoU/Gc4IAlL6TKN5xQquSclMz05kBHthSzZf7g6KUuQ0TgzK46zArcV4ZEItLU1h2IcLsPLi3+/O6TtUdUSQOIaCQX6YbQsnGDgEMWjtfpNJKpTyd+7SR+BuoQmtihr+Utl8rfq7vFTzkz+AiCvNnGQJzQoZKs83hgC57BRSD+LIkI0BrZ54ijNYoKCqvKHqmamkrXQEdiImBlTg8NpmBNHJgHdL6PRqi3NxLzJzdqKtz5pkI4MVVLYFXsUApD0AWvEOPejLnfVEGber5cA/Hm3HhqB7M9ja2BFml+oYBLc3mnzjKd/FT3VwWMiijfTJVvr6feFrhSQGiRyBLmaoRNmUSjUsfR0= - # DOCKER_PASSWORD - - secure: 
S22ffhnZOs1yFdBwJO9+uzy9DB3e7ehLWbj47U1zavsMKMDyDw0lkOIy8PMERFh4roBoM5dG95RIVbfNbrPXQxnY5Og2w7RTTv3eeHGdYzh+34Dppfk8mhEAhn3NL98sOe6is/5sEDvZ2ykPFLvoJmyV15V7Wvtuy1Zx+0lyZ0R0tX6sVJUWDlClHspCuSIKK+iptL5yLu4TtvX9Wks/c3kH6GIXYIJIeC63D+RRhuetbtGKND/RtFkq5IDP9qMZNXUAT/Mb8hrsk9HntFgl79dG2ChvBpDE8/LqjYDBiFTiUAtJfBhC0pVB3WaEwGTU/hWe8WTjl29JIkGcoaeT7+wncJ72lEPJoO60YWSdtWfTlNlUiN27AcxGqk39MDhB5NAbuJpKvcFLMmWFY2uJefrR6XVEXBZ+9yAwzuZmj0GYFOQTuczAqncyj/3BuOEqfIkkQ5BLAS5BUuzSEbHOjwasqbTVcWM1H3cv2ZYATXQQN8KhcZ5c5lxy8eD0NXHKvBFlS3HOXOXn3P6PqGgFHzjL+yyHMvzIXBJY4jEr8FIH16dwbXDqb4gi4lrrCZHDeIhVKsmLSUJjhmiKeP7dWcfUOGMxzLRmqA8r58TXcN3OvBrqNN63nUSG+Wb6XxmzLwE4PrlBy0fTRymG8WXrdE/Z2lglBhc8J3A8ER9c46s= + - OWNER=${TRAVIS_REPO_SLUG%/*} + # GITHUB_TOKEN + - secure: GtWCiMPNz3MDDTvXqaVsgZBvlxYRZuTY6sUEhWTL37zJZHgRLlxTCTD27hHZp2P4B6G6KGV/0iEvMYxqzo6jMrzcEnt7TlGetteqI18dhV1dIQGH6uy8Y/PktkK2g2FuJeGV3FRK4+a21v6zzSuUaFa26k97mPapa4LS83XXj7rc13ll23HhhtObVF/a1n3U0Xwe4FkdoxbKlecJUnNujESxFk4xQl4N1tFv15fldDlFq0XWs26eEp3LU/n7wkMzbg9WEqPvDeaYlvir9VpvcYWpVvf8Uz+wpvW1jnE2R6bK1TDv5BNzQwbf2JtN284yZ8I1nwZtOJkc1RUr5wUFxCD0p8tc30sAlKemI385v2t1ccoBYMwH8LHFIUoSXoolHZBsnZeADqpo3a0d8hVFWv4AcxC27Q6SfMCltl0+ogAoQ7wVfkRT++044p415Ar4raEqIkqTm64FaRNMS0v2y5mS2634PMSGMRnK+NBrWz1yFKxsiuPKypIWygtrJ4pyiL2yPBZupnCluEgqva3q6AmMDlNuSUmcTEnCRGB01U2if6/oSEgISH1VK2lsRSxuoG5dFFuezwv90YENh/7pw0/hgge7EOse6OzDsU3uNRWcTuXF7eEBhjM1wBHWHlaAwV9vfHMZX4sHWP4R4CZhPjVEPRz6HPEg1tFCs2EvkuI= + # DOCKER_USER + - secure: 
0AUrT+5xg+JGjAeZQfeCyVFHYw04YDSzCpeQILtt7Ca00rNJ4yshvCx+zEsyh0aMEO6+fLmGo4KCWC1tHKqKdO9ByIBBO1vbsQ83kDqC7GR1xorK8abN708NArxLdqylPRrrYK9Gr0VLk8t17DIfQdoP2QryJ2mDdsthzpliZOJ7c58LxIkBlog0uLabrX/d/m8ZEpgpqQalCUBmImc31tKBDprl5CVLk8ONLRVwdQ8WcYQaTNpOiIfx0OWp5iX+P9gMUyBTw8aFMlmwfZpXbDyGlwckDdIkKfWTUPq8FXNPLjyPbJ2zraID/kkNEw6J+x91w/F7VydhOoU/Gc4IAlL6TKN5xQquSclMz05kBHthSzZf7g6KUuQ0TgzK46zArcV4ZEItLU1h2IcLsPLi3+/O6TtUdUSQOIaCQX6YbQsnGDgEMWjtfpNJKpTyd+7SR+BuoQmtihr+Utl8rfq7vFTzkz+AiCvNnGQJzQoZKs83hgC57BRSD+LIkI0BrZ54ijNYoKCqvKHqmamkrXQEdiImBlTg8NpmBNHJgHdL6PRqi3NxLzJzdqKtz5pkI4MVVLYFXsUApD0AWvEOPejLnfVEGber5cA/Hm3HhqB7M9ja2BFml+oYBLc3mnzjKd/FT3VwWMiijfTJVvr6feFrhSQGiRyBLmaoRNmUSjUsfR0= + # DOCKER_PASSWORD + - secure: S22ffhnZOs1yFdBwJO9+uzy9DB3e7ehLWbj47U1zavsMKMDyDw0lkOIy8PMERFh4roBoM5dG95RIVbfNbrPXQxnY5Og2w7RTTv3eeHGdYzh+34Dppfk8mhEAhn3NL98sOe6is/5sEDvZ2ykPFLvoJmyV15V7Wvtuy1Zx+0lyZ0R0tX6sVJUWDlClHspCuSIKK+iptL5yLu4TtvX9Wks/c3kH6GIXYIJIeC63D+RRhuetbtGKND/RtFkq5IDP9qMZNXUAT/Mb8hrsk9HntFgl79dG2ChvBpDE8/LqjYDBiFTiUAtJfBhC0pVB3WaEwGTU/hWe8WTjl29JIkGcoaeT7+wncJ72lEPJoO60YWSdtWfTlNlUiN27AcxGqk39MDhB5NAbuJpKvcFLMmWFY2uJefrR6XVEXBZ+9yAwzuZmj0GYFOQTuczAqncyj/3BuOEqfIkkQ5BLAS5BUuzSEbHOjwasqbTVcWM1H3cv2ZYATXQQN8KhcZ5c5lxy8eD0NXHKvBFlS3HOXOXn3P6PqGgFHzjL+yyHMvzIXBJY4jEr8FIH16dwbXDqb4gi4lrrCZHDeIhVKsmLSUJjhmiKeP7dWcfUOGMxzLRmqA8r58TXcN3OvBrqNN63nUSG+Wb6XxmzLwE4PrlBy0fTRymG8WXrdE/Z2lglBhc8J3A8ER9c46s= before_install: -- sudo apt-get -y install python3-pip python3-setuptools apache2-utils + - sudo apt-get -y install python3-pip python3-setuptools apache2-utils install: - sudo pip3 install -r requirements.txt -- sudo pip3 install ldap3 parameterized bcrypt -- sudo pip3 install requests --upgrade -- sudo pip3 install pyasn1 --upgrade -- cd web-src -- npm install -- npm update -- cd .. + - sudo pip3 install ldap3 parameterized bcrypt + - sudo pip3 install requests --upgrade + - sudo pip3 install pyasn1 --upgrade + - cd web-src + - npm install + - npm update + - cd .. 
before_script: -- cd src -- python3 -m unittest discover -s tests -p "*.py" -t . -- cd ../web-src -- npm run test:unit-ci -- cd .. + - cd src + - python3 -m unittest discover -s tests -p "*.py" -t . + - cd ../web-src + - npm run test:unit-ci + - cd .. script: -- python3 tools/build.py + - python3 tools/build.py before_deploy: -- if ! [ "$BEFORE_DEPLOY_RUN" ]; then - export BEFORE_DEPLOY_RUN=1; + - if ! [ "$BEFORE_DEPLOY_RUN" ]; then + export BEFORE_DEPLOY_RUN=1; - . tools/add_git_tag.sh + . tools/add_git_tag.sh - fi + fi deploy: -- provider: releases - name: dev - api_key: "$GITHUB_TOKEN" - file: build/script-server.zip - prerelease: true - overwrite: true - skip_cleanup: true - on: - branch: master -- provider: releases - name: $(unzip -qc build/script-server.zip version.txt) - api_key: "$GITHUB_TOKEN" - file: build/script-server.zip - skip_cleanup: true - on: - branch: stable -- provider: script - script: tools/deploy_docker.sh - skip_cleanup: true - on: - tags: false - all_branches: true - condition: $TRAVIS_BRANCH =~ ^stable|master$ + - provider: releases + name: dev + api_key: "$GITHUB_TOKEN" + file: build/script-server.zip + prerelease: true + overwrite: true + skip_cleanup: true + on: + branch: master + - provider: releases + name: $(unzip -qc build/script-server.zip version.txt) + api_key: "$GITHUB_TOKEN" + file: build/script-server.zip + skip_cleanup: true + on: + branch: stable + - provider: script + script: tools/deploy_docker.sh + skip_cleanup: true + on: + tags: false + all_branches: true + condition: $TRAVIS_BRANCH =~ ^stable|master$ From 872584b484cd4484776d1556838f23b1bd2ff68b Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 16:10:31 +0100 Subject: [PATCH 050/398] changed dependency install order --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 04f08aa6..87b25bb8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,9 +22,9 @@ before_install: - sudo apt-get -y install 
python3-pip python3-setuptools apache2-utils install: - sudo pip3 install -r requirements.txt + - sudo pip3 install pyasn1 --upgrade - sudo pip3 install ldap3 parameterized bcrypt - sudo pip3 install requests --upgrade - - sudo pip3 install pyasn1 --upgrade - cd web-src - npm install - npm update From 3d45c88ee39ee0d8f50e9561f52a51a2edc383c0 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 16:14:49 +0100 Subject: [PATCH 051/398] reformatted vue files according to latest WebStorm style --- .../history/AdminExecutionsLogPage.vue | 56 +- .../scripts-config/ParamListItem.vue | 106 +-- .../scripts-config/ParameterConfigForm.vue | 582 ++++++++--------- .../scripts-config/ScriptConfigForm.vue | 376 +++++------ .../scripts-config/ScriptConfigListPage.vue | 14 +- .../scripts-config/ScriptParamList.vue | 282 ++++---- .../components/scripts-config/ScriptsList.vue | 98 +-- web-src/src/common/components/ChipsList.vue | 140 ++-- .../src/common/components/PageProgress.vue | 34 +- web-src/src/common/components/TextArea.vue | 162 ++--- web-src/src/common/components/checkbox.vue | 82 +-- web-src/src/common/components/file_upload.vue | 138 ++-- .../components/history/execution-details.vue | 238 +++---- .../history/executions-log-page.vue | 34 +- .../history/executions-log-table.vue | 94 +-- .../components/history/executions-log.vue | 92 +-- .../common/components/inputs/DatePicker.vue | 108 ++-- .../common/components/inputs/TimePicker.vue | 156 ++--- web-src/src/common/components/log_panel.vue | 312 ++++----- .../src/common/components/readonly-field.vue | 40 +- .../common/components/server_file_field.vue | 220 +++---- web-src/src/common/components/textfield.vue | 494 +++++++------- web-src/src/main-app/MainApp.vue | 112 ++-- .../main-app/components/AppWelcomePanel.vue | 64 +- .../components/DocumentTitleManager.vue | 76 +-- .../main-app/components/FaviconManager.vue | 62 +- .../main-app/components/MainAppSidebar.vue | 216 +++---- 
.../src/main-app/components/SearchPanel.vue | 128 ++-- .../components/history/AppHistoryHeader.vue | 66 +- .../components/history/AppHistoryPanel.vue | 46 +- .../components/schedule/SchedulePanel.vue | 612 +++++++++--------- .../components/scripts/MainAppContent.vue | 88 +-- .../components/scripts/ScheduleButton.vue | 120 ++-- .../components/scripts/ScriptHeader.vue | 76 +-- .../components/scripts/ScriptListGroup.vue | 62 +- .../components/scripts/ScriptListItem.vue | 166 ++--- .../components/scripts/ScriptLoadingText.vue | 172 ++--- .../components/scripts/ScriptsList.vue | 164 ++--- .../scripts/script-parameters-view.vue | 178 ++--- 39 files changed, 3133 insertions(+), 3133 deletions(-) diff --git a/web-src/src/admin/components/history/AdminExecutionsLogPage.vue b/web-src/src/admin/components/history/AdminExecutionsLogPage.vue index f20eefd0..2abf0c0c 100644 --- a/web-src/src/admin/components/history/AdminExecutionsLogPage.vue +++ b/web-src/src/admin/components/history/AdminExecutionsLogPage.vue @@ -1,37 +1,37 @@ \ No newline at end of file diff --git a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue index 1ec412cb..419e5f64 100644 --- a/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue +++ b/web-src/src/admin/components/scripts-config/ParameterConfigForm.vue @@ -1,73 +1,73 @@ \ No newline at end of file diff --git a/web-src/src/common/components/history/execution-details.vue b/web-src/src/common/components/history/execution-details.vue index 5da37215..2a53e077 100644 --- a/web-src/src/common/components/history/execution-details.vue +++ b/web-src/src/common/components/history/execution-details.vue @@ -1,129 +1,129 @@ \ No newline at end of file diff --git a/web-src/src/common/components/history/executions-log-page.vue b/web-src/src/common/components/history/executions-log-page.vue index 7bc791f8..a396efeb 100644 --- 
a/web-src/src/common/components/history/executions-log-page.vue +++ b/web-src/src/common/components/history/executions-log-page.vue @@ -1,27 +1,27 @@ diff --git a/web-src/src/common/components/history/executions-log-table.vue b/web-src/src/common/components/history/executions-log-table.vue index 437ed2f3..d4c04f7c 100644 --- a/web-src/src/common/components/history/executions-log-table.vue +++ b/web-src/src/common/components/history/executions-log-table.vue @@ -1,27 +1,27 @@ \ No newline at end of file diff --git a/web-src/src/common/components/history/executions-log.vue b/web-src/src/common/components/history/executions-log.vue index cd791451..fef05f76 100644 --- a/web-src/src/common/components/history/executions-log.vue +++ b/web-src/src/common/components/history/executions-log.vue @@ -1,64 +1,64 @@ \ No newline at end of file diff --git a/web-src/src/common/components/inputs/DatePicker.vue b/web-src/src/common/components/inputs/DatePicker.vue index f7b7cb00..a256f4d5 100644 --- a/web-src/src/common/components/inputs/DatePicker.vue +++ b/web-src/src/common/components/inputs/DatePicker.vue @@ -1,12 +1,12 @@ \ No newline at end of file diff --git a/web-src/src/common/components/inputs/TimePicker.vue b/web-src/src/common/components/inputs/TimePicker.vue index 5c300b0a..569fbd79 100644 --- a/web-src/src/common/components/inputs/TimePicker.vue +++ b/web-src/src/common/components/inputs/TimePicker.vue @@ -1,94 +1,94 @@ diff --git a/web-src/src/common/components/readonly-field.vue b/web-src/src/common/components/readonly-field.vue index 46232b0a..ee631b34 100644 --- a/web-src/src/common/components/readonly-field.vue +++ b/web-src/src/common/components/readonly-field.vue @@ -1,30 +1,30 @@ \ No newline at end of file diff --git a/web-src/src/common/components/server_file_field.vue b/web-src/src/common/components/server_file_field.vue index 0f990439..8b63872d 100644 --- a/web-src/src/common/components/server_file_field.vue +++ 
b/web-src/src/common/components/server_file_field.vue @@ -1,30 +1,30 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/AppWelcomePanel.vue b/web-src/src/main-app/components/AppWelcomePanel.vue index e250eca8..29ffb640 100644 --- a/web-src/src/main-app/components/AppWelcomePanel.vue +++ b/web-src/src/main-app/components/AppWelcomePanel.vue @@ -1,13 +1,13 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/DocumentTitleManager.vue b/web-src/src/main-app/components/DocumentTitleManager.vue index ea5052ed..407bd68e 100644 --- a/web-src/src/main-app/components/DocumentTitleManager.vue +++ b/web-src/src/main-app/components/DocumentTitleManager.vue @@ -3,45 +3,45 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/SearchPanel.vue b/web-src/src/main-app/components/SearchPanel.vue index 6127fdd8..4ebcdb48 100644 --- a/web-src/src/main-app/components/SearchPanel.vue +++ b/web-src/src/main-app/components/SearchPanel.vue @@ -1,20 +1,20 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/history/AppHistoryHeader.vue b/web-src/src/main-app/components/history/AppHistoryHeader.vue index e7f7275f..47e8b63e 100644 --- a/web-src/src/main-app/components/history/AppHistoryHeader.vue +++ b/web-src/src/main-app/components/history/AppHistoryHeader.vue @@ -1,12 +1,12 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/history/AppHistoryPanel.vue b/web-src/src/main-app/components/history/AppHistoryPanel.vue index f1ce8f79..8246eb16 100644 --- a/web-src/src/main-app/components/history/AppHistoryPanel.vue +++ b/web-src/src/main-app/components/history/AppHistoryPanel.vue @@ -1,7 +1,7 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index 613a9c58..6f87efaf 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ 
b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -1,68 +1,68 @@ - - \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/MainAppContent.vue b/web-src/src/main-app/components/scripts/MainAppContent.vue index 43ee0380..38d5f8e4 100644 --- a/web-src/src/main-app/components/scripts/MainAppContent.vue +++ b/web-src/src/main-app/components/scripts/MainAppContent.vue @@ -1,21 +1,21 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScheduleButton.vue b/web-src/src/main-app/components/scripts/ScheduleButton.vue index 3d68fbd1..9ea648ba 100644 --- a/web-src/src/main-app/components/scripts/ScheduleButton.vue +++ b/web-src/src/main-app/components/scripts/ScheduleButton.vue @@ -1,74 +1,74 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptHeader.vue b/web-src/src/main-app/components/scripts/ScriptHeader.vue index 89fa14ac..e65294fb 100644 --- a/web-src/src/main-app/components/scripts/ScriptHeader.vue +++ b/web-src/src/main-app/components/scripts/ScriptHeader.vue @@ -1,48 +1,48 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptListGroup.vue b/web-src/src/main-app/components/scripts/ScriptListGroup.vue index 7801f1cc..ab9e5283 100644 --- a/web-src/src/main-app/components/scripts/ScriptListGroup.vue +++ b/web-src/src/main-app/components/scripts/ScriptListGroup.vue @@ -1,18 +1,18 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptLoadingText.vue b/web-src/src/main-app/components/scripts/ScriptLoadingText.vue index 74d2c9c6..d2cda16d 100644 --- a/web-src/src/main-app/components/scripts/ScriptLoadingText.vue +++ b/web-src/src/main-app/components/scripts/ScriptLoadingText.vue @@ -1,7 +1,7 @@ diff --git a/web-src/src/main-app/components/scripts/ScriptsList.vue b/web-src/src/main-app/components/scripts/ScriptsList.vue index 698e0c74..f283a29f 100644 --- 
a/web-src/src/main-app/components/scripts/ScriptsList.vue +++ b/web-src/src/main-app/components/scripts/ScriptsList.vue @@ -1,104 +1,104 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/script-parameters-view.vue b/web-src/src/main-app/components/scripts/script-parameters-view.vue index 9aed7a4d..5e7c56b1 100644 --- a/web-src/src/main-app/components/scripts/script-parameters-view.vue +++ b/web-src/src/main-app/components/scripts/script-parameters-view.vue @@ -1,16 +1,16 @@ From 2fec7cc32dfca61ade3fc4efe88f46ee8f97b595 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 16:20:57 +0100 Subject: [PATCH 052/398] added verbose logging to track materialize-css issue --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 87b25bb8..b661c3e3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ install: - sudo pip3 install ldap3 parameterized bcrypt - sudo pip3 install requests --upgrade - cd web-src - - npm install + - npm install --loglevel verbose - npm update - cd .. before_script: From 6e320f8e04a4beb40c135827baeb88094d0db554 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 16:30:22 +0100 Subject: [PATCH 053/398] added debug printing for package-lock after install --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b661c3e3..dcc42a7f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,8 +26,8 @@ install: - sudo pip3 install ldap3 parameterized bcrypt - sudo pip3 install requests --upgrade - cd web-src - - npm install --loglevel verbose - - npm update + - npm install + - cat package-lock.json - cd .. 
before_script: - cd src From df0f628668a2490db80638a6a4cf421b495fcc4f Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 18:06:16 +0100 Subject: [PATCH 054/398] fixed wrong github dependency link --- .travis.yml | 1 - web-src/package.json | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index dcc42a7f..d31ea1a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,7 +27,6 @@ install: - sudo pip3 install requests --upgrade - cd web-src - npm install - - cat package-lock.json - cd .. before_script: - cd src diff --git a/web-src/package.json b/web-src/package.json index 89c8e725..baac3502 100644 --- a/web-src/package.json +++ b/web-src/package.json @@ -7,7 +7,7 @@ "core-js": "^3.6.4", "marked": "^0.7.0", "material-design-icons": "^3.0.1", - "materialize-css": "git://github.com/bugy/materialize.git#bugy-stable", + "materialize-css": "git+https://github.com/bugy/materialize.git#bugy-stable", "typeface-roboto": "0.0.75", "vue": "^2.6.11", "vue-router": "^3.1.5", From 643b052ccf4159695652c23780e62d04e236f6ae Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 18:24:43 +0100 Subject: [PATCH 055/398] moved to latest nodejs version and specific materializecss commit --- .travis.yml | 4 ++-- web-src/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index d31ea1a8..569dd3a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,9 +1,9 @@ if: tag IS blank sudo: required -dist: xenial +dist: bionic language: node_js node_js: - - '11' + - '15' cache: directories: - web-src/node_modules diff --git a/web-src/package.json b/web-src/package.json index baac3502..7344dd0d 100644 --- a/web-src/package.json +++ b/web-src/package.json @@ -7,7 +7,7 @@ "core-js": "^3.6.4", "marked": "^0.7.0", "material-design-icons": "^3.0.1", - "materialize-css": "git+https://github.com/bugy/materialize.git#bugy-stable", + "materialize-css": 
"git+https://github.com/bugy/materialize.git#91c2699", "typeface-roboto": "0.0.75", "vue": "^2.6.11", "vue-router": "^3.1.5", From abed7796e74f0454d8bbddf4be7236ed14963e1d Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 18:29:48 +0100 Subject: [PATCH 056/398] switched back to branch-dependency --- web-src/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web-src/package.json b/web-src/package.json index 7344dd0d..baac3502 100644 --- a/web-src/package.json +++ b/web-src/package.json @@ -7,7 +7,7 @@ "core-js": "^3.6.4", "marked": "^0.7.0", "material-design-icons": "^3.0.1", - "materialize-css": "git+https://github.com/bugy/materialize.git#91c2699", + "materialize-css": "git+https://github.com/bugy/materialize.git#bugy-stable", "typeface-roboto": "0.0.75", "vue": "^2.6.11", "vue-router": "^3.1.5", From 5cc739a8c5bffd78097f2a66c02ee5fa47875bf9 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Dec 2020 18:34:38 +0100 Subject: [PATCH 057/398] switched back to commit-dependency --- web-src/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web-src/package.json b/web-src/package.json index baac3502..7344dd0d 100644 --- a/web-src/package.json +++ b/web-src/package.json @@ -7,7 +7,7 @@ "core-js": "^3.6.4", "marked": "^0.7.0", "material-design-icons": "^3.0.1", - "materialize-css": "git+https://github.com/bugy/materialize.git#bugy-stable", + "materialize-css": "git+https://github.com/bugy/materialize.git#91c2699", "typeface-roboto": "0.0.75", "vue": "^2.6.11", "vue-router": "^3.1.5", From 20cf080be609df19d30d3400c80c08c1e7876660 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 26 Dec 2020 16:02:24 +0100 Subject: [PATCH 058/398] #350 added possibility to overwrite script server title using conf.json --- .../main-app/components/MainAppSidebar.vue | 33 ++++++- .../src/main-app/components/SearchPanel.vue | 2 + .../components/MainAppSidebar_test.js | 86 +++++++++++++++++++ 3 files changed, 117 
insertions(+), 4 deletions(-) create mode 100644 web-src/tests/unit/main-app/components/MainAppSidebar_test.js diff --git a/web-src/src/main-app/components/MainAppSidebar.vue b/web-src/src/main-app/components/MainAppSidebar.vue index f9b5a274..522e1d6e 100644 --- a/web-src/src/main-app/components/MainAppSidebar.vue +++ b/web-src/src/main-app/components/MainAppSidebar.vue @@ -1,8 +1,14 @@ +
    @@ -11,11 +12,12 @@ import {isBlankString, isEmptyString, isNull, removeElement} from '@/common/utils/common'; import {mapState} from 'vuex'; import ScriptListGroup from './ScriptListGroup'; +import ScriptFailedGroup from './ScriptFailedGroup'; import ScriptListItem from './ScriptListItem'; export default { name: 'ScriptsList', - components: {ScriptListGroup, ScriptListItem}, + components: {ScriptListGroup, ScriptListItem, ScriptFailedGroup}, props: { searchText: { type: String, @@ -30,7 +32,7 @@ export default { }, computed: { - ...mapState('scripts', ['scripts', 'selectedScript']), + ...mapState('scripts', ['scripts', 'selectedScript', 'failedScripts']), items() { let groups = this.scripts.filter(script => !isBlankString(script.group)) @@ -55,6 +57,12 @@ export default { result.sort((o1, o2) => o1.name.toLowerCase().localeCompare(o2.name.toLowerCase())); return result; + }, + + failedItems() { + const groupName = 'Failed to parse' + const result = {name: groupName, isGroup: true, scripts: [...this.failedScripts], isActive: this.activeGroup == groupName} + return result } }, methods: { diff --git a/web-src/src/main-app/store/scripts.js b/web-src/src/main-app/store/scripts.js index d3a8ac2f..3c8061f7 100644 --- a/web-src/src/main-app/store/scripts.js +++ b/web-src/src/main-app/store/scripts.js @@ -9,6 +9,7 @@ export default { state: { scripts: [], + failedScripts: [], selectedScript: null, predefinedParameters: null }, @@ -21,12 +22,14 @@ export default { axiosInstance.get('scripts') .then(({data}) => { - const {scripts} = data; + const scripts = data.scripts; + const failedScripts = data.failed; scripts.sort(function (script1, script2) { return script1.name.toLowerCase().localeCompare(script2.name.toLowerCase()); }); commit('SET_SCRIPTS', scripts); + commit('SET_FAILED_SCRIPTS', failedScripts); dispatch('selectScriptByHash'); }); }, @@ -73,6 +76,10 @@ export default { state.scripts = scripts }, + SET_FAILED_SCRIPTS(state, failedScripts) { + state.failedScripts 
= failedScripts + }, + SELECT_SCRIPT(state, {selectedScript, predefinedParameters}) { state.selectedScript = selectedScript; state.predefinedParameters = predefinedParameters; From fa4672ad898c71e5c8fd1429f9275b8c32feab4f Mon Sep 17 00:00:00 2001 From: RollingHog <46373241+RollingHog@users.noreply.github.com> Date: Fri, 14 Oct 2022 05:05:21 +0300 Subject: [PATCH 208/398] a lil clarification in group name --- web-src/src/main-app/components/scripts/ScriptsList.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web-src/src/main-app/components/scripts/ScriptsList.vue b/web-src/src/main-app/components/scripts/ScriptsList.vue index 6fd087a8..5bf762de 100644 --- a/web-src/src/main-app/components/scripts/ScriptsList.vue +++ b/web-src/src/main-app/components/scripts/ScriptsList.vue @@ -60,7 +60,7 @@ export default { }, failedItems() { - const groupName = 'Failed to parse' + const groupName = 'Failed to parse (these files have invalid JSON structure)' const result = {name: groupName, isGroup: true, scripts: [...this.failedScripts], isActive: this.activeGroup == groupName} return result } From 0c0fbda1f23d2eea8768034d4c7b02468f78807f Mon Sep 17 00:00:00 2001 From: Gurpinder Singh Date: Sat, 15 Oct 2022 11:16:15 +0530 Subject: [PATCH 209/398] #588 load_runner_files now returns [] in case of non-exisitent runners folder --- src/migrations/migrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/migrations/migrate.py b/src/migrations/migrate.py index 1af55046..5d600192 100644 --- a/src/migrations/migrate.py +++ b/src/migrations/migrate.py @@ -336,7 +336,7 @@ def _load_runner_files(conf_folder): runners_folder = os.path.join(conf_folder, 'runners') if not os.path.exists(runners_folder): - return + return [] conf_files = [os.path.join(runners_folder, file) for file in os.listdir(runners_folder) From 0cf987329fcab7aaefe6e018d2bd7d0b90d28a3c Mon Sep 17 00:00:00 2001 From: RollingHog <46373241+RollingHog@users.noreply.github.com> Date: Sun, 
16 Oct 2022 01:42:24 +0300 Subject: [PATCH 210/398] added parsing_failed to ShortConfig type (.py files only) --- src/config/config_service.py | 12 +++++++----- src/model/script_config.py | 7 +------ src/web/server.py | 11 ++--------- 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/src/config/config_service.py b/src/config/config_service.py index 76edec32..5f7ff8b5 100644 --- a/src/config/config_service.py +++ b/src/config/config_service.py @@ -9,8 +9,7 @@ from config.exceptions import InvalidConfigException from model import script_config from model.model_helper import InvalidFileException -from model.script_config import get_sorted_config, GetShortConfigFailedError -from src.model.script_config import ShortConfig +from model.script_config import get_sorted_config, ShortConfig from utils import os_utils, file_utils, process_utils, custom_json, custom_yaml from utils.file_utils import to_filename from utils.string_utils import is_blank, strip @@ -177,7 +176,7 @@ def list_configs(self, user, mode=None): conf_service = self - def load_script(path, content) -> Union[ShortConfig, GetShortConfigFailedError]: + def load_script(path, content) -> ShortConfig: try: config_object = self.load_config_file(path, content) short_config = script_config.read_short(path, config_object) @@ -194,7 +193,10 @@ def load_script(path, content) -> Union[ShortConfig, GetShortConfigFailedError]: return short_config except: LOGGER.exception('Could not load script: ' + path) - return GetShortConfigFailedError(path) + failed_short_config = ShortConfig() + failed_short_config.name = path + failed_short_config.parsing_failed = True + return failed_short_config return self._visit_script_configs(load_script) @@ -211,7 +213,7 @@ def load_config_model(self, name, user, parameter_values=None, skip_invalid_para return self._load_script_config(path, config_object, user, parameter_values, skip_invalid_parameters) - def _visit_script_configs(self, visitor) -> List[ Union[ShortConfig, 
GetShortConfigFailedError] ]: + def _visit_script_configs(self, visitor) -> List[ ShortConfig ]: configs_dir = self._script_configs_folder files = os.listdir(configs_dir) diff --git a/src/model/script_config.py b/src/model/script_config.py index dac281da..f1ff0773 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -27,12 +27,7 @@ def __init__(self): self.allowed_users = [] self.admin_users = [] self.group = None - -class GetShortConfigFailedError(object): - def __init__(self, path): - self.path = path - # self.group = 'Parsing failed' - self.cannotParse = True + self.parsing_failed = False @observable_fields( 'script_command', diff --git a/src/web/server.py b/src/web/server.py index 3d2d68d7..b052fa3c 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -137,16 +137,9 @@ def get(self, user): configs = self.application.config_service.list_configs(user, mode) - scripts = [] - failed = [] + scripts = [{'name': conf.name, 'group': conf.group} for conf in configs] - for conf in configs: - if hasattr(conf, 'cannotParse'): - failed.append({'path': conf.path }) - else: - scripts.append({'name': conf.name, 'group': conf.group }) - - self.write(json.dumps({'scripts': scripts, 'failed': failed})) + self.write(json.dumps({'scripts': scripts})) class AdminUpdateScriptEndpoint(BaseRequestHandler): From 1e406ea19dfd098ce7d7910e6bb4ad4be993df0b Mon Sep 17 00:00:00 2001 From: RollingHog <46373241+RollingHog@users.noreply.github.com> Date: Sun, 16 Oct 2022 02:03:02 +0300 Subject: [PATCH 211/398] reverting vue changes --- .../components/scripts/ScriptFailedGroup.vue | 44 ------------------- .../components/scripts/ScriptListGroup.css | 20 --------- .../components/scripts/ScriptListGroup.vue | 20 ++++++++- .../components/scripts/ScriptsList.vue | 12 +---- web-src/src/main-app/store/scripts.js | 8 +--- 5 files changed, 22 insertions(+), 82 deletions(-) delete mode 100644 web-src/src/main-app/components/scripts/ScriptFailedGroup.vue delete mode 100644 
web-src/src/main-app/components/scripts/ScriptListGroup.css diff --git a/web-src/src/main-app/components/scripts/ScriptFailedGroup.vue b/web-src/src/main-app/components/scripts/ScriptFailedGroup.vue deleted file mode 100644 index 171519fb..00000000 --- a/web-src/src/main-app/components/scripts/ScriptFailedGroup.vue +++ /dev/null @@ -1,44 +0,0 @@ - - - - - \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptListGroup.css b/web-src/src/main-app/components/scripts/ScriptListGroup.css deleted file mode 100644 index f9395280..00000000 --- a/web-src/src/main-app/components/scripts/ScriptListGroup.css +++ /dev/null @@ -1,20 +0,0 @@ -.script-list-group .collection-item.script-group { - border: none; - display: flex; - flex-direction: row; - padding-right: 16px; - align-items: center; -} - -.script-list-group .collection-item.script-group span { - flex: 1 1 auto; -} - -.script-list-group .collection-item.script-group i { - flex: 0 0 auto; - line-height: 16px; -} - -.script-list-group .collection-item.script-list-item { - padding-left: 36px; -} \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptListGroup.vue b/web-src/src/main-app/components/scripts/ScriptListGroup.vue index 8f9013c7..3d1711b9 100644 --- a/web-src/src/main-app/components/scripts/ScriptListGroup.vue +++ b/web-src/src/main-app/components/scripts/ScriptListGroup.vue @@ -30,5 +30,23 @@ export default { \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptsList.vue b/web-src/src/main-app/components/scripts/ScriptsList.vue index 5bf762de..6cbd8c68 100644 --- a/web-src/src/main-app/components/scripts/ScriptsList.vue +++ b/web-src/src/main-app/components/scripts/ScriptsList.vue @@ -4,7 +4,6 @@ - @@ -12,12 +11,11 @@ import {isBlankString, isEmptyString, isNull, removeElement} from '@/common/utils/common'; import {mapState} from 'vuex'; import ScriptListGroup from './ScriptListGroup'; -import ScriptFailedGroup from 
'./ScriptFailedGroup'; import ScriptListItem from './ScriptListItem'; export default { name: 'ScriptsList', - components: {ScriptListGroup, ScriptListItem, ScriptFailedGroup}, + components: {ScriptListGroup, ScriptListItem}, props: { searchText: { type: String, @@ -32,7 +30,7 @@ export default { }, computed: { - ...mapState('scripts', ['scripts', 'selectedScript', 'failedScripts']), + ...mapState('scripts', ['scripts', 'selectedScript']), items() { let groups = this.scripts.filter(script => !isBlankString(script.group)) @@ -58,12 +56,6 @@ export default { return result; }, - - failedItems() { - const groupName = 'Failed to parse (these files have invalid JSON structure)' - const result = {name: groupName, isGroup: true, scripts: [...this.failedScripts], isActive: this.activeGroup == groupName} - return result - } }, methods: { groupClicked(groupName) { diff --git a/web-src/src/main-app/store/scripts.js b/web-src/src/main-app/store/scripts.js index 3c8061f7..4e92c997 100644 --- a/web-src/src/main-app/store/scripts.js +++ b/web-src/src/main-app/store/scripts.js @@ -22,14 +22,12 @@ export default { axiosInstance.get('scripts') .then(({data}) => { - const scripts = data.scripts; - const failedScripts = data.failed; + const {scripts} = data; scripts.sort(function (script1, script2) { return script1.name.toLowerCase().localeCompare(script2.name.toLowerCase()); }); commit('SET_SCRIPTS', scripts); - commit('SET_FAILED_SCRIPTS', failedScripts); dispatch('selectScriptByHash'); }); }, @@ -76,10 +74,6 @@ export default { state.scripts = scripts }, - SET_FAILED_SCRIPTS(state, failedScripts) { - state.failedScripts = failedScripts - }, - SELECT_SCRIPT(state, {selectedScript, predefinedParameters}) { state.selectedScript = selectedScript; state.predefinedParameters = predefinedParameters; From 56d0b8e2a36358de519d191ce7f913d0b26c78a6 Mon Sep 17 00:00:00 2001 From: RollingHog <46373241+RollingHog@users.noreply.github.com> Date: Sun, 16 Oct 2022 02:05:55 +0300 Subject: [PATCH 
212/398] additional Vue reverts --- .../components/scripts/ScriptListGroup.vue | 37 ++++++++++--------- .../components/scripts/ScriptsList.vue | 2 +- web-src/src/main-app/store/scripts.js | 1 - 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/web-src/src/main-app/components/scripts/ScriptListGroup.vue b/web-src/src/main-app/components/scripts/ScriptListGroup.vue index 3d1711b9..ab9e5283 100644 --- a/web-src/src/main-app/components/scripts/ScriptListGroup.vue +++ b/web-src/src/main-app/components/scripts/ScriptListGroup.vue @@ -31,22 +31,25 @@ export default { \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ScriptsList.vue b/web-src/src/main-app/components/scripts/ScriptsList.vue index 6cbd8c68..f283a29f 100644 --- a/web-src/src/main-app/components/scripts/ScriptsList.vue +++ b/web-src/src/main-app/components/scripts/ScriptsList.vue @@ -55,7 +55,7 @@ export default { result.sort((o1, o2) => o1.name.toLowerCase().localeCompare(o2.name.toLowerCase())); return result; - }, + } }, methods: { groupClicked(groupName) { diff --git a/web-src/src/main-app/store/scripts.js b/web-src/src/main-app/store/scripts.js index 4e92c997..d3a8ac2f 100644 --- a/web-src/src/main-app/store/scripts.js +++ b/web-src/src/main-app/store/scripts.js @@ -9,7 +9,6 @@ export default { state: { scripts: [], - failedScripts: [], selectedScript: null, predefinedParameters: null }, From 62df42980a8a4f8ec58ac95432a472899a7e6821 Mon Sep 17 00:00:00 2001 From: RollingHog <46373241+RollingHog@users.noreply.github.com> Date: Mon, 17 Oct 2022 03:49:02 +0300 Subject: [PATCH 213/398] server.py parsing_failed parameter transmission fixed --- src/web/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/web/server.py b/src/web/server.py index b052fa3c..60684f9e 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -137,7 +137,7 @@ def get(self, user): configs = self.application.config_service.list_configs(user, mode) - scripts = 
[{'name': conf.name, 'group': conf.group} for conf in configs] + scripts = [{'name': conf.name, 'group': conf.group, 'parsing_failed': conf.parsing_failed } for conf in configs] self.write(json.dumps({'scripts': scripts})) From a33f79ee7c373d1604f6b9aa6c33b95e38feb69e Mon Sep 17 00:00:00 2001 From: RollingHog <46373241+RollingHog@users.noreply.github.com> Date: Mon, 17 Oct 2022 03:50:21 +0300 Subject: [PATCH 214/398] bad script red highlight added --- .../src/main-app/components/scripts/ScriptListItem.vue | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/web-src/src/main-app/components/scripts/ScriptListItem.vue b/web-src/src/main-app/components/scripts/ScriptListItem.vue index 3c653da1..088d1275 100644 --- a/web-src/src/main-app/components/scripts/ScriptListItem.vue +++ b/web-src/src/main-app/components/scripts/ScriptListItem.vue @@ -1,7 +1,7 @@ @@ -95,10 +140,12 @@ export default { return { oneTimeSchedule: true, + endOption: 'never', startDate: now, startTime: now.toTimeString().substr(0, 5), id: null, repeatPeriod: 1, + executeCount: 1, repeatTimeUnit: 'days', weekDays: [ {'day': 'Monday', active: currentDay === 1}, From 8a3c7bb3e51e36e644cb05f00f2e2febe7f3df86 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 15:58:55 +0200 Subject: [PATCH 303/398] Parse end args to the backend --- src/model/external_model.py | 2 ++ .../components/schedule/SchedulePanel.vue | 32 +++++++++++++++---- 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/src/model/external_model.py b/src/model/external_model.py index 14597070..3e88e09f 100644 --- a/src/model/external_model.py +++ b/src/model/external_model.py @@ -129,6 +129,8 @@ def parse_external_schedule(external_schedule): return { 'repeatable': external_schedule.get('repeatable'), 'start_datetime': external_schedule.get('startDatetime'), + 'endOption': external_schedule.get('endOption'), + 'endArg': external_schedule.get('endArg'), 'repeat_unit': external_schedule.get('repeatUnit'), 
'repeat_period': external_schedule.get('repeatPeriod'), 'weekdays': external_schedule.get('weekDays') diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index 5548a5d3..a7116677 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -72,10 +72,10 @@
    Ending - -
    @@ -138,11 +138,16 @@ export default { const now = new Date(); const currentDay = now.getDay(); + const endDay = new Date(now); + endDay.setDate(now.getDate() + 1); + return { oneTimeSchedule: true, - endOption: 'never', startDate: now, startTime: now.toTimeString().substr(0, 5), + endOption: 'never', + endDate: endDay, + endTime: endDay.toTimeString().substr(0, 5), id: null, repeatPeriod: 1, executeCount: 1, @@ -182,15 +187,28 @@ export default { buildScheduleSetup() { const startDatetime = new Date(this.startDate); - const [hours, minutes] = this.startTime.split(':') - startDatetime.setHours(parseInt(hours), parseInt(minutes), 0, 0) + const [hours, minutes] = this.startTime.split(':'); + startDatetime.setHours(parseInt(hours), parseInt(minutes), 0, 0); + + let endOption = this.endOption; + let endArg = null; + + if (this.endOption === 'after') { + endArg = this.executeCount; + } else if (this.endOption === 'on') { + const endDatetime = new Date(this.endDate); + const [hoursEnd, minutesEnd] = this.endTime.split(':'); + endDatetime.setHours(parseInt(hoursEnd), parseInt(minutesEnd), 0, 0); + endArg = endDatetime; + } - const weekDays = this.weekDays.filter(day => day.active) - .map(day => day.day); + const weekDays = this.weekDays.filter(day => day.active).map(day => day.day); return { repeatable: !this.oneTimeSchedule, startDatetime: startDatetime, + endOption: endOption, + endArg: endArg, repeatUnit: this.repeatTimeUnit, repeatPeriod: this.repeatPeriod, weekDays: weekDays From d7dbe125b5bc3b3852a71fbffdcfef63a0eff8da Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 16:20:09 +0200 Subject: [PATCH 304/398] Parse values to JSON --- src/scheduling/schedule_config.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 4dc94d36..6f6acfd8 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -7,12 +7,9 
@@ ALLOWED_WEEKDAYS = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] -def _read_start_datetime(incoming_schedule_config): - start_datetime = model_helper.read_datetime_from_config('start_datetime', incoming_schedule_config) - if start_datetime is None: - raise InvalidScheduleException('start_datetime is required') - return start_datetime - +def _read_datetime(incoming_schedule_config, key): + datetime_value = model_helper.read_datetime_from_config(key, incoming_schedule_config) + return datetime_value def _read_repeat_unit(incoming_schedule_config): repeat_unit = incoming_schedule_config.get('repeat_unit') @@ -52,9 +49,11 @@ def read_weekdays(incoming_schedule_config): def read_schedule_config(incoming_schedule_config): repeatable = read_repeatable_flag(incoming_schedule_config) - start_datetime = _read_start_datetime(incoming_schedule_config) + start_datetime = _read_datetime(incoming_schedule_config, 'start_datetime') + endOption = incoming_schedule_config.get('endOption') + endArg = incoming_schedule_config.get('endArg') - prepared_schedule_config = ScheduleConfig(repeatable, start_datetime) + prepared_schedule_config = ScheduleConfig(repeatable, start_datetime, endOption, endArg) if repeatable: prepared_schedule_config.repeat_unit = _read_repeat_unit(incoming_schedule_config) prepared_schedule_config.repeat_period = _read_repeat_period(incoming_schedule_config) @@ -67,9 +66,11 @@ def read_schedule_config(incoming_schedule_config): class ScheduleConfig: - def __init__(self, repeatable, start_datetime) -> None: + def __init__(self, repeatable, start_datetime, endOption, endArg) -> None: self.repeatable = repeatable self.start_datetime = start_datetime # type: datetime + self.endOption = endOption + self.endArg = endArg self.repeat_unit = None self.repeat_period = None self.weekdays = None @@ -77,9 +78,15 @@ def __init__(self, repeatable, start_datetime) -> None: def as_serializable_dict(self): result = { 'repeatable': 
self.repeatable, - 'start_datetime': date_utils.to_iso_string(self.start_datetime) + 'start_datetime': date_utils.to_iso_string(self.start_datetime), + 'endOption': self.endOption } + if self.endOption == 'on': + result['endArg'] = date_utils.to_iso_string(date_utils.parse_iso_datetime(self.endArg)) + else: + result['endArg'] = self.endArg + if self.repeat_unit is not None: result['repeat_unit'] = self.repeat_unit From 3519f370d81a80d9f350158f3fb7751073cbadc4 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 16:43:17 +0200 Subject: [PATCH 305/398] Input validation and endDate parsing --- src/scheduling/schedule_config.py | 5 ++++- src/scheduling/schedule_service.py | 9 +++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 6f6acfd8..8e753e5b 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -53,6 +53,9 @@ def read_schedule_config(incoming_schedule_config): endOption = incoming_schedule_config.get('endOption') endArg = incoming_schedule_config.get('endArg') + if endOption == 'on': + endArg = _read_datetime(incoming_schedule_config, 'endArg') + prepared_schedule_config = ScheduleConfig(repeatable, start_datetime, endOption, endArg) if repeatable: prepared_schedule_config.repeat_unit = _read_repeat_unit(incoming_schedule_config) @@ -83,7 +86,7 @@ def as_serializable_dict(self): } if self.endOption == 'on': - result['endArg'] = date_utils.to_iso_string(date_utils.parse_iso_datetime(self.endArg)) + result['endArg'] = date_utils.to_iso_string(self.endArg) else: result['endArg'] = self.endArg diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index b7a26a52..9c91727f 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -83,6 +83,15 @@ def create_job(self, script_name, parameter_values, incoming_schedule_config, us if not schedule_config.repeatable and 
date_utils.is_past(schedule_config.start_datetime): raise InvalidScheduleException('Start date should be in the future') + if schedule_config.endOption == 'on': + if schedule_config.start_datetime > schedule_config.endArg: + raise InvalidScheduleException('End date should be after start date') + if date_utils.is_past(schedule_config.endArg): + raise InvalidScheduleException('End date should be in the future') + + if schedule_config.endOption == 'after' and schedule_config.endArg <= 0: + raise InvalidScheduleException('Count should be greater than 0!') + id = self._id_generator.next_id() normalized_values = {} From 647779bf4f750e469f1526597855d5beb3dc6933 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 17:07:23 +0200 Subject: [PATCH 306/398] Refactor and optimize building of JSON's to only include needed data --- src/scheduling/schedule_config.py | 28 +++++++++++++++++----------- src/scheduling/schedule_service.py | 8 +++++++- 2 files changed, 24 insertions(+), 12 deletions(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 8e753e5b..be799ad2 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -50,14 +50,19 @@ def read_weekdays(incoming_schedule_config): def read_schedule_config(incoming_schedule_config): repeatable = read_repeatable_flag(incoming_schedule_config) start_datetime = _read_datetime(incoming_schedule_config, 'start_datetime') - endOption = incoming_schedule_config.get('endOption') - endArg = incoming_schedule_config.get('endArg') - if endOption == 'on': - endArg = _read_datetime(incoming_schedule_config, 'endArg') - - prepared_schedule_config = ScheduleConfig(repeatable, start_datetime, endOption, endArg) + prepared_schedule_config = ScheduleConfig(repeatable, start_datetime) if repeatable: + + endOption = incoming_schedule_config.get('endOption') + prepared_schedule_config.endOption = endOption + if endOption == 'on': + prepared_schedule_config.endArg = 
_read_datetime(incoming_schedule_config, 'endArg') + elif endOption == 'after': + prepared_schedule_config.endArg = model_helper.read_int_from_config('endArg', incoming_schedule_config) + else: + prepared_schedule_config.endOption = 'never' + prepared_schedule_config.repeat_unit = _read_repeat_unit(incoming_schedule_config) prepared_schedule_config.repeat_period = _read_repeat_period(incoming_schedule_config) @@ -69,11 +74,11 @@ def read_schedule_config(incoming_schedule_config): class ScheduleConfig: - def __init__(self, repeatable, start_datetime, endOption, endArg) -> None: + def __init__(self, repeatable, start_datetime) -> None: self.repeatable = repeatable self.start_datetime = start_datetime # type: datetime - self.endOption = endOption - self.endArg = endArg + self.endOption = None + self.endArg = None self.repeat_unit = None self.repeat_period = None self.weekdays = None @@ -82,12 +87,13 @@ def as_serializable_dict(self): result = { 'repeatable': self.repeatable, 'start_datetime': date_utils.to_iso_string(self.start_datetime), - 'endOption': self.endOption } if self.endOption == 'on': + result['endOption'] = self.endOption result['endArg'] = date_utils.to_iso_string(self.endArg) - else: + elif self.endOption == 'after': + result['endOption'] = self.endOption result['endArg'] = self.endArg if self.repeat_unit is not None: diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 9c91727f..27b657db 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -122,7 +122,13 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): return - + + if schedule.endOption == 'on' and date_utils.is_past(schedule.endArg): + return + + if schedule.endOption == 'after' and schedule.endArg <= 0: + return + next_datetime = schedule.get_next_time() LOGGER.info( 'Scheduling ' + job.get_log_name() + ' at ' + 
next_datetime.astimezone(tz=None).strftime('%H:%M, %d %B %Y')) From db4aa2d9a6105856f7f5cea02f16e534a1e6f393 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 17:21:55 +0200 Subject: [PATCH 307/398] Prevent execute after end time --- src/scheduling/schedule_service.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 27b657db..3166d3b1 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -123,13 +123,14 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): return - if schedule.endOption == 'on' and date_utils.is_past(schedule.endArg): - return - if schedule.endOption == 'after' and schedule.endArg <= 0: return next_datetime = schedule.get_next_time() + + if schedule.endOption == 'on' and date_utils.is_past(schedule.endArg) or next_datetime < schedule.endArg: + return + LOGGER.info( 'Scheduling ' + job.get_log_name() + ' at ' + next_datetime.astimezone(tz=None).strftime('%H:%M, %d %B %Y')) From 5ada7fdb8edeaa597d14a499aea40b30edf0dbf9 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 20:30:52 +0200 Subject: [PATCH 308/398] Improve scheduling logic for count --- src/scheduling/schedule_service.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 3166d3b1..cabb3c42 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -123,13 +123,17 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): return - if schedule.endOption == 'after' and schedule.endArg <= 0: - return + if schedule.endOption == 'after': + if schedule.endArg > 1: + schedule.endArg -= 1 + else: + return next_datetime = schedule.get_next_time() - if 
schedule.endOption == 'on' and date_utils.is_past(schedule.endArg) or next_datetime < schedule.endArg: - return + if schedule.endOption == 'on': + if date_utils.is_past(schedule.endArg) or next_datetime < schedule.endArg: + return LOGGER.info( 'Scheduling ' + job.get_log_name() + ' at ' + next_datetime.astimezone(tz=None).strftime('%H:%M, %d %B %Y')) From a6562f72623666046573a1db537355d460d4dd67 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 20:42:49 +0200 Subject: [PATCH 309/398] Decrease count only on successful execute and edit json --- src/scheduling/schedule_service.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index cabb3c42..75dd75ab 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -123,11 +123,8 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): return - if schedule.endOption == 'after': - if schedule.endArg > 1: - schedule.endArg -= 1 - else: - return + if schedule.endOption == 'after' and schedule.endArg <= 0: + return next_datetime = schedule.get_next_time() @@ -163,6 +160,18 @@ def cleanup(): self._execution_service.cleanup_execution(execution_id, user) self._execution_service.add_finish_listener(cleanup, execution_id) + + if job.schedule.endOption == 'after': + schedule = job.schedule + schedule.endArg -= 1 + + with open(job_path, 'r+') as file: + data = json.load(file) + data['schedule']['endArg'] = schedule.endArg + file.seek(0) # Move the file pointer to the beginning + json.dump(data, file, indent=4) + file.truncate() + except: LOGGER.exception('Failed to execute ' + job.get_log_name()) From 464eca70fcad8bc9de7ac7509ebf58862856ee1a Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 20:50:56 +0200 Subject: [PATCH 310/398] Cleaup for decrease count in json --- 
src/scheduling/schedule_service.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 75dd75ab..8f17b68b 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -162,15 +162,11 @@ def cleanup(): self._execution_service.add_finish_listener(cleanup, execution_id) if job.schedule.endOption == 'after': - schedule = job.schedule - schedule.endArg -= 1 - - with open(job_path, 'r+') as file: - data = json.load(file) - data['schedule']['endArg'] = schedule.endArg - file.seek(0) # Move the file pointer to the beginning - json.dump(data, file, indent=4) - file.truncate() + job.schedule.endArg -= 1 + + file_utils.write_file( + job_path, + json.dumps(job.as_serializable_dict(), indent=2)) except: LOGGER.exception('Failed to execute ' + job.get_log_name()) From e0b77cc5503032d992fbfc77b81ff2f33e2a7d09 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 20:54:41 +0200 Subject: [PATCH 311/398] Schedule box fix height --- web-src/src/main-app/components/schedule/SchedulePanel.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index a7116677..614f71ae 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -271,7 +271,7 @@ export default { font-size: 16px; max-width: 320px; width: 100%; - height: 380px; + height: 480px; display: flex; flex-direction: column; justify-content: space-between; From e094d41edfc4c588ed11b587c80c79503e068d9b Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 12 Jun 2023 21:12:34 +0200 Subject: [PATCH 312/398] Fix missing DIV and reformat using formatter --- .../components/schedule/SchedulePanel.vue | 120 +++++++----------- 1 file changed, 46 insertions(+), 74 deletions(-) diff --git 
a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index 614f71ae..f1e50ea2 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -5,107 +5,79 @@

    - - + +
    From b033c9d85575e3a3d88d2da5ffff7e117abb8078 Mon Sep 17 00:00:00 2001 From: UrekD Date: Tue, 13 Jun 2023 15:15:25 +0200 Subject: [PATCH 313/398] Fix comparing next ex and end date --- src/scheduling/schedule_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 8f17b68b..047d347d 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -129,7 +129,7 @@ def schedule_job(self, job: SchedulingJob, job_path): next_datetime = schedule.get_next_time() if schedule.endOption == 'on': - if date_utils.is_past(schedule.endArg) or next_datetime < schedule.endArg: + if date_utils.is_past(schedule.endArg) or next_datetime > schedule.endArg: return LOGGER.info( From a5e99f2638952af21e5990d6fcf92bab9c0263c7 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 26 Jun 2023 23:22:51 +0200 Subject: [PATCH 314/398] Revert web formatting --- .../components/schedule/SchedulePanel.vue | 48 +++++++++++++------ 1 file changed, 33 insertions(+), 15 deletions(-) diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index f1e50ea2..0f22cc82 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -5,31 +5,40 @@

    - - + +
    Every - - + +
    Starting - - + +
    @@ -66,18 +75,27 @@
    -
    -
    - -
    -
    {{ weekdaysError }}
    +
    +
    +
    +
    {{ weekdaysError }}
    From be3bb7022af73c2c93e331f903d13314f9a4d86b Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 26 Jun 2023 23:28:09 +0200 Subject: [PATCH 315/398] Formatting --- web-src/src/main-app/components/schedule/SchedulePanel.vue | 1 - 1 file changed, 1 deletion(-) diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index 0f22cc82..172cefe8 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -72,7 +72,6 @@
    Count -
    From db80a76e656343b69b1dfe88d59023a87a1d5817 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 26 Jun 2023 23:32:29 +0200 Subject: [PATCH 316/398] Remove redundant check --- src/scheduling/schedule_service.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 047d347d..346e5bf6 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -86,8 +86,6 @@ def create_job(self, script_name, parameter_values, incoming_schedule_config, us if schedule_config.endOption == 'on': if schedule_config.start_datetime > schedule_config.endArg: raise InvalidScheduleException('End date should be after start date') - if date_utils.is_past(schedule_config.endArg): - raise InvalidScheduleException('End date should be in the future') if schedule_config.endOption == 'after' and schedule_config.endArg <= 0: raise InvalidScheduleException('Count should be greater than 0!') From 784e7c4358ad92c17bf08bc2155b3768e9199ccd Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 26 Jun 2023 23:39:23 +0200 Subject: [PATCH 317/398] Convert to snake_case --- src/model/external_model.py | 4 ++-- src/scheduling/schedule_config.py | 30 +++++++++++++++--------------- src/scheduling/schedule_service.py | 16 ++++++++-------- 3 files changed, 25 insertions(+), 25 deletions(-) diff --git a/src/model/external_model.py b/src/model/external_model.py index 3e88e09f..a19bc98b 100644 --- a/src/model/external_model.py +++ b/src/model/external_model.py @@ -129,8 +129,8 @@ def parse_external_schedule(external_schedule): return { 'repeatable': external_schedule.get('repeatable'), 'start_datetime': external_schedule.get('startDatetime'), - 'endOption': external_schedule.get('endOption'), - 'endArg': external_schedule.get('endArg'), + 'end_option': external_schedule.get('end_option'), + 'end_arg': external_schedule.get('end_arg'), 'repeat_unit': external_schedule.get('repeatUnit'), 'repeat_period': 
external_schedule.get('repeatPeriod'), 'weekdays': external_schedule.get('weekDays') diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index be799ad2..b904d900 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -54,14 +54,14 @@ def read_schedule_config(incoming_schedule_config): prepared_schedule_config = ScheduleConfig(repeatable, start_datetime) if repeatable: - endOption = incoming_schedule_config.get('endOption') - prepared_schedule_config.endOption = endOption - if endOption == 'on': - prepared_schedule_config.endArg = _read_datetime(incoming_schedule_config, 'endArg') - elif endOption == 'after': - prepared_schedule_config.endArg = model_helper.read_int_from_config('endArg', incoming_schedule_config) + end_option = incoming_schedule_config.get('end_option') + prepared_schedule_config.end_option = end_option + if end_option == 'on': + prepared_schedule_config.end_arg = _read_datetime(incoming_schedule_config, 'end_arg') + elif end_option == 'after': + prepared_schedule_config.end_arg = model_helper.read_int_from_config('end_arg', incoming_schedule_config) else: - prepared_schedule_config.endOption = 'never' + prepared_schedule_config.end_option = 'never' prepared_schedule_config.repeat_unit = _read_repeat_unit(incoming_schedule_config) prepared_schedule_config.repeat_period = _read_repeat_period(incoming_schedule_config) @@ -77,8 +77,8 @@ class ScheduleConfig: def __init__(self, repeatable, start_datetime) -> None: self.repeatable = repeatable self.start_datetime = start_datetime # type: datetime - self.endOption = None - self.endArg = None + self.end_option = None + self.end_arg = None self.repeat_unit = None self.repeat_period = None self.weekdays = None @@ -89,12 +89,12 @@ def as_serializable_dict(self): 'start_datetime': date_utils.to_iso_string(self.start_datetime), } - if self.endOption == 'on': - result['endOption'] = self.endOption - result['endArg'] = 
date_utils.to_iso_string(self.endArg) - elif self.endOption == 'after': - result['endOption'] = self.endOption - result['endArg'] = self.endArg + if self.end_option == 'on': + result['end_option'] = self.end_option + result['end_arg'] = date_utils.to_iso_string(self.end_arg) + elif self.end_option == 'after': + result['end_option'] = self.end_option + result['end_arg'] = self.end_arg if self.repeat_unit is not None: result['repeat_unit'] = self.repeat_unit diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 346e5bf6..ee2e424e 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -83,11 +83,11 @@ def create_job(self, script_name, parameter_values, incoming_schedule_config, us if not schedule_config.repeatable and date_utils.is_past(schedule_config.start_datetime): raise InvalidScheduleException('Start date should be in the future') - if schedule_config.endOption == 'on': - if schedule_config.start_datetime > schedule_config.endArg: + if schedule_config.end_option == 'on': + if schedule_config.start_datetime > schedule_config.end_arg: raise InvalidScheduleException('End date should be after start date') - if schedule_config.endOption == 'after' and schedule_config.endArg <= 0: + if schedule_config.end_option == 'after' and schedule_config.end_arg <= 0: raise InvalidScheduleException('Count should be greater than 0!') id = self._id_generator.next_id() @@ -121,13 +121,13 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): return - if schedule.endOption == 'after' and schedule.endArg <= 0: + if schedule.end_option == 'after' and schedule.end_arg <= 0: return next_datetime = schedule.get_next_time() - if schedule.endOption == 'on': - if date_utils.is_past(schedule.endArg) or next_datetime > schedule.endArg: + if schedule.end_option == 'on': + if date_utils.is_past(schedule.end_arg) or next_datetime > 
schedule.end_arg: return LOGGER.info( @@ -159,8 +159,8 @@ def cleanup(): self._execution_service.add_finish_listener(cleanup, execution_id) - if job.schedule.endOption == 'after': - job.schedule.endArg -= 1 + if job.schedule.end_option == 'after': + job.schedule.end_arg -= 1 file_utils.write_file( job_path, From a0bf188ea89838a8961b7fcca166c4fab14341b5 Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 26 Jun 2023 23:41:59 +0200 Subject: [PATCH 318/398] _read_datetime validation --- src/scheduling/schedule_config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index b904d900..2af3f8af 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -9,6 +9,8 @@ def _read_datetime(incoming_schedule_config, key): datetime_value = model_helper.read_datetime_from_config(key, incoming_schedule_config) + if datetime_value is None: + raise InvalidScheduleException('%1 is required', key) return datetime_value def _read_repeat_unit(incoming_schedule_config): From 170609ac690198c200e84fa3b002322c76209c7b Mon Sep 17 00:00:00 2001 From: UrekD Date: Mon, 26 Jun 2023 23:48:36 +0200 Subject: [PATCH 319/398] Fix model snake case --- src/model/external_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/model/external_model.py b/src/model/external_model.py index a19bc98b..ec03da13 100644 --- a/src/model/external_model.py +++ b/src/model/external_model.py @@ -129,8 +129,8 @@ def parse_external_schedule(external_schedule): return { 'repeatable': external_schedule.get('repeatable'), 'start_datetime': external_schedule.get('startDatetime'), - 'end_option': external_schedule.get('end_option'), - 'end_arg': external_schedule.get('end_arg'), + 'end_option': external_schedule.get('endOption'), + 'end_arg': external_schedule.get('endArg'), 'repeat_unit': external_schedule.get('repeatUnit'), 'repeat_period': external_schedule.get('repeatPeriod'), 'weekdays': 
external_schedule.get('weekDays') From 9e3abfee9ea75ee76a6ab950e9e231ee14a85d70 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Tue, 27 Jun 2023 22:42:06 +0200 Subject: [PATCH 320/398] #654 added oauth token refresh support --- README.md | 2 +- src/auth/auth_abstract_oauth.py | 123 ++++--- src/auth/auth_base.py | 2 +- src/auth/auth_gitlab.py | 2 +- src/auth/identification.py | 5 +- src/auth/oauth_token_manager.py | 161 +++++++++ src/auth/oauth_token_response.py | 96 ++++++ src/auth/tornado_auth.py | 11 +- src/scheduling/schedule_service.py | 38 +-- src/scheduling/scheduler.py | 48 +++ src/tests/auth/test_auth_abstract_oauth.py | 38 +-- src/tests/auth/test_auth_gitlab.py | 2 +- src/tests/auth/test_auth_keycloak_openid.py | 305 ++++++++++++++++++ src/tests/file_download_feature_test.py | 10 +- src/tests/scheduling/schedule_service_test.py | 13 +- src/tests/test_utils.py | 30 +- src/tests/utils/mock_server.py | 173 ++++++++++ src/utils/tornado_utils.py | 40 +++ src/web/server.py | 8 +- src/web/web_auth_utils.py | 21 +- 20 files changed, 995 insertions(+), 133 deletions(-) create mode 100644 src/auth/oauth_token_manager.py create mode 100644 src/auth/oauth_token_response.py create mode 100644 src/scheduling/scheduler.py create mode 100644 src/tests/auth/test_auth_keycloak_openid.py create mode 100644 src/tests/utils/mock_server.py diff --git a/README.md b/README.md index e8291716..182390ca 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ or [how to configure the server](https://github.com/bugy/script-server/wiki/Serv ### Server-side -Python 3.6 or higher with the following modules: +Python 3.7 or higher with the following modules: * Tornado 5 / 6 diff --git a/src/auth/auth_abstract_oauth.py b/src/auth/auth_abstract_oauth.py index 85090c39..95e8df51 100644 --- a/src/auth/auth_abstract_oauth.py +++ b/src/auth/auth_abstract_oauth.py @@ -1,5 +1,6 @@ import abc import asyncio +import datetime import json import logging import os @@ -15,11 +16,12 @@ from auth import 
auth_base from auth.auth_base import AuthFailureError, AuthBadRequestException, AuthRejectedError +from auth.oauth_token_manager import OAuthTokenManager +from auth.oauth_token_response import OAuthTokenResponse from model import model_helper from model.model_helper import read_bool_from_config, read_int_from_config from model.server_conf import InvalidServerConfigException from utils import file_utils -from utils.tornado_utils import get_secure_cookie LOGGER = logging.getLogger('script_server.AbstractOauthAuthenticator') @@ -90,6 +92,12 @@ def __init__(self, oauth_authorize_url, oauth_token_url, oauth_scope, params_dic self._schedule_dump_task() + self._token_manager = OAuthTokenManager( + enabled=bool(self.auth_info_ttl), + fetch_token_callback=self._fetch_token_by_refresh) + + self.ioloop = tornado.ioloop.IOLoop.current() + @staticmethod def _validate_dump_file(dump_file): if os.path.isdir(dump_file): @@ -105,8 +113,8 @@ async def authenticate(self, request_handler): LOGGER.error('Code is not specified') raise AuthBadRequestException('Missing authorization information. 
Please contact your administrator') - (access_token, refresh_token) = await self.fetch_access_token(code, request_handler) - user_info = await self.fetch_user_info(access_token) + token_response = await self.fetch_access_token_by_code(code, request_handler) + user_info = await self.fetch_user_info(token_response.access_token) username = user_info.username if not username: @@ -124,12 +132,13 @@ async def authenticate(self, request_handler): self._users[username] = user_state if self.group_support: - await self.load_groups(access_token, username, user_info, user_state) + await self.load_groups(token_response.access_token, username, user_info, user_state) now = time.time() + self._token_manager.update_tokens(token_response, username, request_handler) + if self.auth_info_ttl: - request_handler.set_secure_cookie('token', access_token) user_state.last_auth_update = now user_state.last_visit = now @@ -144,7 +153,7 @@ async def load_groups(self, access_token, username, user_info, user_state): user_state.groups = user_groups LOGGER.info('Loaded groups for ' + username + ': ' + str(user_state.groups)) - def validate_user(self, user, request_handler): + async def validate_user(self, user, request_handler): if not user: LOGGER.warning('Username is not available') return False @@ -152,15 +161,20 @@ def validate_user(self, user, request_handler): now = time.time() user_state = self._users.get(user) + validate_expiration = True if not user_state: # if nothing is enabled, it's ok not to have user state (e.g. 
after server restart) if self.session_expire <= 0 and not self.auth_info_ttl and not self.group_support: return True + elif self._token_manager.can_restore_state(request_handler): + validate_expiration = False + user_state = _UserState(user) + self._users[user] = user_state else: LOGGER.info('User %s state is missing', user) return False - if self.session_expire > 0: + if (self.session_expire > 0) and validate_expiration: last_visit = user_state.last_visit if (last_visit is None) or ((last_visit + self.session_expire) < now): LOGGER.info('User %s state is expired', user) @@ -169,9 +183,10 @@ def validate_user(self, user, request_handler): user_state.last_visit = now if self.auth_info_ttl: - access_token = get_secure_cookie(request_handler, 'token') + access_token = await self._token_manager.synchronize_user_tokens(user, request_handler) if access_token is None: LOGGER.info('User %s token is not available', user) + self._remove_user(user) return False self.update_user_auth(user, user_state, access_token) @@ -186,7 +201,7 @@ def get_groups(self, user, known_groups=None): return user_state.groups def logout(self, user, request_handler): - request_handler.clear_cookie('token') + self._token_manager.logout(user, request_handler) self._remove_user(user) self._dump_state() @@ -194,9 +209,10 @@ def logout(self, user, request_handler): def _remove_user(self, user): if user in self._users: del self._users[user] + self._token_manager.remove_user(user) - async def fetch_access_token(self, code, request_handler): - body = urllib_parse.urlencode({ + async def fetch_access_token_by_code(self, code, request_handler): + return await self._fetch_token({ 'redirect_uri': get_path_for_redirect(request_handler), 'code': code, 'client_id': self.client_id, @@ -204,39 +220,21 @@ async def fetch_access_token(self, code, request_handler): 'grant_type': 'authorization_code', }) - response = await self.http_client.fetch( - self.oauth_token_url, - method='POST', - headers={'Content-Type': 
'application/x-www-form-urlencoded'}, - body=body, - raise_error=False) - - response_values = {} - if response.body: - response_values = escape.json_decode(response.body) - - if response.error: - if response_values.get('error_description'): - error_text = response_values.get('error_description') - elif response_values.get('error'): - error_text = response_values.get('error') - else: - error_text = str(response.error) - - error_message = 'Failed to load access_token: ' + error_text - LOGGER.error(error_message) - raise AuthFailureError(error_message) - - response_values = escape.json_decode(response.body) - access_token = response_values.get('access_token') - refresh_token = response_values.get('refresh_token') - - if not access_token: - message = 'No access token in response: ' + str(response.body) - LOGGER.error(message) - raise AuthFailureError(message) - - return access_token, refresh_token + async def _fetch_token_by_refresh(self, refresh_token, username): + if username not in self._users: + return None + + try: + return await self._fetch_token({ + 'refresh_token': refresh_token, + 'client_id': self.client_id, + 'client_secret': self.secret, + 'grant_type': 'refresh_token', + }) + except AuthFailureError: + LOGGER.info(f'Failed to refresh token for user {username}. 
Logging out') + self._remove_user(username) + return None def update_user_auth(self, username, user_state, access_token): now = time.time() @@ -246,7 +244,7 @@ def update_user_auth(self, username, user_state, access_token): if not ttl_expired: return - tornado.ioloop.IOLoop.current().spawn_callback( + self.ioloop.spawn_callback( self._do_update_user_auth_async, username, user_state, @@ -342,6 +340,41 @@ def _cleanup(self): if self.timer: self.timer.cancel() + async def _fetch_token(self, body): + encoded_body = urllib_parse.urlencode(body) + + response = await self.http_client.fetch( + self.oauth_token_url, + method='POST', + headers={'Content-Type': 'application/x-www-form-urlencoded'}, + body=encoded_body, + raise_error=False) + + response_values = {} + if response.body: + response_values = escape.json_decode(response.body) + + if response.error: + if response_values.get('error_description'): + error_text = response_values.get('error_description') + elif response_values.get('error'): + error_text = response_values.get('error') + else: + error_text = str(response.error) + + error_message = 'Failed to refresh access_token: ' + error_text + LOGGER.error(error_message) + raise AuthFailureError(error_message) + + token_response = OAuthTokenResponse.create(response_values, datetime.datetime.now()) + + if not token_response.access_token: + message = 'No access token in response: ' + str(response.body) + LOGGER.error(message) + raise AuthFailureError(message) + + return token_response + def get_path_for_redirect(request_handler): referer = request_handler.request.headers.get('Referer') diff --git a/src/auth/auth_base.py b/src/auth/auth_base.py index 7f4b0dd8..9d1267fc 100644 --- a/src/auth/auth_base.py +++ b/src/auth/auth_base.py @@ -17,7 +17,7 @@ def get_client_visible_config(self): def get_groups(self, user, known_groups=None): return [] - def validate_user(self, user, request_handler): + async def validate_user(self, user, request_handler): return True def 
perform_basic_auth(self, user, password): diff --git a/src/auth/auth_gitlab.py b/src/auth/auth_gitlab.py index e53be74f..2049db57 100644 --- a/src/auth/auth_gitlab.py +++ b/src/auth/auth_gitlab.py @@ -29,7 +29,7 @@ def __init__(self, params_dict): async def fetch_user_info(self, access_token) -> _OauthUserInfo: user = await self.oauth2_request( _OAUTH_GITLAB_USERINFO % self.gitlab_host, - access_token) + access_token=access_token) if user is None: return None diff --git a/src/auth/identification.py b/src/auth/identification.py index cba542a1..ab265525 100644 --- a/src/auth/identification.py +++ b/src/auth/identification.py @@ -2,11 +2,10 @@ import logging import uuid -import tornado.websocket - from model.trusted_ips import TrustedIpValidator from utils import tornado_utils, date_utils, audit_utils from utils.date_utils import days_to_ms +from utils.tornado_utils import can_write_secure_cookie LOGGER = logging.getLogger('identification') @@ -120,4 +119,4 @@ def _write_client_token(self, client_id, request_handler): request_handler.set_secure_cookie(self.COOKIE_KEY, new_token, expires_days=self.EXPIRES_DAYS) def _can_write(self, request_handler): - return not isinstance(request_handler, tornado.websocket.WebSocketHandler) + return can_write_secure_cookie(request_handler) diff --git a/src/auth/oauth_token_manager.py b/src/auth/oauth_token_manager.py new file mode 100644 index 00000000..21542064 --- /dev/null +++ b/src/auth/oauth_token_manager.py @@ -0,0 +1,161 @@ +import datetime +import logging +from typing import Dict, Optional + +import tornado.ioloop + +from auth.oauth_token_response import OAuthTokenResponse +from scheduling.scheduler import Scheduler +from utils.tornado_utils import get_secure_cookie, can_write_secure_cookie + +LOGGER = logging.getLogger('script_server.auth.OAuthTokenManager') + + +class OAuthTokenManager: + def __init__(self, enabled, fetch_token_callback) -> None: + self._refresh_tokens = {} # type: Dict[str, str] + 
self._pending_access_tokens = {} # type: Dict[str, OAuthTokenResponse] + self._scheduler = None + + self._enabled = enabled + self._fetch_token_callback = fetch_token_callback + + def update_tokens(self, token_response: OAuthTokenResponse, username, request_handler): + if not self._enabled: + return + + request_handler.set_secure_cookie('token', token_response.access_token) + + if token_response.should_refresh(): + refresh_token = token_response.refresh_token + + if self._refresh_tokens.get(username) != refresh_token: + self._refresh_tokens[username] = refresh_token + self._schedule_token_refresh(username, refresh_token, token_response.resolve_next_refresh_datetime()) + + request_handler.set_secure_cookie('token_details', token_response.serialize_details()) + + def can_restore_state(self, request_handler): + if not self._enabled: + return False + + token_response = self._restore_token_response_from_cookies(request_handler) + if token_response is None: + return False + + if token_response.should_refresh() and token_response.is_refresh_expired(): + return False + + return True + + async def synchronize_user_tokens(self, user, request_handler): + user_access_token = get_secure_cookie(request_handler, 'token') + if not self._enabled: + return user_access_token + + if user in self._pending_access_tokens: + token_response = self._pending_access_tokens[user] + if can_write_secure_cookie(request_handler): + LOGGER.info('Pending access token is available for ' + user + '. 
Replacing active token') + del self._pending_access_tokens[user] + self.update_tokens(token_response, user, request_handler) + else: + LOGGER.info('Pending access token is available for ' + user + + ', using it without replacing (called from websocket)') + + return token_response.access_token + + if user not in self._refresh_tokens: + token_details = get_secure_cookie(request_handler, 'token_details') + if token_details is not None: + token_response = OAuthTokenResponse.deserialize(user_access_token, token_details) + + if token_response.should_refresh(): + if token_response.is_refresh_expired(): + LOGGER.warning(f'Refresh token expired for user {user}. Logging out') + + return None + + LOGGER.info(f'Restoring refresh token for user {user} after restart') + + if token_response.is_access_expired(): + LOGGER.info(f'Access token expired for user {user}. Requesting a new one') + + await self._refresh_token(user, token_response.refresh_token, force=True) + + if user not in self._pending_access_tokens: + LOGGER.warning(f'Failed to refresh token for user {user}. 
Logging out') + return None + else: + token_response = self._pending_access_tokens[user] + del self._pending_access_tokens[user] + return token_response.access_token + + else: + self._refresh_tokens[user] = token_response.refresh_token + self._schedule_token_refresh( + user, + token_response.refresh_token, + token_response.resolve_next_refresh_datetime()) + + return user_access_token + + def remove_user(self, username): + if username in self._refresh_tokens: + del self._refresh_tokens[username] + + if username in self._pending_access_tokens: + del self._pending_access_tokens[username] + + def _schedule_token_refresh(self, username, refresh_token, next_refresh_datetime): + if not self._scheduler: + self.scheduler = Scheduler() + + if (next_refresh_datetime - datetime.datetime.now()) < datetime.timedelta(seconds=30): + next_refresh_datetime_adjusted = next_refresh_datetime + elif (next_refresh_datetime - datetime.datetime.now()) < datetime.timedelta(minutes=2): + next_refresh_datetime_adjusted = next_refresh_datetime - datetime.timedelta(seconds=10) + else: + next_refresh_datetime_adjusted = next_refresh_datetime - datetime.timedelta(minutes=1) + + self.scheduler.schedule( + next_refresh_datetime_adjusted, + tornado.ioloop.IOLoop.current().add_callback, + (self._refresh_token, username, refresh_token)) + + async def _refresh_token(self, username, refresh_token, force=False): + if not force: + if (username not in self._refresh_tokens) or (self._refresh_tokens[username] != refresh_token): + return + + token_response = await self._fetch_token_callback(refresh_token, username) + + if token_response is None: + return + + LOGGER.info(f'Refreshed token for {username}') + + self._refresh_tokens[username] = token_response.refresh_token + self._pending_access_tokens[username] = token_response + + if token_response.should_refresh(): + self._schedule_token_refresh( + username, + token_response.refresh_token, + token_response.resolve_next_refresh_datetime()) + + @staticmethod + 
def _restore_token_response_from_cookies(request_handler) -> Optional[OAuthTokenResponse]: + user_access_token = get_secure_cookie(request_handler, 'token') + if not user_access_token: + return None + + token_details = get_secure_cookie(request_handler, 'token_details') + if token_details is None: + return None + + return OAuthTokenResponse.deserialize(user_access_token, token_details) + + def logout(self, user, request_handler): + request_handler.clear_cookie('token') + request_handler.clear_cookie('token_details') diff --git a/src/auth/oauth_token_response.py b/src/auth/oauth_token_response.py new file mode 100644 index 00000000..cdd2943c --- /dev/null +++ b/src/auth/oauth_token_response.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +import datetime +import json +from typing import Optional + + +def _calc_expires_at(expires_in, response_datetime): + if expires_in is None: + return None + + return response_datetime + datetime.timedelta(seconds=expires_in) + + +def _date_to_string(expires_at: datetime) -> Optional[str]: + if expires_at is None: + return None + + return expires_at.isoformat() + + +def _string_to_date(expires_at: datetime) -> Optional[datetime.datetime]: + if expires_at is None: + return None + + return datetime.datetime.fromisoformat(expires_at) + + +class OAuthTokenResponse: + + def __init__(self, access_token, access_expires_at, refresh_token, refresh_expires_at) -> None: + self.access_token = access_token + self.access_expires_at = access_expires_at + self.refresh_token = refresh_token + self.refresh_expires_at = refresh_expires_at + + @classmethod + def create(cls, response_values, response_datetime) -> OAuthTokenResponse: + return OAuthTokenResponse( + response_values.get('access_token'), + _calc_expires_at(response_values.get('expires_in'), response_datetime), + response_values.get('refresh_token'), + _calc_expires_at(response_values.get('refresh_expires_in'), response_datetime)) + + def should_refresh(self): + return 
self.refresh_token and self.access_expires_at + + def get_refresh_expires_at(self): + return self.refresh_expires_at + + def get_access_expires_at(self): + return self.access_expires_at + + def resolve_next_refresh_datetime(self): + if not self.should_refresh(): + raise Exception('Cannot resolve expires at, for non-refreshable tokens') + + if self.access_expires_at: + return self.access_expires_at + + if self.refresh_expires_at: + return self.refresh_expires_at + + return datetime.datetime.now() + datetime.timedelta(days=1) + + def serialize_details(self): + return json.dumps({ + 'refresh_token': self.refresh_token, + 'access_expires_at': _date_to_string(self.access_expires_at), + 'refresh_expires_at': _date_to_string(self.refresh_expires_at) + }) + + @classmethod + def deserialize(cls, access_token, serialized) -> OAuthTokenResponse: + deserialized = json.loads(serialized) + + return OAuthTokenResponse( + access_token, + _string_to_date(deserialized['access_expires_at']), + deserialized.get('refresh_token'), + _string_to_date(deserialized['refresh_expires_at']) + ) + + def is_refresh_expired(self): + expires_at = self.get_refresh_expires_at() + if expires_at is None: + return False + + return expires_at <= datetime.datetime.now() + + def is_access_expired(self): + expires_at = self.get_access_expires_at() + if expires_at is None: + return False + + return expires_at <= datetime.datetime.now() diff --git a/src/auth/tornado_auth.py b/src/auth/tornado_auth.py index 324d7999..05b741a0 100644 --- a/src/auth/tornado_auth.py +++ b/src/auth/tornado_auth.py @@ -8,19 +8,19 @@ from auth import auth_base from utils import tornado_utils -from utils.tornado_utils import respond_error, redirect_relative +from utils.tornado_utils import respond_error, redirect_relative, can_write_secure_cookie LOGGER = logging.getLogger('script_server.tornado_auth') -class TornadoAuth(): +class TornadoAuth: def __init__(self, authenticator): self.authenticator = authenticator def 
is_enabled(self): return bool(self.authenticator) - def is_authenticated(self, request_handler): + async def is_authenticated(self, request_handler): if not self.is_enabled(): return True @@ -28,7 +28,7 @@ def is_authenticated(self, request_handler): if not username: return False - active = self.authenticator.validate_user(username, request_handler) + active = await self.authenticator.validate_user(username, request_handler) if not active: self.logout(request_handler) @@ -113,6 +113,9 @@ def logout(self, request_handler): if not username: return + if not can_write_secure_cookie(request_handler): + return + LOGGER.info('Logging out ' + username) request_handler.clear_cookie('username') diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index b7a26a52..79e95b55 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -1,10 +1,6 @@ import json import logging import os -import sched -import threading -import time -from datetime import timedelta from auth.user import User from config.config_service import ConfigService @@ -12,6 +8,7 @@ from execution.id_generator import IdGenerator from scheduling import scheduling_job from scheduling.schedule_config import read_schedule_config, InvalidScheduleException +from scheduling.scheduler import Scheduler from scheduling.scheduling_job import SchedulingJob from utils import file_utils, date_utils, custom_json @@ -23,8 +20,6 @@ LOGGER = logging.getLogger('script_server.scheduling.schedule_service') -_sleep = time.sleep - def restore_jobs(schedules_folder): files = [file for file in os.listdir(schedules_folder) if file.endswith('.json')] @@ -63,10 +58,8 @@ def __init__(self, (jobs, ids) = restore_jobs(self._schedules_folder) self._id_generator = IdGenerator(ids) - self.stopped = False - self.scheduler = sched.scheduler(timefunc=time.time) - self._start_scheduler() + self.scheduler = Scheduler() for job_path, job in jobs.items(): self.schedule_job(job, job_path) 
@@ -118,7 +111,7 @@ def schedule_job(self, job: SchedulingJob, job_path): LOGGER.info( 'Scheduling ' + job.get_log_name() + ' at ' + next_datetime.astimezone(tz=None).strftime('%H:%M, %d %B %Y')) - self.scheduler.enterabs(next_datetime.timestamp(), 1, self._execute_job, (job, job_path)) + self.scheduler.schedule(next_datetime, self._execute_job, (job, job_path)) def _execute_job(self, job: SchedulingJob, job_path): LOGGER.info('Executing ' + job.get_log_name()) @@ -160,29 +153,8 @@ def save_job(self, job: SchedulingJob): return path - def _start_scheduler(self): - def scheduler_loop(): - while not self.stopped: - try: - self.scheduler.run(blocking=False) - except: - LOGGER.exception('Failed to execute scheduled job') - - now = date_utils.now() - sleep_delta = timedelta(minutes=1) - timedelta(microseconds=now.microsecond, seconds=now.second) - _sleep(sleep_delta.total_seconds()) - - self.scheduling_thread = threading.Thread(daemon=True, target=scheduler_loop) - self.scheduling_thread.start() - - def _stop(self): - self.stopped = True - - def stopper(): - pass - - # just schedule the next execution to exit thread immediately - self.scheduler.enter(1, 0, stopper) + def stop(self): + self.scheduler.stop() class InvalidUserException(Exception): diff --git a/src/scheduling/scheduler.py b/src/scheduling/scheduler.py new file mode 100644 index 00000000..408bc7a5 --- /dev/null +++ b/src/scheduling/scheduler.py @@ -0,0 +1,48 @@ +import logging +import sched +import threading +import time +from datetime import timedelta + +from utils import date_utils + +_sleep = time.sleep + +LOGGER = logging.getLogger('script_server.scheduling.scheduler') + + +class Scheduler: + def __init__(self) -> None: + self.stopped = False + + self.scheduler = sched.scheduler(timefunc=time.time) + self._start_scheduler() + + def _start_scheduler(self): + def scheduler_loop(): + while not self.stopped: + try: + self.scheduler.run(blocking=False) + except: + LOGGER.exception('Failed to execute scheduled 
job') + + now = date_utils.now() + sleep_delta = timedelta(seconds=1) - timedelta(microseconds=now.microsecond) + _sleep(sleep_delta.total_seconds()) + + self.scheduling_thread = threading.Thread(daemon=True, target=scheduler_loop) + self.scheduling_thread.start() + + def stop(self): + self.stopped = True + + def stopper(): + pass + + # just schedule the next execution to exit thread immediately + self.scheduler.enter(1, 0, stopper) + + self.scheduling_thread.join(1) + + def schedule(self, execute_at_datetime, callback, params): + self.scheduler.enterabs(execute_at_datetime.timestamp(), 1, callback, params) diff --git a/src/tests/auth/test_auth_abstract_oauth.py b/src/tests/auth/test_auth_abstract_oauth.py index 803b4534..fb5c6401 100644 --- a/src/tests/auth/test_auth_abstract_oauth.py +++ b/src/tests/auth/test_auth_abstract_oauth.py @@ -12,6 +12,7 @@ import auth from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo from auth.auth_base import AuthFailureError, AuthBadRequestException +from auth.oauth_token_response import OAuthTokenResponse from model.server_conf import InvalidServerConfigException from tests import test_utils from tests.test_utils import mock_object @@ -263,42 +264,42 @@ def test_validate_user_success(self): request_handler = mock_request_handler('X') username = yield authenticator.authenticate(request_handler) - valid = authenticator.validate_user(username, request_handler) + valid = yield authenticator.validate_user(username, request_handler) self.assertEqual(True, valid) @gen_test def test_validate_when_no_state(self): authenticator = create_test_authenticator(group_support=False) - valid = authenticator.validate_user('user_X', mock_request_handler('')) + valid = yield authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(True, valid) @gen_test def test_validate_when_no_username(self): authenticator = create_test_authenticator(group_support=False) - valid = authenticator.validate_user(None, 
mock_request_handler('')) + valid = yield authenticator.validate_user(None, mock_request_handler('')) self.assertEqual(False, valid) @gen_test def test_validate_when_no_state_and_expire_enabled(self): authenticator = create_test_authenticator(session_expire_minutes=1) - valid = authenticator.validate_user('user_X', mock_request_handler('')) + valid = yield authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(False, valid) @gen_test def test_validate_when_no_state_and_auth_update_enabled(self): authenticator = create_test_authenticator(auth_info_ttl=1) - valid = authenticator.validate_user('user_X', mock_request_handler('')) + valid = yield authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(False, valid) @gen_test def test_validate_when_no_state_and_group_support(self): authenticator = create_test_authenticator(group_support=True) - valid = authenticator.validate_user('user_X', mock_request_handler('')) + valid = yield authenticator.validate_user('user_X', mock_request_handler('')) self.assertEqual(False, valid) @patch('time.time', mock_time) @@ -310,7 +311,7 @@ def test_validate_when_session_expired(self): username = yield authenticator.authenticate(request_handler) mock_time.return_value = mock_time.return_value + 60 * 10 - valid = authenticator.validate_user(username, request_handler) + valid = yield authenticator.validate_user(username, request_handler) self.assertEqual(False, valid) @patch('time.time', mock_time) @@ -322,7 +323,7 @@ def test_validate_when_session_not_expired(self): username = yield authenticator.authenticate(request_handler) mock_time.return_value = mock_time.return_value + 60 * 2 - valid = authenticator.validate_user(username, request_handler) + valid = yield authenticator.validate_user(username, request_handler) self.assertEqual(True, valid) @patch('time.time', mock_time) @@ -334,10 +335,10 @@ def test_validate_when_session_not_expired_after_renew(self): username = yield 
authenticator.authenticate(request_handler) mock_time.return_value = mock_time.return_value + 60 * 2 - authenticator.validate_user(username, request_handler) + yield authenticator.validate_user(username, request_handler) mock_time.return_value = mock_time.return_value + 60 * 4 - valid2 = authenticator.validate_user(username, request_handler) + valid2 = yield authenticator.validate_user(username, request_handler) self.assertEqual(True, valid2) @patch('time.time', mock_time) @@ -352,7 +353,7 @@ def test_validate_when_session_expired_after_renew(self): authenticator.validate_user(username, request_handler) mock_time.return_value = mock_time.return_value + 60 * 6 - valid2 = authenticator.validate_user(username, request_handler) + valid2 = yield authenticator.validate_user(username, request_handler) self.assertEqual(False, valid2) @gen_test @@ -362,7 +363,7 @@ def test_validate_when_update_auth_and_no_access_token(self): request_handler = mock_request_handler('X') username = yield authenticator.authenticate(request_handler) - valid = authenticator.validate_user(username, mock_request_handler('X')) + valid = yield authenticator.validate_user(username, mock_request_handler('X')) self.assertEqual(False, valid) @@ -441,7 +442,7 @@ def test_no_reload_groups_without_expiry(self): authenticator.user_groups['user_X'] = ['Group A'] - valid1 = authenticator.validate_user(username, request_handler) + valid1 = yield authenticator.validate_user(username, request_handler) self.assertEqual(True, valid1) yield self.wait_next_ioloop() @@ -460,12 +461,13 @@ def run_validation_test(self, prevalidation_callback): prevalidation_callback(username, authenticator) - valid1 = authenticator.validate_user(username, request_handler) + valid1 = yield authenticator.validate_user(username, request_handler) self.assertEqual(True, valid1) yield self.wait_next_ioloop() - return authenticator.validate_user(username, request_handler) + new_validity = yield authenticator.validate_user(username, 
request_handler) + return new_validity async def wait_next_ioloop(self): await gen.sleep(0.001) @@ -483,7 +485,7 @@ def test_validate_user_success(self): self.assertIsNone(request_handler.get_secure_cookie('token')) - valid = authenticator.validate_user(username, request_handler) + valid = yield authenticator.validate_user(username, request_handler) self.assertFalse(valid) @@ -620,10 +622,10 @@ def __init__(self, params_dict): self.disabled_users = [] self.failing_groups_loading = [] - async def fetch_access_token(self, code, request_handler): + async def fetch_access_token_by_code(self, code, request_handler): for key, value in self.user_tokens.items(): if value.endswith(code): - return key, None + return OAuthTokenResponse(key, None, None, None) raise Exception('Could not generate token for code ' + code + '. Make sure core is equal to user suffix') diff --git a/src/tests/auth/test_auth_gitlab.py b/src/tests/auth/test_auth_gitlab.py index 10030f05..a1c6d4d5 100644 --- a/src/tests/auth/test_auth_gitlab.py +++ b/src/tests/auth/test_auth_gitlab.py @@ -60,7 +60,7 @@ def test_fetch_user_info(self, mock_request): user_info = yield authenticator.fetch_user_info('my_token_2') self.assertEqual(_OauthUserInfo('me@gmail.com', True, response), user_info) - mock_request.assert_called_with('https://my.gitlab.host/api/v4/user', 'my_token_2') + mock_request.assert_called_with('https://my.gitlab.host/api/v4/user', access_token='my_token_2') @patch('tornado.auth.OAuth2Mixin.oauth2_request', new_callable=AsyncMock) @gen_test diff --git a/src/tests/auth/test_auth_keycloak_openid.py b/src/tests/auth/test_auth_keycloak_openid.py new file mode 100644 index 00000000..d82528b6 --- /dev/null +++ b/src/tests/auth/test_auth_keycloak_openid.py @@ -0,0 +1,305 @@ +import logging +import shutil +import time +from unittest.mock import MagicMock + +from parameterized import parameterized +from tornado import gen +from tornado.testing import AsyncTestCase, gen_test +from tornado.web import 
RequestHandler + +from auth.auth_keycloak_openid import KeycloakOpenidAuthenticator +from scheduling import scheduler +from tests import test_utils +from tests.test_utils import mock_request_handler +from tests.utils.mock_server import MockServer + +REALM_URL = 'http://my-keycloak.net/realms/master' + +access_expiration_duration = 0.1 +refresh_expiration_duration = 0.6 + + +class OauthServerMock: + + def __init__(self, mock_server: MockServer): + super().__init__() + + self.mock_server = mock_server + + self.refresh_token_counter = 1 + self.access_token_counter = 1 + self.code_user_mapping = {} + + self.mock_server.register_mock( + 'POST', + '/realms/master/protocol/openid-connect/token', + response_handler=self.handle_token_request + ) + + self.mock_server.register_mock( + 'GET', + '/realms/master/protocol/openid-connect/userinfo', + response_handler=self.handle_userinfo_request) + + self.access_token_expiration_times = {} + self.refresh_token_expiration_times = {} + self.user_groups = {} + self.deactivated_users = [] + + def handle_token_request(self, request_handler): + client_id = request_handler.get_argument('client_id') + client_secret = request_handler.get_argument('client_secret') + + if client_id != 'my-client' or client_secret != 'top_secret': + request_handler.set_status(400, 'Invalid client parameters') + return + + grant_type = request_handler.get_argument('grant_type') + + if grant_type == 'refresh_token': + refresh_token = request_handler.get_argument('refresh_token') + if refresh_token is None: + request_handler.set_status(401, 'No token provided') + return + + if refresh_token not in self.refresh_token_expiration_times: + request_handler.set_status(401, 'Invalid refresh token: ' + str(refresh_token)) + return + + if self.refresh_token_expiration_times[refresh_token] < time.time(): + request_handler.set_status(401, 'Refresh token has expired: ' + refresh_token) + return + + token_prefix, user = self.parse_token_info(refresh_token) + + elif 
grant_type == 'authorization_code': + code = request_handler.get_argument('code') + if not code or code not in self.code_user_mapping: + request_handler.set_status(401, 'Invalid code provided: ' + str(code)) + return + + redirect_uri = request_handler.get_argument('redirect_uri') + if redirect_uri != 'http://localhost:5432/index.html': + request_handler.set_status(400, 'Invalid redirect_uri: ' + str(redirect_uri)) + return + + user = self.code_user_mapping[code] + del self.code_user_mapping[code] + + token_prefix = 'token-' + user + '|' + + else: + request_handler.set_status(400, 'Unsupported grant type: ' + str(grant_type)) + return + + if user in self.deactivated_users: + request_handler.set_status(401, 'User is deactivated') + return + + self.send_tokens(token_prefix, request_handler) + + @staticmethod + def parse_token_info(refresh_token): + token_prefix = refresh_token.split('|')[0] + '|' + user = token_prefix[6:-1] + return token_prefix, user + + def send_tokens(self, token_prefix, request_handler): + access_token = f'{token_prefix}acc-{self.access_token_counter}' + refresh_token = f'{token_prefix}ref-{self.refresh_token_counter}' + self.access_token_counter += 1 + self.refresh_token_counter += 1 + + request_handler.write({ + 'access_token': access_token, + 'refresh_token': refresh_token, + 'expires_in': access_expiration_duration, + 'refresh_expires_in': refresh_expiration_duration + }) + + self.cleanup_old_tokens(self.access_token_expiration_times, token_prefix) + self.cleanup_old_tokens(self.refresh_token_expiration_times, token_prefix) + + self.access_token_expiration_times[access_token] = time.time() + access_expiration_duration + self.refresh_token_expiration_times[refresh_token] = time.time() + refresh_expiration_duration + + @staticmethod + def cleanup_old_tokens(existing_tokens_map, token_prefix): + for key in list(existing_tokens_map.keys()): + if key.startswith(token_prefix): + del existing_tokens_map[key] + + def handle_userinfo_request(self, 
request_handler: RequestHandler): + authorization = request_handler.request.headers.get('Authorization') + if authorization is None: + request_handler.set_status(401, 'No token provided') + return + + access_token = authorization[7:] + if access_token not in self.access_token_expiration_times: + request_handler.set_status(401, 'Wrong token provided: ' + access_token) + return + + if self.access_token_expiration_times[access_token] < time.time(): + request_handler.set_status(401, 'Access token has expired: ' + access_token) + return + + _, user = self.parse_token_info(access_token) + request_handler.write({ + 'preferred_username': user, + 'groups': self.user_groups.get(user, []) + }) + + def set_groups(self, username, groups): + self.user_groups[username] = groups + + def deactivate_user(self, username): + if username not in self.deactivated_users: + self.deactivated_users.append(username) + + def activate_user(self, username): + self.deactivated_users.remove(username) + + +class KeycloakOauthTestCase(AsyncTestCase): + + @gen_test + async def test_success_auth(self): + username, _ = await self.authenticate('qwerty123') + + self.assertEqual(username, 'bugy') + self.assertEqual(['g1', 'g2'], self.authenticator.get_groups('bugy')) + + @gen_test + async def test_success_validate_immediately(self): + username, request_1 = await self.authenticate('qwerty123') + + self.oauth_server.set_groups('user', ['g3']) + + valid = await self.authenticator.validate_user(username, mock_request_handler(previous_request=request_1)) + self.assertTrue(valid) + self.assertEqual(['g1', 'g2'], self.authenticator.get_groups('bugy')) + + @gen_test + async def test_success_validate_after_refresh(self): + username, request_1 = await self.authenticate('qwerty123') + + self.oauth_server.set_groups('bugy', ['g3']) + + await gen.sleep(0.4 + 0.1) + + valid_1 = await self.authenticator.validate_user(username, mock_request_handler(previous_request=request_1)) + self.assertTrue(valid_1) + + await 
gen.sleep(0.1) + + self.assertEqual(['g3'], self.authenticator.get_groups('bugy')) + + @gen_test + async def test_failed_validate_after_deactivate(self): + username, request_1 = await self.authenticate('qwerty123') + + self.oauth_server.deactivate_user('bugy') + + await gen.sleep(access_expiration_duration + 0.1) + + valid_1 = await self.authenticator.validate_user(username, mock_request_handler(previous_request=request_1)) + self.assertFalse(valid_1) + + @gen_test + async def test_failed_validate_after_deactivate_when_different_users(self): + user1, user1_request1 = await self.authenticate('qwerty123') + user2, user2_request1 = await self.authenticate('dolphin99') + + self.oauth_server.deactivate_user('bugy') + + await gen.sleep(access_expiration_duration + 0.1) + + user1_valid = await self.authenticator.validate_user(user1, + mock_request_handler(previous_request=user1_request1)) + self.assertFalse(user1_valid) + + user2_valid = await self.authenticator.validate_user(user2, + mock_request_handler(previous_request=user2_request1)) + self.assertTrue(user2_valid) + + @parameterized.expand([ + (0, True, True, ['g1', 'g2']), + (0, False, True, ['g3']), + (access_expiration_duration + 0.1, True, True, ['g1', 'g2']), + (access_expiration_duration + 0.1, False, True, ['g3']), + (refresh_expiration_duration + 0.1, True, False, []), + (refresh_expiration_duration + 0.1, False, False, []), + ]) + @gen_test + async def test_read_tokens_from_request(self, sleep_time, has_dump, expected_validity, expected_groups): + username, request_1 = await self.authenticate('qwerty123') + + self.authenticator._dump_state() + shutil.copyfile(self.dump_file, self.dump_file + '.bkp') + + self.authenticator.logout(username, mock_request_handler(previous_request=request_1)) + shutil.move(self.dump_file + '.bkp', self.dump_file) + + await gen.sleep(sleep_time) + + self.oauth_server.set_groups('bugy', ['g3']) + + dump_file = self.dump_file if has_dump else None + + another_authenticator = 
self.create_authenticator(dump_file) + valid = await another_authenticator.validate_user(username, mock_request_handler(previous_request=request_1)) + self.assertEqual(expected_validity, valid) + + await gen.sleep(0.1) + + self.assertEqual(expected_groups, another_authenticator.get_groups(username)) + + async def authenticate(self, code): + request = mock_request_handler(arguments={'code': code}, + headers={'Referer': 'http://localhost:5432/index.html'}) + username = await self.authenticator.authenticate(request) + return username, request + + def setUp(self): + super().setUp() + test_utils.setup() + + logging.basicConfig( + level=logging.DEBUG, + format='%(asctime)s.%(msecs)06d %(levelname)s %(module)s: %(message)s') + + scheduler._sleep = MagicMock() + scheduler._sleep.side_effect = lambda x: time.sleep(0.001) + + self.dump_file = test_utils.create_file('dump.oauth', text='{}') + + self.mock_server = MockServer() + + self.authenticator = self.create_authenticator(self.dump_file) + + self._refresh_token_request_handler = None + self.oauth_server = OauthServerMock(self.mock_server) + self.oauth_server.set_groups('bugy', ['g1', 'g2']) + self.oauth_server.set_groups('yaro', ['g2']) + self.oauth_server.code_user_mapping['qwerty123'] = 'bugy' + self.oauth_server.code_user_mapping['dolphin99'] = 'yaro' + + def create_authenticator(self, dump_file=None): + return KeycloakOpenidAuthenticator({ + 'realm_url': self.mock_server.get_host() + '/realms/master', + 'client_id': 'my-client', + 'secret': 'top_secret', + 'group_support': True, + 'auth_info_ttl': 0.4, + 'state_dump_file': dump_file + }) + + def tearDown(self): + super().tearDown() + test_utils.cleanup() + + scheduler._sleep = time.sleep + + self.mock_server.cleanup() diff --git a/src/tests/file_download_feature_test.py b/src/tests/file_download_feature_test.py index d95f1679..80464d4c 100644 --- a/src/tests/file_download_feature_test.py +++ b/src/tests/file_download_feature_test.py @@ -31,14 +31,14 @@ def 
test_single_asterisk_1_match(self): self.assertEqual(files, [os.path.join(test_utils.temp_folder, 'test.txt')]) def test_single_asterisk_2_matches(self): - test_utils.create_file('test1.txt') - test_utils.create_file('test2.txt') + test_utils.create_file('test1.log') + test_utils.create_file('test2.log') - files = file_download_feature.find_matching_files('*/test*.txt', None) + files = file_download_feature.find_matching_files('*/test*.log', None) self.assertCountEqual(files, [ - os.path.join(test_utils.temp_folder, 'test1.txt'), - os.path.join(test_utils.temp_folder, 'test2.txt') + os.path.join(test_utils.temp_folder, 'test1.log'), + os.path.join(test_utils.temp_folder, 'test2.log') ]) def test_double_asterisk_match(self): diff --git a/src/tests/scheduling/schedule_service_test.py b/src/tests/scheduling/schedule_service_test.py index 03298cd2..9f6494aa 100644 --- a/src/tests/scheduling/schedule_service_test.py +++ b/src/tests/scheduling/schedule_service_test.py @@ -8,7 +8,7 @@ from auth.user import User from config.config_service import ConfigService -from scheduling import schedule_service +from scheduling import scheduler from scheduling.schedule_config import ScheduleConfig, InvalidScheduleException from scheduling.schedule_service import ScheduleService, InvalidUserException, UnavailableScriptException from scheduling.scheduling_job import SchedulingJob @@ -57,8 +57,8 @@ def setUp(self) -> None: self.scheduler_mock = MagicMock() self.patcher.start().return_value = self.scheduler_mock - schedule_service._sleep = MagicMock() - schedule_service._sleep.side_effect = lambda x: time.sleep(0.001) + scheduler._sleep = MagicMock() + scheduler._sleep.side_effect = lambda x: time.sleep(0.001) self.config_service = ConfigService(AnyUserAuthorizer(), test_utils.temp_folder, test_utils.process_invoker) @@ -95,10 +95,9 @@ def tearDown(self) -> None: date_utils._mocked_now = None - self.schedule_service._stop() - self.schedule_service.scheduling_thread.join() + 
self.schedule_service.stop() - schedule_service._sleep = time.sleep + scheduler._sleep = time.sleep self.patcher.stop() @@ -289,7 +288,7 @@ def test_scheduler_runner(self): self.assertGreater(step2_runs_count, step1_runs_count) def test_scheduler_runner_when_stopped(self): - self.schedule_service._stop() + self.schedule_service.stop() time.sleep(0.1) original_runs_count = self.scheduler_mock.run.call_count diff --git a/src/tests/test_utils.py b/src/tests/test_utils.py index db70bb96..45c6ca15 100644 --- a/src/tests/test_utils.py +++ b/src/tests/test_utils.py @@ -457,22 +457,46 @@ def wait_observable_close_notification(observable, timeout): close_condition.wait(timeout) -def mock_request_handler(*, arguments: dict = None, method='GET', headers=None): +def mock_request_handler(*, arguments: dict = None, method='GET', headers=None, previous_request=None): if headers is None: headers = {} request_handler = mock_object() - def get_argument(arg_name): + cookies = {} + if previous_request and previous_request._cookies: + cookies.update(previous_request._cookies) + + def get_argument(arg_name, default=None): if arguments is None: - return None + return default return arguments.get(arg_name) + def set_secure_cookie(cookie_name, value): + cookies[cookie_name] = f'!SECURE!{value}!!!' 
+ + def clear_cookie(cookie_name): + del cookies[cookie_name] + + def get_secure_cookie(cookie_name): + if not previous_request or cookie_name not in cookies: + raise Exception('No cookie ' + cookie_name + ' is available') + + value = cookies[cookie_name] + if not value.startswith('!SECURE!'): + raise Exception('Cookie ' + cookie_name + ' is not a secure cookie') + + return value[8:-3].encode('utf8') + request_handler.get_argument = get_argument + request_handler.set_secure_cookie = set_secure_cookie + request_handler.get_secure_cookie = get_secure_cookie + request_handler.clear_cookie = clear_cookie request_handler.request = mock_object() request_handler.request.method = method request_handler.request.headers = headers + request_handler._cookies = cookies return request_handler diff --git a/src/tests/utils/mock_server.py b/src/tests/utils/mock_server.py new file mode 100644 index 00000000..e605f1e6 --- /dev/null +++ b/src/tests/utils/mock_server.py @@ -0,0 +1,173 @@ +import re +from typing import Optional, Callable + +import tornado.httpserver +import tornado.netutil +from tornado.web import Application + + +class MockResponse: + def __init__(self, body): + self.body = body + + def __call__(self, request_handler): + if self.body: + request_handler.write(self.body) + + +class MatchResult: + + def __init__(self) -> None: + self.matches = True + self.match_count = 0 + self.unmatched_fields = {} + + def add_match(self): + self.match_count += 1 + + def add_miss(self, field_name, expected_value, actual_value): + self.matches = False + self.unmatched_fields[field_name] = (expected_value, actual_value) + + def add_match_result(self, field_name, expected_value, actual_value, custom_matcher=None): + if custom_matcher: + matches = custom_matcher() + else: + matches = expected_value == actual_value + + if matches: + self.add_match() + else: + self.add_miss(field_name, expected_value, actual_value) + + +class _MockHandler: + + def __init__(self, + method: str, + path_pattern: 
str, + response_handler: Callable[[tornado.web.RequestHandler], None] = None, + headers: Optional[dict] = None, + request_body: Optional[str] = None) -> None: + self.method = method.upper() + self.path_pattern = path_pattern + self.response_handler = response_handler + self.request_body = request_body + self.headers = headers + + def match(self, request_handler) -> MatchResult: + match_result = MatchResult() + + match_result.add_match_result('method', self.method, request_handler.request.method) + match_result.add_match_result( + 'path', + self.path_pattern, + request_handler.request.uri, + lambda: re.match(self.path_pattern, request_handler.request.uri) + ) + + if self.request_body: + actual_body = request_handler.request.body + if actual_body: + actual_body = actual_body.decode('utf8') + match_result.add_match_result( + 'body', + self.request_body, + actual_body) + + if self.headers: + for header, value in self.headers.items(): + actual_value = request_handler.request.headers.get(header) + match_result.add_match_result('Header:' + header, value, actual_value) + + return match_result + + def handle(self, request_handler): + if self.response_handler: + self.response_handler(request_handler) + + def matcher_info(self): + result = { + 'method': self.method, + 'path_pattern': self.path_pattern} + + if self.request_body: + result['request_body'] = self.request_body + + if self.headers: + result['headers'] = self.headers + + return str(result) + + +class MockServer: + def __init__(self) -> None: + application = Application(handlers=[(r'/.*', MockRequestHandler)]) + + sockets = tornado.netutil.bind_sockets(0, '127.0.0.1') + self._port = sockets[0].getsockname()[:2][1] + self.server = tornado.httpserver.HTTPServer(application) + self.server.add_sockets(sockets) + application.mock_server = self + + self.mock_handlers: list[_MockHandler] = [] + + def get_host(self): + return 'http://127.0.0.1:' + str(self._port) + + def handle_request(self, request_handler: 
tornado.web.RequestHandler): + all_matches = [] + for mock_handler in self.mock_handlers: + all_matches.append((mock_handler, mock_handler.match(request_handler))) + + sorted_matches = sorted(all_matches, key=lambda x: (x[1].matches, x[1].match_count), reverse=True) + + (most_suitable_handler, match_result) = sorted_matches[0] + + if match_result.matches: + most_suitable_handler.handle(request_handler) + return + + raise Exception('Cannot match request + ' + repr(request_handler.request) + '\n' + + 'Most suitable handler : ' + most_suitable_handler.matcher_info() + '\n' + + 'Unmatches fields: ' + str(match_result.unmatched_fields)) + + def register_mock( + self, + method, + path_pattern, + request_body=None, + headers=None, + response_handler: Callable[[tornado.web.RequestHandler], None] = None, + response: MockResponse = None, + ): + if response and response_handler: + raise Exception('response and response_handler cannot be specified at the same time') + + if response: + response_handler = response + + handler = _MockHandler( + method, + path_pattern, + request_body=request_body, + headers=headers, + response_handler=response_handler) + + self.mock_handlers.append(handler) + + return handler + + def unregister_mock(self, mock_handler): + self.mock_handlers.remove(mock_handler) + + def cleanup(self): + self.mock_handlers.clear() + + +class MockRequestHandler(tornado.web.RequestHandler): + def get(self, *args, **kwargs): + self.application.mock_server.handle_request(self) + + def post(self, *args, **kwargs): + self.application.mock_server.handle_request(self) diff --git a/src/utils/tornado_utils.py b/src/utils/tornado_utils.py index c56797ff..c109e937 100644 --- a/src/utils/tornado_utils.py +++ b/src/utils/tornado_utils.py @@ -1,8 +1,14 @@ +import asyncio import json import re +import threading +from concurrent.futures import Future from urllib import parse as urllib_parse from urllib.parse import urljoin +import tornado.ioloop +import tornado.websocket + from 
model.model_helper import is_empty from utils import string_utils from utils.string_utils import unwrap_quotes @@ -93,6 +99,10 @@ def get_secure_cookie(request_handler, key): return value.decode('utf-8') +def can_write_secure_cookie(request_handler): + return not isinstance(request_handler, tornado.websocket.WebSocketHandler) + + def parse_header(header): header_split = [] current = '' @@ -134,3 +144,33 @@ def parse_header(header): sub_headers_dict[key] = value return main_value, sub_headers_dict + + +separate_io_loop = None +io_loop_lock = threading.RLock() + + +def run_sync(func): + global separate_io_loop + + if separate_io_loop is None: + with io_loop_lock: + if separate_io_loop is None: + io_loop_future = Future() + + def run_new_ioloop(): + try: + io_loop = asyncio.new_event_loop() + io_loop_future.set_result(io_loop) + io_loop.run_forever() + except Exception as e: + io_loop_future.set_exception(e) + + # We need to run it in another thread because ioloop.current.run_sync starts/stops the current ioloop, + # which is not acceptable + thread = threading.Thread(target=run_new_ioloop, daemon=True) + thread.start() + separate_io_loop = io_loop_future.result() + + future = asyncio.run_coroutine_threadsafe(func, separate_io_loop) + return future.result() diff --git a/src/web/server.py b/src/web/server.py index 82fd1848..e3b7f586 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -42,7 +42,7 @@ from utils.tornado_utils import respond_error, redirect_relative, get_form_file from web.script_config_socket import ScriptConfigSocket, active_config_models from web.streaming_form_reader import StreamingFormReader -from web.web_auth_utils import check_authorization +from web.web_auth_utils import check_authorization, check_authorization_sync from web.web_utils import wrap_to_server_event, identify_user, inject_user, get_user from web.xheader_app_wrapper import autoapply_xheaders @@ -266,10 +266,6 @@ def __init__(self, application, request, **kwargs): 
@check_authorization @inject_user def open(self, user, execution_id): - auth = self.application.auth - if not auth.is_authenticated(self): - return None - execution_service = self.application.execution_service try: @@ -503,7 +499,7 @@ def get(self, user, execution_id): class AuthorizedStaticFileHandler(BaseStaticHandler): admin_files = ['admin.html', 'css/admin.css', 'admin.js', 'admin-deps.css'] - @check_authorization + @check_authorization_sync def validate_absolute_path(self, root, absolute_path): if not self.application.auth.is_enabled() and (absolute_path.endswith("/login.html")): raise tornado.web.HTTPError(404) diff --git a/src/web/web_auth_utils.py b/src/web/web_auth_utils.py index 756b5802..c28f7c05 100644 --- a/src/web/web_auth_utils.py +++ b/src/web/web_auth_utils.py @@ -8,18 +8,28 @@ import tornado.websocket from auth.auth_base import AuthRejectedError, AuthFailureError +from utils import tornado_utils from utils.tornado_utils import redirect_relative from web.web_utils import identify_user LOGGER = logging.getLogger('web_server') -webpack_prefixed_extensions = ['.css', '.js.map', '.js', '.jpg', '.woff', '.woff2'] +webpack_prefixed_extensions = ['.css', '.js.map', '.js', '.jpg', '.woff', '.woff2', '.png'] + + +def check_authorization_sync(func): + wrapper = check_authorization(func) + + def sync_wrapper(self, *args, **kwargs): + return tornado_utils.run_sync(wrapper(self, *args, **kwargs)) + + return sync_wrapper # In case of REST requests we don't redirect explicitly, but reply with Unauthorized code. 
# Client application should provide redirection in the way it likes -def check_authorization(func): - def wrapper(self, *args, **kwargs): +def check_authorization(func, ): + async def wrapper(self, *args, **kwargs): auth = self.application.auth authorizer = self.application.authorizer @@ -31,7 +41,7 @@ def wrapper(self, *args, **kwargs): return func(self, *args, **kwargs) try: - authenticated = auth.is_authenticated(self) + authenticated = await auth.is_authenticated(self) except (AuthRejectedError, AuthFailureError) as e: message = 'On-fly auth rejected' LOGGER.warning(message + ': ' + str(e)) @@ -104,7 +114,8 @@ def is_allowed_during_login(request_path, login_url, request_handler): '/fonts/roboto-latin-500.woff', '/fonts/roboto-latin-400.woff2', '/fonts/roboto-latin-400.woff', - '/img/titleBackground_login.jpg'] + '/img/titleBackground_login.jpg', + '/img/gitlab-icon-rgb.png'] return (request_path in login_resources) or (request_path.startswith('/theme/')) From c85fc7b3cfab2dc741721bdf823df1bc0949f518 Mon Sep 17 00:00:00 2001 From: UrekD Date: Wed, 28 Jun 2023 17:48:46 +0200 Subject: [PATCH 321/398] Fix exception formatting --- src/scheduling/schedule_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 2af3f8af..49aad747 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -10,7 +10,7 @@ def _read_datetime(incoming_schedule_config, key): datetime_value = model_helper.read_datetime_from_config(key, incoming_schedule_config) if datetime_value is None: - raise InvalidScheduleException('%1 is required', key) + raise InvalidScheduleException(f'{key} is required') return datetime_value def _read_repeat_unit(incoming_schedule_config): From 90a790907c3e3d377da5a2e92a180271e4b6dbad Mon Sep 17 00:00:00 2001 From: UrekD Date: Wed, 28 Jun 2023 17:57:46 +0200 Subject: [PATCH 322/398] Removed redundant check --- 
src/scheduling/schedule_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index ee2e424e..0233694a 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -127,7 +127,7 @@ def schedule_job(self, job: SchedulingJob, job_path): next_datetime = schedule.get_next_time() if schedule.end_option == 'on': - if date_utils.is_past(schedule.end_arg) or next_datetime > schedule.end_arg: + if next_datetime > schedule.end_arg: return LOGGER.info( From afe7f1a85b9c889f1bf4ccad86dfb4b20ebdb80d Mon Sep 17 00:00:00 2001 From: UrekD Date: Wed, 28 Jun 2023 23:42:44 +0200 Subject: [PATCH 323/398] Validate end_option is not none --- src/scheduling/schedule_config.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 49aad747..595876ef 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -57,6 +57,9 @@ def read_schedule_config(incoming_schedule_config): if repeatable: end_option = incoming_schedule_config.get('end_option') + if end_option is None: + raise InvalidScheduleException('end_option is required for repeatable schedule') + prepared_schedule_config.end_option = end_option if end_option == 'on': prepared_schedule_config.end_arg = _read_datetime(incoming_schedule_config, 'end_arg') From bd37d615353b9e41c49346c019563a7f70056d78 Mon Sep 17 00:00:00 2001 From: UrekD Date: Wed, 28 Jun 2023 23:46:01 +0200 Subject: [PATCH 324/398] vue maxExecuteCount --- web-src/src/main-app/components/schedule/SchedulePanel.vue | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index 172cefe8..a00d9b94 100644 --- a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ 
b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -71,7 +71,7 @@
    Count - +
    @@ -139,7 +139,7 @@ export default { endTime: endDay.toTimeString().substr(0, 5), id: null, repeatPeriod: 1, - executeCount: 1, + maxExecuteCount: 1, repeatTimeUnit: 'days', weekDays: [ {'day': 'Monday', active: currentDay === 1}, @@ -183,7 +183,7 @@ export default { let endArg = null; if (this.endOption === 'after') { - endArg = this.executeCount; + endArg = this.maxExecuteCount; } else if (this.endOption === 'on') { const endDatetime = new Date(this.endDate); const [hoursEnd, minutesEnd] = this.endTime.split(':'); From 4731af493f254ff1c8fbb4f38625424430a94c0e Mon Sep 17 00:00:00 2001 From: UrekD Date: Thu, 29 Jun 2023 00:28:08 +0200 Subject: [PATCH 325/398] Rename on and after --- src/scheduling/schedule_config.py | 8 ++++---- src/scheduling/schedule_service.py | 10 +++++----- .../components/schedule/SchedulePanel.vue | 20 ++++++++++--------- 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 595876ef..fb64053a 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -61,9 +61,9 @@ def read_schedule_config(incoming_schedule_config): raise InvalidScheduleException('end_option is required for repeatable schedule') prepared_schedule_config.end_option = end_option - if end_option == 'on': + if end_option == 'end_datetime': prepared_schedule_config.end_arg = _read_datetime(incoming_schedule_config, 'end_arg') - elif end_option == 'after': + elif end_option == 'max_executions': prepared_schedule_config.end_arg = model_helper.read_int_from_config('end_arg', incoming_schedule_config) else: prepared_schedule_config.end_option = 'never' @@ -94,10 +94,10 @@ def as_serializable_dict(self): 'start_datetime': date_utils.to_iso_string(self.start_datetime), } - if self.end_option == 'on': + if self.end_option == 'end_datetime': result['end_option'] = self.end_option result['end_arg'] = date_utils.to_iso_string(self.end_arg) - elif self.end_option == 
'after': + elif self.end_option == 'max_executions': result['end_option'] = self.end_option result['end_arg'] = self.end_arg diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 0233694a..2ce2384b 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -83,11 +83,11 @@ def create_job(self, script_name, parameter_values, incoming_schedule_config, us if not schedule_config.repeatable and date_utils.is_past(schedule_config.start_datetime): raise InvalidScheduleException('Start date should be in the future') - if schedule_config.end_option == 'on': + if schedule_config.end_option == 'end_datetime': if schedule_config.start_datetime > schedule_config.end_arg: raise InvalidScheduleException('End date should be after start date') - if schedule_config.end_option == 'after' and schedule_config.end_arg <= 0: + if schedule_config.end_option == 'max_executions' and schedule_config.end_arg <= 0: raise InvalidScheduleException('Count should be greater than 0!') id = self._id_generator.next_id() @@ -121,12 +121,12 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and date_utils.is_past(schedule.start_datetime): return - if schedule.end_option == 'after' and schedule.end_arg <= 0: + if schedule.end_option == 'max_executions' and schedule.end_arg <= 0: return next_datetime = schedule.get_next_time() - if schedule.end_option == 'on': + if schedule.end_option == 'end_datetime': if next_datetime > schedule.end_arg: return @@ -159,7 +159,7 @@ def cleanup(): self._execution_service.add_finish_listener(cleanup, execution_id) - if job.schedule.end_option == 'after': + if job.schedule.end_option == 'max_executions': job.schedule.end_arg -= 1 file_utils.write_file( diff --git a/web-src/src/main-app/components/schedule/SchedulePanel.vue b/web-src/src/main-app/components/schedule/SchedulePanel.vue index a00d9b94..64c74d67 100644 --- 
a/web-src/src/main-app/components/schedule/SchedulePanel.vue +++ b/web-src/src/main-app/components/schedule/SchedulePanel.vue @@ -42,7 +42,7 @@
    - End + End:


    -
    +
    Ending
    -
    +
    Count
    @@ -182,13 +182,15 @@ export default { let endOption = this.endOption; let endArg = null; - if (this.endOption === 'after') { + if (this.endOption === 'maxExecuteCount') { endArg = this.maxExecuteCount; - } else if (this.endOption === 'on') { + endOption = 'max_executions'; + } else if (this.endOption === 'endDatetime') { const endDatetime = new Date(this.endDate); const [hoursEnd, minutesEnd] = this.endTime.split(':'); endDatetime.setHours(parseInt(hoursEnd), parseInt(minutesEnd), 0, 0); endArg = endDatetime; + endOption = 'end_datetime'; } const weekDays = this.weekDays.filter(day => day.active).map(day => day.day); From d65eb5d70780b540168e025f82a77ce3abab22dd Mon Sep 17 00:00:00 2001 From: yshepilov Date: Thu, 29 Jun 2023 23:10:09 +0200 Subject: [PATCH 326/398] #654 fixed flickering test --- src/auth/oauth_token_manager.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/auth/oauth_token_manager.py b/src/auth/oauth_token_manager.py index 21542064..cc937292 100644 --- a/src/auth/oauth_token_manager.py +++ b/src/auth/oauth_token_manager.py @@ -111,9 +111,10 @@ def _schedule_token_refresh(self, username, refresh_token, next_refresh_datetime if not self._scheduler: self.scheduler = Scheduler() - if (next_refresh_datetime - datetime.datetime.now()) < datetime.timedelta(seconds=30): - next_refresh_datetime_adjusted = next_refresh_datetime - elif (next_refresh_datetime - datetime.datetime.now()) < datetime.timedelta(minutes=2): + token_expires_in = next_refresh_datetime - datetime.datetime.now() + if token_expires_in < datetime.timedelta(seconds=30): + next_refresh_datetime_adjusted = next_refresh_datetime - (token_expires_in / 2) + elif token_expires_in < datetime.timedelta(minutes=2): next_refresh_datetime_adjusted = next_refresh_datetime - datetime.timedelta(seconds=10) else: next_refresh_datetime_adjusted = next_refresh_datetime - datetime.timedelta(minutes=1) From 24d8f833ccce1eb2e52e860ec0a860dc3f1fc417 Mon Sep 17 00:00:00 2001 
From: UrekD Date: Fri, 30 Jun 2023 18:51:48 +0200 Subject: [PATCH 327/398] Implement execution count for repeatables, validate end_option and end_arg, increment executions and check for max_exec --- src/scheduling/schedule_config.py | 49 ++++++++++++++++++++++++------ src/scheduling/schedule_service.py | 6 ++-- 2 files changed, 42 insertions(+), 13 deletions(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index fb64053a..079f37fb 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -31,6 +31,37 @@ def _read_repeat_period(incoming_schedule_config): return period +def _read_executions_count(incoming_schedule_config): + executions_count = model_helper.read_int_from_config('executions_count', incoming_schedule_config) + if executions_count is None: + executions_count = 0 + elif executions_count < 0: + raise InvalidScheduleException('executions_count should be > 0') + return executions_count + +def _read_end_arg_int(incoming_schedule_config): + end_arg = model_helper.read_int_from_config('end_arg', incoming_schedule_config) + if end_arg is None: + raise InvalidScheduleException('end_arg is required for repeatable schedule') + elif end_arg <= 0: + raise InvalidScheduleException('end_arg should be > 0') + return end_arg + + +def _read_end_args(incoming_schedule_config): + end_option = incoming_schedule_config.get('end_option') + if end_option is None: + raise InvalidScheduleException('end_option is required for repeatable schedule') + elif end_option == 'end_datetime': + end_arg = _read_datetime(incoming_schedule_config, 'end_arg') + return end_option,end_arg + elif end_option == 'max_executions': + end_arg = _read_end_arg_int(incoming_schedule_config) + return end_option,end_arg + else: + return end_option,None + + def read_repeatable_flag(incoming_schedule_config): repeatable = model_helper.read_bool_from_config('repeatable', incoming_schedule_config) if repeatable is None: @@ -56,17 +87,9 @@ def 
read_schedule_config(incoming_schedule_config): prepared_schedule_config = ScheduleConfig(repeatable, start_datetime) if repeatable: - end_option = incoming_schedule_config.get('end_option') - if end_option is None: - raise InvalidScheduleException('end_option is required for repeatable schedule') + prepared_schedule_config.executions_count = _read_executions_count(incoming_schedule_config) - prepared_schedule_config.end_option = end_option - if end_option == 'end_datetime': - prepared_schedule_config.end_arg = _read_datetime(incoming_schedule_config, 'end_arg') - elif end_option == 'max_executions': - prepared_schedule_config.end_arg = model_helper.read_int_from_config('end_arg', incoming_schedule_config) - else: - prepared_schedule_config.end_option = 'never' + prepared_schedule_config.end_option, prepared_schedule_config.end_arg = _read_end_args(incoming_schedule_config) prepared_schedule_config.repeat_unit = _read_repeat_unit(incoming_schedule_config) prepared_schedule_config.repeat_period = _read_repeat_period(incoming_schedule_config) @@ -84,6 +107,7 @@ def __init__(self, repeatable, start_datetime) -> None: self.start_datetime = start_datetime # type: datetime self.end_option = None self.end_arg = None + self.executions_count = None self.repeat_unit = None self.repeat_period = None self.weekdays = None @@ -100,6 +124,11 @@ def as_serializable_dict(self): elif self.end_option == 'max_executions': result['end_option'] = self.end_option result['end_arg'] = self.end_arg + else: + result['end_option'] = 'never' + + if self.repeatable: + result['executions_count'] = self.executions_count if self.repeat_unit is not None: result['repeat_unit'] = self.repeat_unit diff --git a/src/scheduling/schedule_service.py b/src/scheduling/schedule_service.py index 2ce2384b..7b64f7dd 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -121,7 +121,7 @@ def schedule_job(self, job: SchedulingJob, job_path): if not schedule.repeatable and 
date_utils.is_past(schedule.start_datetime): return - if schedule.end_option == 'max_executions' and schedule.end_arg <= 0: + if schedule.end_option == 'max_executions' and schedule.end_arg <= schedule.executions_count: return next_datetime = schedule.get_next_time() @@ -159,8 +159,8 @@ def cleanup(): self._execution_service.add_finish_listener(cleanup, execution_id) - if job.schedule.end_option == 'max_executions': - job.schedule.end_arg -= 1 + if job.schedule.repeatable: + job.schedule.executions_count += 1 file_utils.write_file( job_path, From 21f4a5cf476141c0534a6404ef96f7b9590a595c Mon Sep 17 00:00:00 2001 From: UrekD Date: Fri, 30 Jun 2023 18:58:07 +0200 Subject: [PATCH 328/398] Fix dict not parsing end_option for never --- src/scheduling/schedule_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 079f37fb..aaf30701 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -124,7 +124,7 @@ def as_serializable_dict(self): elif self.end_option == 'max_executions': result['end_option'] = self.end_option result['end_arg'] = self.end_arg - else: + elif self.end_option == 'never': result['end_option'] = 'never' if self.repeatable: From d4c30acf933797bb82a4a189b2e58a9bdd122485 Mon Sep 17 00:00:00 2001 From: UrekD Date: Fri, 30 Jun 2023 19:11:45 +0200 Subject: [PATCH 329/398] Removed an extra , in array same functionality --- src/scheduling/schedule_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index aaf30701..e750bbed 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -115,7 +115,7 @@ def __init__(self, repeatable, start_datetime) -> None: def as_serializable_dict(self): result = { 'repeatable': self.repeatable, - 'start_datetime': date_utils.to_iso_string(self.start_datetime), + 'start_datetime': 
date_utils.to_iso_string(self.start_datetime) } if self.end_option == 'end_datetime': From 7d03fdcb3550f194ee8e118704844657a49b11f3 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Mon, 3 Jul 2023 20:52:48 +0200 Subject: [PATCH 330/398] fixed backward incompatibility in pytest-metadata --- src/e2e_tests/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/e2e_tests/conftest.py b/src/e2e_tests/conftest.py index db13ff8d..b337905e 100644 --- a/src/e2e_tests/conftest.py +++ b/src/e2e_tests/conftest.py @@ -83,7 +83,9 @@ def pytest_configure(config): with open(CONFIG_PATH) as config_file: data = json.load(config_file) for config_item in data.keys(): - config._metadata[str(config_item)] = str(data[config_item]) + # https://github.com/pytest-dev/pytest-metadata/issues/70 + from pytest_metadata.plugin import metadata_key + config.stash[metadata_key][str(config_item)] = str(data[config_item]) def pytest_html_report_title(report): From 4225459c3f43023b0b0331b546b5e2bdc833500d Mon Sep 17 00:00:00 2001 From: yshepilov Date: Mon, 3 Jul 2023 22:11:30 +0200 Subject: [PATCH 331/398] #654 fixed non-working files download --- src/web/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/web/server.py b/src/web/server.py index e3b7f586..08c4389d 100755 --- a/src/web/server.py +++ b/src/web/server.py @@ -644,7 +644,7 @@ def set_extra_headers(self, path): encoded_filename = urllib.parse.quote(filename, encoding='utf-8') self.set_header('Content-Disposition', 'attachment; filename*=UTF-8\'\'' + encoded_filename + '') - @check_authorization + @check_authorization_sync def validate_absolute_path(self, root, absolute_path): audit_name = get_audit_name_from_request(self) user_id = identify_user(self) From 5143f66f58e43f0103e680cd082e6c0c2513625c Mon Sep 17 00:00:00 2001 From: UrekD Date: Tue, 4 Jul 2023 22:45:18 +0200 Subject: [PATCH 332/398] Use save_job for saving count --- src/scheduling/schedule_service.py | 4 +--- 1 file changed, 
1 insertion(+), 3 deletions(-) diff --git a/src/scheduling/schedule_service.py index d98bbacc..acbbf815 100644 --- a/src/scheduling/schedule_service.py +++ b/src/scheduling/schedule_service.py @@ -155,9 +155,7 @@ def cleanup(): if job.schedule.repeatable: job.schedule.executions_count += 1 - file_utils.write_file( - job_path, - json.dumps(job.as_serializable_dict(), indent=2)) + self.save_job(job) except: LOGGER.exception('Failed to execute ' + job.get_log_name()) From 3f38a06593fe04ea74c3c4da180a2132adfab8d8 Mon Sep 17 00:00:00 2001 From: UrekD Date: Tue, 4 Jul 2023 22:50:47 +0200 Subject: [PATCH 333/398] Removed unneeded function for read execution count --- src/scheduling/schedule_config.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index e750bbed..3516f75a 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -31,14 +31,6 @@ def _read_repeat_period(incoming_schedule_config): return period -def _read_executions_count(incoming_schedule_config): - executions_count = model_helper.read_int_from_config('executions_count', incoming_schedule_config) - if executions_count is None: - executions_count = 0 - elif executions_count < 0: - raise InvalidScheduleException('executions_count should be > 0') - return executions_count - def _read_end_arg_int(incoming_schedule_config): end_arg = model_helper.read_int_from_config('end_arg', incoming_schedule_config) if end_arg is None: @@ -87,7 +79,7 @@ def read_schedule_config(incoming_schedule_config): prepared_schedule_config = ScheduleConfig(repeatable, start_datetime) if repeatable: - prepared_schedule_config.executions_count = _read_executions_count(incoming_schedule_config) + prepared_schedule_config.executions_count = model_helper.read_int_from_config('executions_count', incoming_schedule_config, default=0) prepared_schedule_config.end_option, 
prepared_schedule_config.end_arg = _read_end_args(incoming_schedule_config) From 59a7bf157f01521e512a61cc5f6206b8e0c20cac Mon Sep 17 00:00:00 2001 From: UrekD Date: Tue, 4 Jul 2023 23:19:48 +0200 Subject: [PATCH 334/398] Have no end_arg for repeatable without end condition --- src/scheduling/schedule_config.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/scheduling/schedule_config.py b/src/scheduling/schedule_config.py index 3516f75a..db2b9d34 100644 --- a/src/scheduling/schedule_config.py +++ b/src/scheduling/schedule_config.py @@ -42,9 +42,7 @@ def _read_end_arg_int(incoming_schedule_config): def _read_end_args(incoming_schedule_config): end_option = incoming_schedule_config.get('end_option') - if end_option is None: - raise InvalidScheduleException('end_option is required for repeatable schedule') - elif end_option == 'end_datetime': + if end_option == 'end_datetime': end_arg = _read_datetime(incoming_schedule_config, 'end_arg') return end_option,end_arg elif end_option == 'max_executions': @@ -116,8 +114,6 @@ def as_serializable_dict(self): elif self.end_option == 'max_executions': result['end_option'] = self.end_option result['end_arg'] = self.end_arg - elif self.end_option == 'never': - result['end_option'] = 'never' if self.repeatable: result['executions_count'] = self.executions_count From d519350790919d5d17e963eeb1aada0c4fc91cb3 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Thu, 27 Jul 2023 21:22:25 +0200 Subject: [PATCH 335/398] #654 made oauth failure to logout user for any oauth (not only keycloak) --- src/auth/auth_abstract_oauth.py | 12 ++++++++---- src/auth/auth_keycloak_openid.py | 10 +--------- src/web/script_config_socket.py | 2 +- 3 files changed, 10 insertions(+), 14 deletions(-) diff --git a/src/auth/auth_abstract_oauth.py b/src/auth/auth_abstract_oauth.py index 95e8df51..c137f8eb 100644 --- a/src/auth/auth_abstract_oauth.py +++ b/src/auth/auth_abstract_oauth.py @@ -13,6 +13,7 @@ import tornado import 
tornado.ioloop from tornado import httpclient, escape +from tornado.httpclient import HTTPClientError from auth import auth_base from auth.auth_base import AuthFailureError, AuthBadRequestException, AuthRejectedError @@ -266,10 +267,13 @@ async def _do_update_user_auth_async(self, username, user_state, access_token): try: user_info = await self.fetch_user_info(access_token) # type: _OauthUserInfo - except AuthRejectedError: - LOGGER.info(f'User {username} is not authenticated anymore. Logging out') - self._remove_user(username) - return + except (AuthRejectedError, HTTPClientError) as e: + if (not isinstance(e, HTTPClientError)) or (e.code == 401): + LOGGER.info(f'User {username} is not authenticated anymore. Logging out') + self._remove_user(username) + return + else: + raise e if (not user_info) or (not user_info.username): LOGGER.error('Failed to fetch user info: %s', str(user_info)) diff --git a/src/auth/auth_keycloak_openid.py b/src/auth/auth_keycloak_openid.py index fe4b1fcf..7aae259a 100644 --- a/src/auth/auth_keycloak_openid.py +++ b/src/auth/auth_keycloak_openid.py @@ -1,10 +1,8 @@ import logging from tornado import escape -from tornado.httpclient import HTTPClientError from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo -from auth.auth_base import AuthRejectedError from model import model_helper LOGGER = logging.getLogger('script_server.GoogleOauthAuthorizer') @@ -33,13 +31,7 @@ async def fetch_user_info(self, access_token) -> _OauthUserInfo: self._realm_url + 'protocol/openid-connect/userinfo', headers={'Authorization': 'Bearer ' + access_token}) - try: - user_response = await user_future - except HTTPClientError as e: - if e.code == 401: - raise AuthRejectedError('Failed to fetch user info') - else: - raise e + user_response = await user_future if not user_response: raise Exception('No response during loading userinfo') diff --git a/src/web/script_config_socket.py b/src/web/script_config_socket.py index 691a5058..de26665f 
100644 --- a/src/web/script_config_socket.py +++ b/src/web/script_config_socket.py @@ -197,7 +197,7 @@ def load_model(): self.close(code=CorruptConfigFileException.HTTP_CODE, reason=str(e)) return None except Exception: - message = 'Failed to load script config ' + config_name + message = 'Failed to load script config ' + str(config_name) LOGGER.exception(message) self.close(code=500, reason=message) return None From 179f18c142b17578a57d95baee42c4a5929001e8 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Thu, 27 Jul 2023 22:27:03 +0200 Subject: [PATCH 336/398] #602 fixed a bug that fast scripts were notified about finish before start --- src/execution/execution_service.py | 4 ++-- src/tests/execution_service_test.py | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/src/execution/execution_service.py b/src/execution/execution_service.py index 4118a574..d439f1ec 100644 --- a/src/execution/execution_service.py +++ b/src/execution/execution_service.py @@ -62,10 +62,10 @@ def start_script(self, config, user: User): config=config) self._active_executor_ids.add(execution_id) - self._add_post_finish_handling(execution_id, executor, user) - self._fire_execution_started(execution_id, user) + self._add_post_finish_handling(execution_id, executor, user) + return execution_id def stop_script(self, execution_id, user): diff --git a/src/tests/execution_service_test.py b/src/tests/execution_service_test.py index cbda159e..17ae410d 100644 --- a/src/tests/execution_service_test.py +++ b/src/tests/execution_service_test.py @@ -236,6 +236,20 @@ def test_finish_listener_by_id(self): self.get_process(id1).stop() self.assertEqual(1, len(notifications)) + def test_start_finish_listener_order(self): + executor._process_creator = create_process_wrapper + + execution_service = self.create_execution_service() + + notifications = [] + + execution_service.add_finish_listener(lambda _, __: notifications.append('finished')) + 
execution_service.add_start_listener(lambda _, __: notifications.append('started')) + + self._start(execution_service) + + self.assertEqual(['started', 'finished'], notifications) + def _start(self, execution_service, user_id=DEFAULT_USER_ID): return _start(execution_service, user_id) From 0d99e5a8176ecb3f3f706b0f4f954e1f33090ec6 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Thu, 27 Jul 2023 22:49:11 +0200 Subject: [PATCH 337/398] fixed tests --- src/tests/execution_service_test.py | 4 ++++ src/tests/external_model_test.py | 21 ++++++++++++++----- src/tests/scheduling/schedule_service_test.py | 4 +++- 3 files changed, 23 insertions(+), 6 deletions(-) diff --git a/src/tests/execution_service_test.py b/src/tests/execution_service_test.py index 17ae410d..0e5346c2 100644 --- a/src/tests/execution_service_test.py +++ b/src/tests/execution_service_test.py @@ -1,4 +1,5 @@ import copy +import time import unittest from parameterized import parameterized @@ -248,6 +249,9 @@ def test_start_finish_listener_order(self): self._start(execution_service) + if len(notifications) < 2: + time.sleep(0.01) + self.assertEqual(['started', 'finished'], notifications) def _start(self, execution_service, user_id=DEFAULT_USER_ID): diff --git a/src/tests/external_model_test.py b/src/tests/external_model_test.py index 8afb3693..d2b686f0 100644 --- a/src/tests/external_model_test.py +++ b/src/tests/external_model_test.py @@ -174,15 +174,22 @@ def test_config_with_none_values(self): class TestParseExternalSchedule(unittest.TestCase): def test_parse_full_config(self): parsed = parse_external_schedule( - {'repeatable': False, 'startDatetime': '2020-12-30', 'repeatUnit': 'days', 'repeatPeriod': 5, - 'weekDays': ['monday', 'Tuesday']}) + {'repeatable': False, + 'startDatetime': '2020-12-30', + 'repeatUnit': 'days', + 'repeatPeriod': 5, + 'weekDays': ['monday', 'Tuesday'], + 'endOption': 'max_executions', + 'endArg': 3}) self.assertDictEqual({ 'repeatable': False, 'start_datetime': '2020-12-30', 
'repeat_unit': 'days', 'repeat_period': 5, - 'weekdays': ['monday', 'Tuesday']}, + 'weekdays': ['monday', 'Tuesday'], + 'end_option': 'max_executions', + 'end_arg': 3}, parsed) def test_parse_partial_config(self): @@ -194,7 +201,9 @@ def test_parse_partial_config(self): 'start_datetime': '2020-12-30', 'repeat_unit': None, 'repeat_period': None, - 'weekdays': None}, + 'weekdays': None, + 'end_arg': None, + 'end_option': None}, parsed) def test_parse_unknown_field(self): @@ -208,4 +217,6 @@ def test_parse_unknown_field(self): 'start_datetime': '2020-12-30', 'repeat_unit': None, 'repeat_period': None, - 'weekdays': None}, parsed) + 'weekdays': None, + 'end_arg': None, + 'end_option': None}, parsed) diff --git a/src/tests/scheduling/schedule_service_test.py b/src/tests/scheduling/schedule_service_test.py index 9f6494aa..4f944d2a 100644 --- a/src/tests/scheduling/schedule_service_test.py +++ b/src/tests/scheduling/schedule_service_test.py @@ -431,7 +431,8 @@ def create_job(id=None, repeat_unit=None, repeat_period=None, weekdays=None, - parameter_values=None): + parameter_values=None, + executions_count=0): if audit_names is None: audit_names = {audit_utils.HOSTNAME: 'my-host'} @@ -450,6 +451,7 @@ def create_job(id=None, schedule_config.repeat_unit = repeat_unit schedule_config.repeat_period = repeat_period schedule_config.weekdays = weekdays + schedule_config.executions_count = executions_count return SchedulingJob(id, User(user_id, audit_names), schedule_config, script_name, parameter_values) From 0ab219b68e843ef1d182d786e8c76499f4f98b4a Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 4 Aug 2023 21:38:38 +0200 Subject: [PATCH 338/398] rolled up version for release --- web-src/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web-src/package.json b/web-src/package.json index 0285bb90..62cbb500 100644 --- a/web-src/package.json +++ b/web-src/package.json @@ -1,6 +1,6 @@ { "name": "script-server", - "version": "1.17.0", + "version": 
"1.18.0", "private": true, "dependencies": { "ace-builds": "^1.11.2", From 666415aadaf04b004d19e4ed7aedc44c4136ce35 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sat, 5 Aug 2023 13:39:33 +0200 Subject: [PATCH 339/398] fixed flickering test --- src/tests/auth/test_auth_keycloak_openid.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/tests/auth/test_auth_keycloak_openid.py b/src/tests/auth/test_auth_keycloak_openid.py index d82528b6..7698b32f 100644 --- a/src/tests/auth/test_auth_keycloak_openid.py +++ b/src/tests/auth/test_auth_keycloak_openid.py @@ -189,10 +189,14 @@ async def test_success_validate_after_refresh(self): await gen.sleep(0.4 + 0.1) - valid_1 = await self.authenticator.validate_user(username, mock_request_handler(previous_request=request_1)) + valid_1 = await self.authenticator.validate_user(username, mock_request_handler(previous_request=request_1)) self.assertTrue(valid_1) - await gen.sleep(0.1) + for i in range(1, 8): + await gen.sleep(0.05) + + if self.authenticator.get_groups('bugy') == ['g3']: + break self.assertEqual(['g3'], self.authenticator.get_groups('bugy')) From 738acf03dd672ad9717faaa818e7555cce0a8861 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 25 Aug 2023 21:39:44 +0200 Subject: [PATCH 340/398] fixed failing e2e tests due to chromedriver version mismatch --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 24652535..cfb1fcfd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,7 +29,8 @@ env: - PATH=$HOME/.local/bin:$PATH before_install: - sudo apt-get -y install python3-pip python3-setuptools apache2-utils python3-venv - chromium-browser chromium-chromedriver + - wget https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/116.0.5845.96/linux64/chromedriver-linux64.zip + - unzip chromedriver-linux64.zip -d $HOME/.local/bin install: - pip3 install -r requirements.txt - pip3 install pyasn1 --upgrade From 
6f5bf247a0cf8a2ca9bea19a34fd60dd2aeb7c3a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Aug 2023 22:13:58 +0000 Subject: [PATCH 341/398] Bump apollo-server-core from 2.26.0 to 2.26.2 in /web-src Bumps [apollo-server-core](https://github.com/apollographql/apollo-server/tree/HEAD/packages/apollo-server-core) from 2.26.0 to 2.26.2. - [Release notes](https://github.com/apollographql/apollo-server/releases) - [Commits](https://github.com/apollographql/apollo-server/commits/apollo-server-core@2.26.2/packages/apollo-server-core) --- updated-dependencies: - dependency-name: apollo-server-core dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 61 ++++++++++++++++++++------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index 9fc80a86..669f389c 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -1,12 +1,12 @@ { "name": "script-server", - "version": "1.17.0", + "version": "1.18.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "script-server", - "version": "1.17.0", + "version": "1.18.0", "dependencies": { "ace-builds": "^1.11.2", "axios": "^0.27.2", @@ -160,13 +160,13 @@ } }, "node_modules/@apollographql/graphql-upload-8-fork": { - "version": "8.1.3", - "resolved": "https://registry.npmjs.org/@apollographql/graphql-upload-8-fork/-/graphql-upload-8-fork-8.1.3.tgz", - "integrity": "sha512-ssOPUT7euLqDXcdVv3Qs4LoL4BPtfermW1IOouaqEmj36TpHYDmYDIbKoSQxikd9vtMumFnP87OybH7sC9fJ6g==", + "version": "8.1.4", + "resolved": "https://registry.npmjs.org/@apollographql/graphql-upload-8-fork/-/graphql-upload-8-fork-8.1.4.tgz", + "integrity": "sha512-lHAj/PUegYu02zza9Pg0bQQYH5I0ah1nyIzu2YIqOv41P0vu3GCBISAmQCfFHThK7N3dy7dLFPhoKcXlXRLPoQ==", "dev": true, "dependencies": { "@types/express": "*", - "@types/fs-capacitor": "*", + "@types/fs-capacitor": "^2.0.0", 
"@types/koa": "*", "busboy": "^0.3.1", "fs-capacitor": "^2.0.4", @@ -3669,9 +3669,9 @@ "dev": true }, "node_modules/@types/http-errors": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-1.8.2.tgz", - "integrity": "sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==", "dev": true }, "node_modules/@types/http-proxy": { @@ -4013,9 +4013,9 @@ "dev": true }, "node_modules/@types/koa": { - "version": "2.13.5", - "resolved": "https://registry.npmjs.org/@types/koa/-/koa-2.13.5.tgz", - "integrity": "sha512-HSUOdzKz3by4fnqagwthW/1w/yJspTgppyyalPVbgZf8jQWvdIXcVW5h2DGtw4zYntOaeRGx49r1hxoPWrD4aA==", + "version": "2.13.8", + "resolved": "https://registry.npmjs.org/@types/koa/-/koa-2.13.8.tgz", + "integrity": "sha512-Ugmxmgk/yPRW3ptBTh9VjOLwsKWJuGbymo1uGX0qdaqqL18uJiiG1ZoV0rxCOYSaDGhvEp5Ece02Amx0iwaxQQ==", "dev": true, "dependencies": { "@types/accepts": "*", @@ -5527,14 +5527,15 @@ "dev": true }, "node_modules/apollo-server-core": { - "version": "2.26.0", - "resolved": "https://registry.npmjs.org/apollo-server-core/-/apollo-server-core-2.26.0.tgz", - "integrity": "sha512-z0dAZGu6zLhYLWVaRis6pR1dQbzPhA6xU5z0issR/sQR5kr466vFMF/rq//Jqwpd/A4xfTXZrFmr5urFyl4k4g==", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/apollo-server-core/-/apollo-server-core-2.26.2.tgz", + "integrity": "sha512-r8jOhf1jElaxsNsALFMy/MLiJCqSa1ZiwxkerVYbsEkyWrpD1Khy0extDkTBrfa6uK8CatX7xK9U413bYNhJFA==", + "deprecated": "The `apollo-server-core` package is part of Apollo Server v2 and v3, which are now deprecated (end-of-life October 22nd 2023 and October 22nd 2024, respectively). This package's functionality is now found in the `@apollo/server` package. 
See https://www.apollographql.com/docs/apollo-server/previous-versions/ for more details.", "dev": true, "dependencies": { "@apollographql/apollo-tools": "^0.5.0", "@apollographql/graphql-playground-html": "1.6.27", - "@apollographql/graphql-upload-8-fork": "^8.1.3", + "@apollographql/graphql-upload-8-fork": "^8.1.4", "@josephg/resolvable": "^1.0.0", "@types/ws": "^7.0.0", "apollo-cache-control": "^0.15.0", @@ -25142,13 +25143,13 @@ } }, "@apollographql/graphql-upload-8-fork": { - "version": "8.1.3", - "resolved": "https://registry.npmjs.org/@apollographql/graphql-upload-8-fork/-/graphql-upload-8-fork-8.1.3.tgz", - "integrity": "sha512-ssOPUT7euLqDXcdVv3Qs4LoL4BPtfermW1IOouaqEmj36TpHYDmYDIbKoSQxikd9vtMumFnP87OybH7sC9fJ6g==", + "version": "8.1.4", + "resolved": "https://registry.npmjs.org/@apollographql/graphql-upload-8-fork/-/graphql-upload-8-fork-8.1.4.tgz", + "integrity": "sha512-lHAj/PUegYu02zza9Pg0bQQYH5I0ah1nyIzu2YIqOv41P0vu3GCBISAmQCfFHThK7N3dy7dLFPhoKcXlXRLPoQ==", "dev": true, "requires": { "@types/express": "*", - "@types/fs-capacitor": "*", + "@types/fs-capacitor": "^2.0.0", "@types/koa": "*", "busboy": "^0.3.1", "fs-capacitor": "^2.0.4", @@ -27705,9 +27706,9 @@ "dev": true }, "@types/http-errors": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-1.8.2.tgz", - "integrity": "sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==", "dev": true }, "@types/http-proxy": { @@ -27993,9 +27994,9 @@ "dev": true }, "@types/koa": { - "version": "2.13.5", - "resolved": "https://registry.npmjs.org/@types/koa/-/koa-2.13.5.tgz", - "integrity": "sha512-HSUOdzKz3by4fnqagwthW/1w/yJspTgppyyalPVbgZf8jQWvdIXcVW5h2DGtw4zYntOaeRGx49r1hxoPWrD4aA==", + "version": "2.13.8", 
+ "resolved": "https://registry.npmjs.org/@types/koa/-/koa-2.13.8.tgz", + "integrity": "sha512-Ugmxmgk/yPRW3ptBTh9VjOLwsKWJuGbymo1uGX0qdaqqL18uJiiG1ZoV0rxCOYSaDGhvEp5Ece02Amx0iwaxQQ==", "dev": true, "requires": { "@types/accepts": "*", @@ -29292,14 +29293,14 @@ } }, "apollo-server-core": { - "version": "2.26.0", - "resolved": "https://registry.npmjs.org/apollo-server-core/-/apollo-server-core-2.26.0.tgz", - "integrity": "sha512-z0dAZGu6zLhYLWVaRis6pR1dQbzPhA6xU5z0issR/sQR5kr466vFMF/rq//Jqwpd/A4xfTXZrFmr5urFyl4k4g==", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/apollo-server-core/-/apollo-server-core-2.26.2.tgz", + "integrity": "sha512-r8jOhf1jElaxsNsALFMy/MLiJCqSa1ZiwxkerVYbsEkyWrpD1Khy0extDkTBrfa6uK8CatX7xK9U413bYNhJFA==", "dev": true, "requires": { "@apollographql/apollo-tools": "^0.5.0", "@apollographql/graphql-playground-html": "1.6.27", - "@apollographql/graphql-upload-8-fork": "^8.1.3", + "@apollographql/graphql-upload-8-fork": "^8.1.4", "@josephg/resolvable": "^1.0.0", "@types/ws": "^7.0.0", "apollo-cache-control": "^0.15.0", From a918ba19388e57ff4fdd2d359b3ebd162e72dfe5 Mon Sep 17 00:00:00 2001 From: Jason Boblick Date: Fri, 29 Sep 2023 10:51:19 -0400 Subject: [PATCH 342/398] make attachments optional for email alerts --- src/communications/destination_email.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/communications/destination_email.py b/src/communications/destination_email.py index 13731296..af3575d1 100644 --- a/src/communications/destination_email.py +++ b/src/communications/destination_email.py @@ -56,6 +56,7 @@ def __init__(self, params_dict): self.auth_enabled = read_bool_from_config('auth_enabled', params_dict) self.login = params_dict.get('login') self.tls = read_bool_from_config('tls', params_dict) + self.include_files = read_bool_from_config('include_files', params_dict, default=True) self.password = self.read_password(params_dict) self.to_addresses = split_addresses(self.to_addresses) @@ -103,7 
+104,7 @@ def send(self, title, body, files=None): if self.auth_enabled: server.login(self.login, self.password) - if files: + if self.include_files and files: for file in files: filename = file.filename part = MIMEApplication(file.content, Name=filename) From bc6071cf65fb54e4c2e250ddf75d097a73dee5f9 Mon Sep 17 00:00:00 2001 From: Jason Boblick Date: Tue, 3 Oct 2023 14:04:01 -0400 Subject: [PATCH 343/398] rename variable to better match purpose --- src/communications/destination_email.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/communications/destination_email.py b/src/communications/destination_email.py index af3575d1..a53ca465 100644 --- a/src/communications/destination_email.py +++ b/src/communications/destination_email.py @@ -56,7 +56,7 @@ def __init__(self, params_dict): self.auth_enabled = read_bool_from_config('auth_enabled', params_dict) self.login = params_dict.get('login') self.tls = read_bool_from_config('tls', params_dict) - self.include_files = read_bool_from_config('include_files', params_dict, default=True) + self.attach_files = read_bool_from_config('attach_files', params_dict, default=True) self.password = self.read_password(params_dict) self.to_addresses = split_addresses(self.to_addresses) @@ -104,7 +104,7 @@ def send(self, title, body, files=None): if self.auth_enabled: server.login(self.login, self.password) - if self.include_files and files: + if self.attach_files and files: for file in files: filename = file.filename part = MIMEApplication(file.content, Name=filename) From 20370ff326c0291a8497d385a0ca3491cd001296 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sun, 15 Oct 2023 17:54:32 +0200 Subject: [PATCH 344/398] #699 added automatic script groups based on sub-folders --- src/config/config_service.py | 34 +++++++-- src/main.py | 3 +- src/model/script_config.py | 13 +++- src/model/server_conf.py | 20 +++++ src/tests/config_service_test.py | 73 ++++++++++++++++--- src/tests/execution_logging_test.py | 5 +- 
src/tests/execution_service_test.py | 17 +++-- src/tests/scheduling/schedule_service_test.py | 6 +- src/tests/script_config_test.py | 62 +++++++++++----- src/tests/test_utils.py | 30 ++++++-- src/tests/web/script_config_socket_test.py | 2 +- src/tests/web/server_test.py | 2 +- src/utils/file_utils.py | 36 +++++---- 13 files changed, 229 insertions(+), 74 deletions(-) diff --git a/src/config/config_service.py b/src/config/config_service.py index 9e5c1324..bcc4ebaa 100644 --- a/src/config/config_service.py +++ b/src/config/config_service.py @@ -3,6 +3,7 @@ import os import re import shutil +from datetime import datetime from typing import NamedTuple, Optional from auth.authorization import Authorizer @@ -14,8 +15,6 @@ from utils.file_utils import to_filename from utils.process_utils import ProcessInvoker from utils.string_utils import is_blank, strip -from datetime import datetime - SCRIPT_EDIT_CODE_MODE = 'new_code' SCRIPT_EDIT_UPLOAD_MODE = 'upload_script' @@ -56,12 +55,19 @@ def _create_archive_filename(filename): class ConfigService: - def __init__(self, authorizer, conf_folder, process_invoker: ProcessInvoker) -> None: + def __init__( + self, + authorizer, + conf_folder, + group_scripts_by_folder: bool, + process_invoker: ProcessInvoker) -> None: + self._authorizer = authorizer # type: Authorizer self._script_configs_folder = os.path.join(conf_folder, 'runners') self._scripts_folder = os.path.join(conf_folder, 'scripts') self._scripts_deleted_folder = os.path.join(conf_folder, 'deleted') self._process_invoker = process_invoker + self._group_scripts_by_folder = group_scripts_by_folder file_utils.prepare_folder(self._script_configs_folder) file_utils.prepare_folder(self._scripts_deleted_folder) @@ -117,7 +123,7 @@ def update_config(self, user, config, filename, uploaded_script): with open(original_file_path, 'r') as f: original_config_json = json.load(f) - short_original_config = script_config.read_short(original_file_path, original_config_json) + 
short_original_config = self.read_short_config(original_config_json, original_file_path) name = config['name'] @@ -133,6 +139,12 @@ def update_config(self, user, config, filename, uploaded_script): LOGGER.info('Updating script config "' + name + '" in ' + original_file_path) self._save_config(config, original_file_path) + def read_short_config(self, config_json, file_path): + return script_config.read_short( + file_path, + config_json, + self._group_scripts_by_folder, + self._script_configs_folder) def delete_config(self, user, name): self._check_admin_access(user) @@ -220,7 +232,7 @@ def list_configs(self, user, mode=None): def load_script(path, content) -> Optional[ShortConfig]: try: config_object = self.load_config_file(path, content) - short_config = script_config.read_short(path, config_object) + short_config = self.read_short_config(config_object, path) if short_config is None: return None @@ -257,7 +269,9 @@ def load_config_model(self, name, user, parameter_values=None, skip_invalid_para user, parameter_values, skip_invalid_parameters, - self._process_invoker) + self._process_invoker, + self._group_scripts_by_folder, + self._script_configs_folder) def _visit_script_configs(self, visitor): configs_dir = self._script_configs_folder @@ -296,7 +310,7 @@ def _find_config(self, name, user) -> Optional[ConfigSearchResult]: def find_and_load(path: str, content): try: config_object = self.load_config_file(path, content) - short_config = script_config.read_short(path, config_object) + short_config = self.read_short_config(config_object, path) if short_config is None: return None @@ -331,7 +345,9 @@ def _load_script_config( user, parameter_values, skip_invalid_parameters, - process_invoker): + process_invoker, + group_scripts_by_folder, + script_configs_folder): if isinstance(content_or_json_dict, str): json_object = custom_json.loads(content_or_json_dict) @@ -342,6 +358,8 @@ def _load_script_config( path, user.get_username(), user.get_audit_name(), + 
group_scripts_by_folder, + script_configs_folder, process_invoker, pty_enabled_default=os_utils.is_pty_supported()) diff --git a/src/main.py b/src/main.py index 6d6eaee7..e1054034 100644 --- a/src/main.py +++ b/src/main.py @@ -103,7 +103,8 @@ def main(): process_invoker = ProcessInvoker(server_config.env_vars) - config_service = ConfigService(authorizer, CONFIG_FOLDER, process_invoker) + config_service = ConfigService( + authorizer, CONFIG_FOLDER, server_config.groups_config.group_by_folders, process_invoker) alerts_service = AlertsService(server_config.alerts_config) alerts_service = alerts_service diff --git a/src/model/script_config.py b/src/model/script_config.py index 8475654a..77787204 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -62,14 +62,16 @@ def __init__(self, path, username, audit_name, + group_by_folders: bool, + script_configs_folder: str, process_invoker: ProcessInvoker, pty_enabled_default=True): super().__init__() - short_config = read_short(path, config_object) + short_config = read_short(path, config_object, group_by_folders, script_configs_folder) self.name = short_config.name self._pty_enabled_default = pty_enabled_default - self._config_folder = os.path.dirname(path) + self._config_folder = script_configs_folder self._process_invoker = process_invoker self._username = username @@ -363,11 +365,16 @@ def _build_name_from_path(file_path): return name.strip() -def read_short(file_path, json_object): +def read_short(file_path, json_object, group_by_folders: bool, script_configs_folder: str): name = _read_name(file_path, json_object) allowed_users = json_object.get('allowed_users') admin_users = json_object.get('admin_users') group = read_str_from_config(json_object, 'group', blank_to_none=True) + if not group and group_by_folders: + relative_path = file_utils.relative_path(file_path, script_configs_folder) + while os.path.dirname(relative_path): + relative_path = os.path.dirname(relative_path) + group = relative_path 
hidden = read_bool_from_config('hidden', json_object, default=False) if hidden: diff --git a/src/model/server_conf.py b/src/model/server_conf.py index 23bebd62..b8175486 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -29,6 +29,7 @@ def __init__(self) -> None: self.allowed_users = None self.alerts_config = None self.logging_config = None + self.groups_config = ScriptGroupsConfig() # type: ScriptGroupsConfig self.admin_config = None self.title = None self.enable_script_titles = None @@ -75,6 +76,24 @@ def from_json(cls, json_config): return config +class ScriptGroupsConfig: + + def __init__(self) -> None: + self.group_by_folders = True + + @classmethod + def from_json(cls, json_config): + config = ScriptGroupsConfig() + + if json_config: + config.group_by_folders = model_helper.read_bool_from_config( + 'group_by_folders', + json_config, + default=config.group_by_folders) + + return config + + def _build_env_vars(json_object): sensitive_config_paths = [ ['auth', 'secret'], @@ -184,6 +203,7 @@ def from_json(conf_path, temp_folder): config.alerts_config = json_object.get('alerts') config.callbacks_config = json_object.get('callbacks') config.logging_config = LoggingConfig.from_json(json_object.get('logging')) + config.groups_config = ScriptGroupsConfig.from_json(json_object.get('script_groups')) config.user_groups = user_groups config.admin_users = admin_users config.full_history_users = full_history_users diff --git a/src/tests/config_service_test.py b/src/tests/config_service_test.py index a2753df0..5291cb7f 100644 --- a/src/tests/config_service_test.py +++ b/src/tests/config_service_test.py @@ -1,6 +1,7 @@ import json import os import sys +import tempfile import unittest from collections import OrderedDict from shutil import copyfile @@ -30,6 +31,14 @@ def test_list_configs_when_one(self): self.assertEqual(1, len(configs)) self.assertEqual('conf_x', configs[0].name) + def test_list_configs_when_one_and_symlink(self): + conf_path = 
os.path.join(test_utils.temp_folder, 'runners', 'sub', 'x.json') + with self._temporary_file_symlink(conf_path, {'name': 'test X'}): + configs = self.config_service.list_configs(self.user) + self.assertEqual(1, len(configs)) + self.assertEqual('test X', configs[0].name) + self.assertEqual('sub', configs[0].group) + def test_list_configs_when_multiple(self): _create_script_config_file('conf_x') _create_script_config_file('conf_y') @@ -40,9 +49,9 @@ def test_list_configs_when_multiple(self): self.assertCountEqual(['conf_x', 'conf_y', 'A B C'], conf_names) def test_list_configs_when_multiple_and_subfolders(self): - _create_script_config_file('conf_x', subfolder = 's1') - _create_script_config_file('conf_y', subfolder = 's2') - _create_script_config_file('ABC', subfolder = os.path.join('s1', 'inner')) + _create_script_config_file('conf_x', subfolder='s1') + _create_script_config_file('conf_y', subfolder='s2') + _create_script_config_file('ABC', subfolder=os.path.join('s1', 'inner')) configs = self.config_service.list_configs(self.user) conf_names = [config.name for config in configs] @@ -114,6 +123,36 @@ def test_load_config_with_slash_in_name(self): config = self.config_service.load_config_model('Name with slash /', self.user) self.assertEqual('Name with slash /', config.name) + def test_list_configs_when_multiple_subfolders_and_symlink(self): + def create_config_file(name, relative_path, group=None): + filename = os.path.basename(relative_path) + test_utils.write_script_config( + {'name': name, 'group': group}, + filename, + config_folder=os.path.join(test_utils.temp_folder, 'runners', os.path.dirname(relative_path))) + + subfolder = os.path.join(test_utils.temp_folder, 'runners', 'sub') + symlink_path = os.path.join(subfolder, 'x.json') + with self._temporary_file_symlink(symlink_path, {'name': 'test X'}): + create_config_file('conf Y', os.path.join('sub', 'y', 'conf_y.json')) + create_config_file('conf Z', os.path.join('sub', 'z', 'conf_z.json')) + 
create_config_file('conf A', 'conf_a.json') + create_config_file('conf B', os.path.join('b', 'conf_b.json')) + create_config_file('conf C', os.path.join('c', 'conf_c.json'), group='test group') + + configs = self.config_service.list_configs(self.user) + actual_name_group_map = {c.name: c.group for c in configs} + + self.assertEqual( + actual_name_group_map, + {'test X': 'sub', + 'conf Y': 'sub', + 'conf Z': 'sub', + 'conf A': None, + 'conf B': 'b', + 'conf C': 'test group'}, + ) + def tearDown(self): super().tearDown() test_utils.cleanup() @@ -125,7 +164,19 @@ def setUp(self): self.user = User('ConfigServiceTest', {AUTH_USERNAME: 'ConfigServiceTest'}) self.admin_user = User('admin_user', {AUTH_USERNAME: 'The Admin'}) authorizer = Authorizer(ANY_USER, ['admin_user'], [], [], EmptyGroupProvider()) - self.config_service = ConfigService(authorizer, test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService(authorizer, test_utils.temp_folder, True, test_utils.process_invoker) + + @staticmethod + def _temporary_file_symlink(symlink_path, file_content: dict): + f = tempfile.NamedTemporaryFile() + + f.write(json.dumps(file_content).encode('utf-8')) + f.flush() + subdir = os.path.dirname(symlink_path) + os.makedirs(subdir) + os.symlink(f.name, symlink_path) + + return f class ConfigServiceAuthTest(unittest.TestCase): @@ -209,7 +260,11 @@ def setUp(self): authorizer = Authorizer([], ['adm_user'], [], [], EmptyGroupProvider()) self.user1 = User('user1', {}) self.admin_user = User('adm_user', {}) - self.config_service = ConfigService(authorizer, test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService( + authorizer, + test_utils.temp_folder, + True, + test_utils.process_invoker) def script_path(path): @@ -242,7 +297,7 @@ def setUp(self): authorizer = Authorizer([], ['admin_user', 'admin_non_editor'], [], ['admin_user'], EmptyGroupProvider()) self.admin_user = User('admin_user', {}) - self.config_service = 
ConfigService(authorizer, test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService(authorizer, test_utils.temp_folder, True, test_utils.process_invoker) def tearDown(self): super().tearDown() @@ -416,7 +471,7 @@ def setUp(self): authorizer = Authorizer([], ['admin_user', 'admin_non_editor'], [], ['admin_user'], EmptyGroupProvider()) self.admin_user = User('admin_user', {}) - self.config_service = ConfigService(authorizer, test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService(authorizer, test_utils.temp_folder, True, test_utils.process_invoker) for suffix in 'XYZ': name = 'Conf ' + suffix @@ -669,7 +724,7 @@ def setUp(self): authorizer = Authorizer([], ['admin_user'], [], [], EmptyGroupProvider()) self.admin_user = User('admin_user', {}) - self.config_service = ConfigService(authorizer, test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService(authorizer, test_utils.temp_folder, True, test_utils.process_invoker) def tearDown(self): super().tearDown() @@ -717,7 +772,7 @@ def setUp(self) -> None: authorizer = Authorizer([], ['admin_user', 'admin_non_editor'], [], ['admin_user'], EmptyGroupProvider()) self.admin_user = User('admin_user', {}) - self.config_service = ConfigService(authorizer, test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService(authorizer, test_utils.temp_folder, True, test_utils.process_invoker) for pair in [('script.py', b'123'), ('another.py', b'xyz'), diff --git a/src/tests/execution_logging_test.py b/src/tests/execution_logging_test.py index 738b4da9..d057d6e1 100644 --- a/src/tests/execution_logging_test.py +++ b/src/tests/execution_logging_test.py @@ -542,7 +542,8 @@ def test_logging_values(self): 'my_script', script_command='echo', parameters=[param1, param2, param3, param4], - logging_config=LoggingConfig('test-${SCRIPT}-${p1}')) + logging_config=LoggingConfig('test-${SCRIPT}-${p1}'), + 
path=os.path.join('conf', 'my_script.json')) config_model.set_all_param_values({'p1': 'abc', 'p3': True, 'p4': 987}) execution_id = self.executor_service.start_script( @@ -568,7 +569,7 @@ def test_logging_values(self): self.assertEqual('some text\nanother text', log) log_files = os.listdir(test_utils.temp_folder) - self.assertEqual(['test-my_script-abc.log'], log_files) + self.assertEqual(['test-my_script-abc.log', 'conf'], log_files) def test_exit_code(self): config_model = create_config_model( diff --git a/src/tests/execution_service_test.py b/src/tests/execution_service_test.py index 0e5346c2..9ffa4c23 100644 --- a/src/tests/execution_service_test.py +++ b/src/tests/execution_service_test.py @@ -10,7 +10,6 @@ from execution.execution_service import ExecutionService from execution.executor import create_process_wrapper from model.model_helper import AccessProhibitedException -from model.script_config import ConfigModel from tests import test_utils from tests.test_utils import mock_object, create_audit_names, _MockProcessWrapper, _IdGeneratorMock from utils import audit_utils @@ -441,10 +440,12 @@ def _start_with_config(execution_service, config, parameter_values=None, user_id def _create_script_config(parameter_configs): - config = ConfigModel( - {'name': 'script_x', - 'script_path': 'ls', - 'parameters': parameter_configs}, - 'script_x.json', 'user1', 'localhost', - test_utils.process_invoker) - return config + return test_utils.create_config_model( + 'script_x', + config={'name': 'script_x', + 'script_path': 'ls', + 'parameters': parameter_configs}, + username='user1', + audit_name='localhost', + + ) diff --git a/src/tests/scheduling/schedule_service_test.py b/src/tests/scheduling/schedule_service_test.py index 4f944d2a..ef32a423 100644 --- a/src/tests/scheduling/schedule_service_test.py +++ b/src/tests/scheduling/schedule_service_test.py @@ -60,7 +60,11 @@ def setUp(self) -> None: scheduler._sleep = MagicMock() scheduler._sleep.side_effect = lambda x: 
time.sleep(0.001) - self.config_service = ConfigService(AnyUserAuthorizer(), test_utils.temp_folder, test_utils.process_invoker) + self.config_service = ConfigService( + AnyUserAuthorizer(), + test_utils.temp_folder, + True, + test_utils.process_invoker) self.create_config('my_script_A') self.create_config('unschedulable-script', scheduling_enabled=False) diff --git a/src/tests/script_config_test.py b/src/tests/script_config_test.py index 1acfa8c6..cfd0c8c8 100644 --- a/src/tests/script_config_test.py +++ b/src/tests/script_config_test.py @@ -6,7 +6,7 @@ from config.constants import PARAM_TYPE_SERVER_FILE, PARAM_TYPE_MULTISELECT from config.exceptions import InvalidConfigException -from model.script_config import ConfigModel, InvalidValueException, TemplateProperty, ParameterNotFoundException, \ +from model.script_config import InvalidValueException, TemplateProperty, ParameterNotFoundException, \ get_sorted_config from model.value_wrapper import ScriptValueWrapper from react.properties import ObservableDict, ObservableList @@ -259,10 +259,10 @@ def test_list_files_for_valid_param(self): param = create_script_param_config('recurs_file', type=PARAM_TYPE_SERVER_FILE, file_recursive=True, - file_dir=test_utils.temp_folder) + file_dir=self.subfolder) config_model = _create_config_model('my_conf', parameters=[param]) - create_files(['file1', 'file2']) + create_files(['file1', 'file2'], 'sub') file_names = [f['name'] for f in (config_model.list_files_for_param('recurs_file', []))] self.assertCountEqual(['file1', 'file2'], file_names) @@ -271,14 +271,14 @@ def test_list_files_when_working_dir(self): type=PARAM_TYPE_SERVER_FILE, file_recursive=True, file_dir='.') - config_model = _create_config_model('my_conf', parameters=[param], working_dir=test_utils.temp_folder) + config_model = _create_config_model('my_conf', parameters=[param], working_dir=self.subfolder) - create_files(['file1', 'file2']) + create_files(['file1', 'file2'], 'sub') file_names = [f['name'] for f in 
(config_model.list_files_for_param('recurs_file', []))] self.assertCountEqual(['file1', 'file2'], file_names) def test_list_files_when_unknown_param(self): - config_model = _create_config_model('my_conf', parameters=[], working_dir=test_utils.temp_folder) + config_model = _create_config_model('my_conf', parameters=[], working_dir=self.subfolder) self.assertRaises(ParameterNotFoundException, config_model.list_files_for_param, 'recurs_file', []) @@ -286,6 +286,9 @@ def setUp(self): super().setUp() test_utils.setup() + self.subfolder = os.path.join(test_utils.temp_folder, 'sub') + os.mkdir(self.subfolder) + def tearDown(self): super().tearDown() test_utils.cleanup() @@ -399,14 +402,17 @@ def test_get_required_parameters(self): class ConfigModelIncludeTest(unittest.TestCase): def test_static_include_simple(self): included_path = test_utils.write_script_config({'script_path': 'ping google.com'}, 'included') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model('main_conf', script_path=None, config={'include': included_path}) self.assertEqual('ping google.com', config_model.script_command) def test_static_include_multiple_inclusions(self): included_path_1 = test_utils.write_script_config({'script_path': 'ping google.com'}, 'included1') + included_path_1 = file_utils.relative_path(included_path_1, test_utils.temp_folder) included_path_2 = test_utils.write_script_config( {'script_path': 'echo 123', 'working_directory': '123'}, 'included2') + included_path_2 = file_utils.relative_path(included_path_2, test_utils.temp_folder) config_model = _create_config_model( 'main_conf', script_path=None, @@ -419,6 +425,7 @@ def test_static_include_precedence(self): 'script_path': 'ping google.com', 'working_directory': '123'}, 'included') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model('main_conf', config={ 'include': included_path, 'working_directory': 
'abc'}) @@ -429,6 +436,7 @@ def test_static_include_single_parameter(self): included_path = test_utils.write_script_config({'parameters': [ create_script_param_config('param2', type='int') ]}, 'included1') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model('main_conf', config={ 'include': included_path, 'parameters': [create_script_param_config('param1', type='text')]}) @@ -448,12 +456,14 @@ def test_static_include_multiple_parameters_from_multiple_included(self): create_script_param_config('param2', type='int'), create_script_param_config('param3'), ]}, 'included1') + included_path_1 = file_utils.relative_path(included_path_1, test_utils.temp_folder) included_path_2 = test_utils.write_script_config({ 'parameters': [ create_script_param_config('param2', type='ip4'), create_script_param_config('param4'), create_script_param_config(None), ]}, 'included2') + included_path_2 = file_utils.relative_path(included_path_2, test_utils.temp_folder) config_model = _create_config_model('main_conf', config={ 'include': [included_path_1, included_path_2], @@ -482,6 +492,7 @@ def test_static_include_hidden_config(self): 'script_path': 'ping google.com', 'hidden': True}, 'included') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model('main_conf', script_path=None, config={'include': included_path}) self.assertEqual('ping google.com', config_model.script_command) @@ -546,6 +557,7 @@ def test_dynamic_include_relative_path(self): included_path = test_utils.write_script_config({'parameters': [ create_script_param_config('included_param') ]}, 'included', folder) + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) included_folder = os.path.dirname(included_path) config_model = _create_config_model( 'main_conf', @@ -554,7 +566,7 @@ def test_dynamic_include_relative_path(self): 'include': '${p1}', 'working_directory': 
included_folder, 'parameters': [create_script_param_config('p1')]}) - config_model.set_param_value('p1', 'included.json') + config_model.set_param_value('p1', included_path) self.assertEqual(2, len(config_model.parameters)) @@ -566,6 +578,7 @@ def test_dynamic_include_replace(self): included_path2 = test_utils.write_script_config({'parameters': [ create_script_param_config('included_param_Y') ]}, 'included2') + included_path2 = file_utils.relative_path(included_path2, test_utils.temp_folder) config_model.set_param_value('p1', included_path2) @@ -588,6 +601,7 @@ def test_set_all_values_for_included(self): create_script_param_config('included_param1'), create_script_param_config('included_param2') ]}, 'included') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model( 'main_conf', config={ @@ -604,6 +618,7 @@ def test_set_all_values_for_dependant_on_constant(self): included_path = test_utils.write_script_config({'parameters': [ create_script_param_config('included_param1', values_script='echo ${p1}'), ]}, 'included') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model( 'main_conf', config={ @@ -659,12 +674,14 @@ def test_dynamic_include_when_multiple_includes(self, param2_value, expected_des 'parameters': [ create_script_param_config('param3', type='int'), ]}, 'included1') + included_path_1 = file_utils.relative_path(included_path_1, test_utils.temp_folder) included_path_2 = test_utils.write_script_config({ 'description': 'test desc', 'parameters': [ create_script_param_config('param3', type='ip4'), create_script_param_config('param4') ]}, 'included2') + included_path_2 = file_utils.relative_path(included_path_2, test_utils.temp_folder) config_model = _create_config_model('main_conf', config={ 'include': ['${param1}', included_path_2[:-6] + '${param2}.json'], @@ -686,6 +703,7 @@ def test_dynamic_include_when_multiple_includes(self, 
param2_value, expected_des def prepare_config_model_with_included(self, included_params, static_param_name): included_path = test_utils.write_script_config({'parameters': included_params}, 'included') + included_path = file_utils.relative_path(included_path, test_utils.temp_folder) config_model = _create_config_model('main_conf', config={ 'include': '${' + static_param_name + '}', 'parameters': [create_script_param_config(static_param_name)]}) @@ -1079,6 +1097,7 @@ def test_create_with_schedulable_true_and_included_secure_parameter(self): another_path = test_utils.write_script_config( {'parameters': [{'name': 'p2', 'secure': True}]}, 'another_config') + another_path = file_utils.relative_path(another_path, test_utils.temp_folder) self.assertTrue(config_model.schedulable) @@ -1177,28 +1196,31 @@ def _create_config_model(name, *, parameters=None, parameter_values=None, working_dir=None, - script_path='echo 123', + script_path='DEFAULT', skip_invalid_parameters=False): result_config = {} - if script_path is not None: - result_config['script_path'] = script_path - if config: result_config.update(config) - result_config['name'] = name - - if parameters is not None: - result_config['parameters'] = parameters - - if path is None: - path = name - if working_dir is not None: result_config['working_directory'] = working_dir - model = ConfigModel(result_config, path, username, audit_name, test_utils.process_invoker) + if script_path == 'DEFAULT': + if config and 'script_path' in config: + script_path = None + else: + script_path = 'echo 123' + + model = test_utils.create_config_model( + name, + script_command=script_path, + config=result_config, + username=username, + audit_name=audit_name, + path=path, + parameters=parameters) + if parameter_values is not None: model.set_all_param_values(parameter_values, skip_invalid_parameters=skip_invalid_parameters) diff --git a/src/tests/test_utils.py b/src/tests/test_utils.py index 45c6ca15..7ec447e3 100644 --- 
a/src/tests/test_utils.py +++ b/src/tests/test_utils.py @@ -121,10 +121,15 @@ def mock_object(): def write_script_config(conf_object, filename, config_folder=None): if config_folder is None: config_folder = os.path.join(temp_folder, 'runners') - file_path = os.path.join(config_folder, filename + '.json') + + if not filename.endswith('.json'): + filename = filename + '.json' + + file_path = os.path.join(config_folder, filename) config_json = json.dumps(conf_object) file_utils.write_file(file_path, config_json) + return file_path @@ -222,7 +227,7 @@ def create_config_model(name, *, script_command='ls', output_files=None, requires_terminal=None, - schedulable=True, + schedulable=None, logging_config: LoggingConfig = None, output_format=None): result_config = {} @@ -236,7 +241,9 @@ def create_config_model(name, *, result_config['parameters'] = parameters if path is None: - path = name + path = create_file(name + '.json', text='{}', overwrite=True) + elif not os.path.exists(path): + path = create_file(path, text='{}', overwrite=True) if output_files is not None: result_config['output_files'] = output_files @@ -245,7 +252,10 @@ def create_config_model(name, *, result_config['requires_terminal'] = requires_terminal if schedulable is not None: - result_config['scheduling'] = {'enabled': schedulable} + if 'scheduling' in result_config: + result_config['scheduling']['enabled'] = schedulable + else: + result_config['scheduling'] = {'enabled': schedulable} if output_format: result_config['output_format'] = output_format @@ -255,9 +265,17 @@ def create_config_model(name, *, 'execution_file': logging_config.filename_pattern, 'execution_date_format': logging_config.date_format} - result_config['script_path'] = script_command + if script_command: + result_config['script_path'] = script_command - model = ConfigModel(result_config, path, username, audit_name, process_invoker) + model = ConfigModel( + result_config, + path, + username, + audit_name, + True, + temp_folder, + 
process_invoker) if parameter_values is not None: model.set_all_param_values(parameter_values) diff --git a/src/tests/web/script_config_socket_test.py b/src/tests/web/script_config_socket_test.py index 74a4be46..176429d8 100644 --- a/src/tests/web/script_config_socket_test.py +++ b/src/tests/web/script_config_socket_test.py @@ -234,7 +234,7 @@ def setUp(self): application.authorizer = Authorizer(ANY_USER, [], [], [], EmptyGroupProvider()) application.identification = IpBasedIdentification(TrustedIpValidator(['127.0.0.1']), None) application.config_service = ConfigService( - application.authorizer, test_utils.temp_folder, test_utils.process_invoker) + application.authorizer, test_utils.temp_folder, True, test_utils.process_invoker) server = httpserver.HTTPServer(application) socket, self.port = testing.bind_unused_port() diff --git a/src/tests/web/server_test.py b/src/tests/web/server_test.py index 7d19a3e8..7a4abbbe 100644 --- a/src/tests/web/server_test.py +++ b/src/tests/web/server_test.py @@ -295,7 +295,7 @@ def start_server(self, port, address, *, xsrf_protection=XSRF_PROTECTION_TOKEN): execution_service, MagicMock(), MagicMock(), - ConfigService(authorizer, self.conf_folder, test_utils.process_invoker), + ConfigService(authorizer, self.conf_folder, True, test_utils.process_invoker), MagicMock(), FileUploadFeature(UserFileStorage(cookie_secret), test_utils.temp_folder), file_download_feature, diff --git a/src/utils/file_utils.py b/src/utils/file_utils.py index c2b87ab4..3004b6cf 100644 --- a/src/utils/file_utils.py +++ b/src/utils/file_utils.py @@ -33,7 +33,7 @@ def is_root(path): return os.path.dirname(path) == path -def normalize_path(path_string, current_folder=None): +def normalize_path(path_string, current_folder=None, follow_symlinks=True): path_string = os.path.expanduser(path_string) path_string = os.path.normpath(path_string) @@ -41,13 +41,16 @@ def normalize_path(path_string, current_folder=None): return path_string if current_folder: - 
normalized_folder = normalize_path(current_folder) + normalized_folder = normalize_path(current_folder, follow_symlinks=follow_symlinks) return os.path.join(normalized_folder, path_string) if not os.path.exists(path_string): return path_string - return str(pathlib.Path(path_string).resolve()) + if follow_symlinks: + return str(pathlib.Path(path_string).resolve()) + else: + return str(pathlib.Path(path_string).absolute()) def read_file(filename, byte_content=False, keep_newlines=False): @@ -143,17 +146,22 @@ def last_modification(folder_paths): def relative_path(path, parent_path): - path = normalize_path(path) - parent_path = normalize_path(parent_path) - - if os_utils.is_win(): - path = path.capitalize() - parent_path = parent_path.capitalize() - - if not path.startswith(parent_path): - raise ValueError(path + ' is not subpath of ' + parent_path) - - relative_path = path[len(parent_path):] + def normalize(path, follow_symlinks=True): + path = normalize_path(path, follow_symlinks=follow_symlinks) + if os_utils.is_win(): + path = path.capitalize() + return path + + normalized_path = normalize(path) + normalized_parent_path = normalize(parent_path) + + if not normalized_path.startswith(normalized_parent_path): + normalized_path = normalize(path, follow_symlinks=False) + normalized_parent_path = normalize(parent_path, follow_symlinks=False) + if not normalized_path.startswith(normalized_parent_path): + raise ValueError(path + ' is not subpath of ' + parent_path) + + relative_path = normalized_path[len(normalized_parent_path):] if relative_path.startswith(os.path.sep): return relative_path[1:] From 44bd5f2cde91f82bee0b881c0c926276ab24551c Mon Sep 17 00:00:00 2001 From: yshepilov Date: Sun, 15 Oct 2023 17:57:39 +0200 Subject: [PATCH 345/398] #699 added automatic script groups based on sub-folders --- src/model/script_config.py | 2 +- src/tests/config_service_test.py | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/model/script_config.py 
b/src/model/script_config.py index 77787204..1574f85e 100644 --- a/src/model/script_config.py +++ b/src/model/script_config.py @@ -370,7 +370,7 @@ def read_short(file_path, json_object, group_by_folders: bool, script_configs_fo allowed_users = json_object.get('allowed_users') admin_users = json_object.get('admin_users') group = read_str_from_config(json_object, 'group', blank_to_none=True) - if not group and group_by_folders: + if ('group' not in json_object) and group_by_folders: relative_path = file_utils.relative_path(file_path, script_configs_folder) while os.path.dirname(relative_path): relative_path = os.path.dirname(relative_path) diff --git a/src/tests/config_service_test.py b/src/tests/config_service_test.py index 5291cb7f..c1430bfd 100644 --- a/src/tests/config_service_test.py +++ b/src/tests/config_service_test.py @@ -126,8 +126,11 @@ def test_load_config_with_slash_in_name(self): def test_list_configs_when_multiple_subfolders_and_symlink(self): def create_config_file(name, relative_path, group=None): filename = os.path.basename(relative_path) + config = {'name': name} + if group is not None: + config['group'] = group test_utils.write_script_config( - {'name': name, 'group': group}, + config, filename, config_folder=os.path.join(test_utils.temp_folder, 'runners', os.path.dirname(relative_path))) @@ -139,6 +142,7 @@ def create_config_file(name, relative_path, group=None): create_config_file('conf A', 'conf_a.json') create_config_file('conf B', os.path.join('b', 'conf_b.json')) create_config_file('conf C', os.path.join('c', 'conf_c.json'), group='test group') + create_config_file('conf D', os.path.join('d', 'conf_d.json'), group='') configs = self.config_service.list_configs(self.user) actual_name_group_map = {c.name: c.group for c in configs} @@ -150,7 +154,8 @@ def create_config_file(name, relative_path, group=None): 'conf Z': 'sub', 'conf A': None, 'conf B': 'b', - 'conf C': 'test group'}, + 'conf C': 'test group', + 'conf D': None}, ) def 
tearDown(self): From 10f26e2fe1eb7e3cb5edf4869cc8c456ad8e587b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 01:40:19 +0000 Subject: [PATCH 346/398] Bump @babel/traverse from 7.21.4 to 7.23.2 in /web-src Bumps [@babel/traverse](https://github.com/babel/babel/tree/HEAD/packages/babel-traverse) from 7.21.4 to 7.23.2. - [Release notes](https://github.com/babel/babel/releases) - [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md) - [Commits](https://github.com/babel/babel/commits/v7.23.2/packages/babel-traverse) --- updated-dependencies: - dependency-name: "@babel/traverse" dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 314 +++++++++++++++++++++++++------------- 1 file changed, 204 insertions(+), 110 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index 669f389c..bdef78f9 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -181,12 +181,13 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.21.4.tgz", - "integrity": "sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==", + "version": "7.22.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", + "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", "dev": true, "dependencies": { - "@babel/highlight": "^7.18.6" + "@babel/highlight": "^7.22.13", + "chalk": "^2.4.2" }, "engines": { "node": ">=6.9.0" @@ -360,9 +361,9 @@ } }, "node_modules/@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": 
"sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", + "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", "dev": true, "engines": { "node": ">=6.9.0" @@ -381,25 +382,25 @@ } }, "node_modules/@babel/helper-function-name": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", - "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", + "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", "dev": true, "dependencies": { - "@babel/template": "^7.20.7", - "@babel/types": "^7.21.0" + "@babel/template": "^7.22.15", + "@babel/types": "^7.23.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", + "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", "dev": true, "dependencies": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" @@ -529,30 +530,30 @@ } }, "node_modules/@babel/helper-split-export-declaration": { - "version": "7.18.6", - "resolved": 
"https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", - "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "version": "7.22.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", + "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", "dev": true, "dependencies": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.19.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", - "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", + "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", "dev": true, "engines": { "node": ">=6.9.0" @@ -597,13 +598,13 @@ } }, "node_modules/@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": 
"sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", + "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", + "@babel/helper-validator-identifier": "^7.22.20", + "chalk": "^2.4.2", "js-tokens": "^4.0.0" }, "engines": { @@ -2011,33 +2012,45 @@ } }, "node_modules/@babel/template": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", - "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", + "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", "dev": true, "dependencies": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7" + "@babel/code-frame": "^7.22.13", + "@babel/parser": "^7.22.15", + "@babel/types": "^7.22.15" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.4.tgz", - "integrity": "sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==", + "node_modules/@babel/template/node_modules/@babel/parser": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", + "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", "dev": true, - "dependencies": { - "@babel/code-frame": "^7.21.4", - "@babel/generator": "^7.21.4", - "@babel/helper-environment-visitor": "^7.18.9", - 
"@babel/helper-function-name": "^7.21.0", - "@babel/helper-hoist-variables": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.21.4", - "@babel/types": "^7.21.4", + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz", + "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.22.13", + "@babel/generator": "^7.23.0", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.6", + "@babel/parser": "^7.23.0", + "@babel/types": "^7.23.0", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -2045,14 +2058,55 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/traverse/node_modules/@babel/generator": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz", + "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", + "dev": true, + "dependencies": { + "@babel/types": "^7.23.0", + "@jridgewell/gen-mapping": "^0.3.2", + "@jridgewell/trace-mapping": "^0.3.17", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/@babel/parser": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", + "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/traverse/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.3", + "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", + "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@babel/types": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.4.tgz", - "integrity": "sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", + "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.19.4", - "@babel/helper-validator-identifier": "^7.19.1", + "@babel/helper-string-parser": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.20", "to-fast-properties": "^2.0.0" }, "engines": { @@ -25158,12 +25212,13 @@ } }, "@babel/code-frame": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.21.4.tgz", - "integrity": "sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==", + "version": "7.22.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", + "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", "dev": true, "requires": { - "@babel/highlight": "^7.18.6" + "@babel/highlight": "^7.22.13", + "chalk": "^2.4.2" } }, "@babel/compat-data": { @@ -25293,9 +25348,9 @@ } }, "@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": 
"sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", + "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", "dev": true }, "@babel/helper-explode-assignable-expression": { @@ -25308,22 +25363,22 @@ } }, "@babel/helper-function-name": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", - "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", + "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", "dev": true, "requires": { - "@babel/template": "^7.20.7", - "@babel/types": "^7.21.0" + "@babel/template": "^7.22.15", + "@babel/types": "^7.23.0" } }, "@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", + "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", "dev": true, "requires": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" } }, "@babel/helper-member-expression-to-functions": { @@ -25420,24 +25475,24 @@ } }, "@babel/helper-split-export-declaration": { - "version": "7.18.6", - "resolved": 
"https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", - "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "version": "7.22.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", + "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", "dev": true, "requires": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" } }, "@babel/helper-string-parser": { - "version": "7.19.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", - "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", + "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", "dev": true }, "@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", "dev": true }, "@babel/helper-validator-option": { @@ -25470,13 +25525,13 @@ } }, "@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + 
"version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", + "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", + "@babel/helper-validator-identifier": "^7.22.20", + "chalk": "^2.4.2", "js-tokens": "^4.0.0" } }, @@ -26421,42 +26476,81 @@ } }, "@babel/template": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", - "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", + "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", "dev": true, "requires": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7" + "@babel/code-frame": "^7.22.13", + "@babel/parser": "^7.22.15", + "@babel/types": "^7.22.15" + }, + "dependencies": { + "@babel/parser": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", + "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", + "dev": true + } } }, "@babel/traverse": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.4.tgz", - "integrity": "sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.21.4", - "@babel/generator": "^7.21.4", - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-function-name": "^7.21.0", - "@babel/helper-hoist-variables": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.21.4", - "@babel/types": 
"^7.21.4", + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz", + "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.22.13", + "@babel/generator": "^7.23.0", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.6", + "@babel/parser": "^7.23.0", + "@babel/types": "^7.23.0", "debug": "^4.1.0", "globals": "^11.1.0" + }, + "dependencies": { + "@babel/generator": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz", + "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", + "dev": true, + "requires": { + "@babel/types": "^7.23.0", + "@jridgewell/gen-mapping": "^0.3.2", + "@jridgewell/trace-mapping": "^0.3.17", + "jsesc": "^2.5.1" + } + }, + "@babel/parser": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", + "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", + "dev": true + }, + "@jridgewell/gen-mapping": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", + "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "dev": true, + "requires": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + } } }, "@babel/types": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.4.tgz", - "integrity": "sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==", + "version": "7.23.0", + 
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", + "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", "dev": true, "requires": { - "@babel/helper-string-parser": "^7.19.4", - "@babel/helper-validator-identifier": "^7.19.1", + "@babel/helper-string-parser": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.20", "to-fast-properties": "^2.0.0" } }, From 311e3681a6e23487cd128445b93740b5012ae05a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 28 Oct 2023 00:54:11 +0000 Subject: [PATCH 347/398] Bump browserify-sign from 4.2.1 to 4.2.2 in /web-src Bumps [browserify-sign](https://github.com/crypto-browserify/browserify-sign) from 4.2.1 to 4.2.2. - [Changelog](https://github.com/browserify/browserify-sign/blob/main/CHANGELOG.md) - [Commits](https://github.com/crypto-browserify/browserify-sign/compare/v4.2.1...v4.2.2) --- updated-dependencies: - dependency-name: browserify-sign dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 51 +++++++++++++++++++++------------------ 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index bdef78f9..26ded486 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -6674,26 +6674,29 @@ } }, "node_modules/browserify-sign": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", - "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.2.tgz", + "integrity": "sha512-1rudGyeYY42Dk6texmv7c4VcQ0EsvVbLwZkA+AQB7SxvXxmcD93jcHie8bzecJ+ChDlmAm2Qyu0+Ccg5uhZXCg==", "dev": true, "dependencies": { - "bn.js": "^5.1.1", - "browserify-rsa": "^4.0.1", + "bn.js": "^5.2.1", + "browserify-rsa": "^4.1.0", "create-hash": "^1.2.0", "create-hmac": "^1.1.7", - "elliptic": "^6.5.3", + "elliptic": "^6.5.4", "inherits": "^2.0.4", - "parse-asn1": "^5.1.5", - "readable-stream": "^3.6.0", - "safe-buffer": "^5.2.0" + "parse-asn1": "^5.1.6", + "readable-stream": "^3.6.2", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 4" } }, "node_modules/browserify-sign/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dev": true, "dependencies": { "inherits": "^2.0.3", @@ -30280,26 +30283,26 @@ } }, "browserify-sign": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", - "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.2.tgz", + "integrity": "sha512-1rudGyeYY42Dk6texmv7c4VcQ0EsvVbLwZkA+AQB7SxvXxmcD93jcHie8bzecJ+ChDlmAm2Qyu0+Ccg5uhZXCg==", "dev": true, "requires": { - "bn.js": "^5.1.1", - "browserify-rsa": "^4.0.1", + "bn.js": "^5.2.1", + "browserify-rsa": "^4.1.0", "create-hash": "^1.2.0", "create-hmac": "^1.1.7", - "elliptic": "^6.5.3", + "elliptic": "^6.5.4", "inherits": "^2.0.4", - "parse-asn1": "^5.1.5", - "readable-stream": "^3.6.0", - "safe-buffer": "^5.2.0" + "parse-asn1": "^5.1.6", + "readable-stream": "^3.6.2", + "safe-buffer": "^5.2.1" }, "dependencies": { "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dev": true, "requires": { "inherits": "^2.0.3", From 550c9f654ae82c201b01ef1f63011d91fe19cb1e Mon Sep 17 00:00:00 2001 From: Vo Van Nghia Date: Tue, 7 Nov 2023 12:11:55 +0100 Subject: [PATCH 348/398] enable multiarch image --- tools/deploy_docker.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tools/deploy_docker.sh b/tools/deploy_docker.sh index 5ac6586e..fed4828d 100755 --- a/tools/deploy_docker.sh +++ b/tools/deploy_docker.sh @@ -19,9 +19,11 @@ else DOCKER_TAG="$TRAVIS_BRANCH" fi +docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD" -docker build -f tools/Dockerfile -t 
"$IMAGE_NAME":"$DOCKER_TAG" . +docker buildx build --platform linux/amd64,linux/arm64 -f tools/Dockerfile -t "$IMAGE_NAME":"$DOCKER_TAG" . echo "NEW_GIT_TAG=$NEW_GIT_TAG" if [ ! -z "$NEW_GIT_TAG" ]; then From b558807fb7943074338cf7b53be4599af6b2c6a6 Mon Sep 17 00:00:00 2001 From: Vo Van Nghia Date: Tue, 7 Nov 2023 18:17:43 +0100 Subject: [PATCH 349/398] install docker buildx --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index cfb1fcfd..b63bb245 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,6 +31,9 @@ before_install: - sudo apt-get -y install python3-pip python3-setuptools apache2-utils python3-venv - wget https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/116.0.5845.96/linux64/chromedriver-linux64.zip - unzip chromedriver-linux64.zip -d $HOME/.local/bin + - mkdir -vp ~/.docker/cli-plugins/ + - curl --silent -L "https://github.com/docker/buildx/releases/download/v0.11.2/buildx-v0.11.2.linux-amd64" > ~/.docker/cli-plugins/docker-buildx + - chmod a+x ~/.docker/cli-plugins/docker-buildx install: - pip3 install -r requirements.txt - pip3 install pyasn1 --upgrade From bbc50d5e22d1c1aed86961ae69962e9137607d56 Mon Sep 17 00:00:00 2001 From: Vo Van Nghia Date: Wed, 8 Nov 2023 08:38:18 +0100 Subject: [PATCH 350/398] create new docker builder --- tools/deploy_docker.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/deploy_docker.sh b/tools/deploy_docker.sh index fed4828d..73306b4a 100755 --- a/tools/deploy_docker.sh +++ b/tools/deploy_docker.sh @@ -23,11 +23,11 @@ docker run --rm --privileged multiarch/qemu-user-static --reset -p yes docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD" -docker buildx build --platform linux/amd64,linux/arm64 -f tools/Dockerfile -t "$IMAGE_NAME":"$DOCKER_TAG" . +docker buildx create --use +docker buildx build --platform linux/amd64,linux/arm64 --push -f tools/Dockerfile -t "$IMAGE_NAME":"$DOCKER_TAG" . echo "NEW_GIT_TAG=$NEW_GIT_TAG" if [ ! 
-z "$NEW_GIT_TAG" ]; then docker tag "$IMAGE_NAME":"$DOCKER_TAG" "$IMAGE_NAME":"$NEW_GIT_TAG" + docker push "$IMAGE_NAME":"$NEW_GIT_TAG" fi - -docker push --all-tags "$IMAGE_NAME" From bdea0f01e8c139b929400263774d6fe8347beada Mon Sep 17 00:00:00 2001 From: yshepilov Date: Wed, 8 Nov 2023 09:10:43 +0100 Subject: [PATCH 351/398] fix multi-tag docker image build --- tools/deploy_docker.sh | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tools/deploy_docker.sh b/tools/deploy_docker.sh index 73306b4a..9c7c582c 100755 --- a/tools/deploy_docker.sh +++ b/tools/deploy_docker.sh @@ -23,11 +23,13 @@ docker run --rm --privileged multiarch/qemu-user-static --reset -p yes docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD" -docker buildx create --use -docker buildx build --platform linux/amd64,linux/arm64 --push -f tools/Dockerfile -t "$IMAGE_NAME":"$DOCKER_TAG" . - -echo "NEW_GIT_TAG=$NEW_GIT_TAG" +ADDITIONAL_TAG_ARG="" if [ ! -z "$NEW_GIT_TAG" ]; then - docker tag "$IMAGE_NAME":"$DOCKER_TAG" "$IMAGE_NAME":"$NEW_GIT_TAG" - docker push "$IMAGE_NAME":"$NEW_GIT_TAG" + ADDITIONAL_TAG_ARG="-t '$IMAGE_NAME:$NEW_GIT_TAG'" fi + +docker buildx create --use +docker buildx build --platform linux/amd64,linux/arm64 --push -f tools/Dockerfile \ + -t "$IMAGE_NAME":"$DOCKER_TAG" \ + $ADDITIONAL_TAG_ARG \ + . From d47b6bcea792d437a7a4d0fca2b9d57c8bfae883 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Wed, 8 Nov 2023 10:01:32 +0100 Subject: [PATCH 352/398] fix multi-tag docker image build --- tools/deploy_docker.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/deploy_docker.sh b/tools/deploy_docker.sh index 9c7c582c..c9bba19d 100755 --- a/tools/deploy_docker.sh +++ b/tools/deploy_docker.sh @@ -25,7 +25,7 @@ docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD" ADDITIONAL_TAG_ARG="" if [ ! 
-z "$NEW_GIT_TAG" ]; then - ADDITIONAL_TAG_ARG="-t '$IMAGE_NAME:$NEW_GIT_TAG'" + ADDITIONAL_TAG_ARG="-t $IMAGE_NAME:$NEW_GIT_TAG" fi docker buildx create --use From 3550ae4a037cd47a249b9a6896d1ab0f058e7f1e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 11 Nov 2023 05:54:05 +0000 Subject: [PATCH 353/398] Bump axios from 0.27.2 to 1.6.0 in /web-src Bumps [axios](https://github.com/axios/axios) from 0.27.2 to 1.6.0. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v0.27.2...v1.6.0) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 34 +++++++++++++++++++++++----------- web-src/package.json | 2 +- 2 files changed, 24 insertions(+), 12 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index 26ded486..e7354e3a 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -9,7 +9,7 @@ "version": "1.18.0", "dependencies": { "ace-builds": "^1.11.2", - "axios": "^0.27.2", + "axios": "^1.6.0", "brace": "^0.11.1", "codemirror": "^5.65.9", "core-js": "^3.25.3", @@ -6114,12 +6114,13 @@ "dev": true }, "node_modules/axios": { - "version": "0.27.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", - "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.0.tgz", + "integrity": "sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==", "dependencies": { - "follow-redirects": "^1.14.9", - "form-data": "^4.0.0" + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" } }, "node_modules/axios-mock-adapter": { @@ -18180,6 
+18181,11 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, "node_modules/prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", @@ -29812,12 +29818,13 @@ "dev": true }, "axios": { - "version": "0.27.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", - "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.0.tgz", + "integrity": "sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==", "requires": { - "follow-redirects": "^1.14.9", - "form-data": "^4.0.0" + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" }, "dependencies": { "form-data": { @@ -39456,6 +39463,11 @@ "ipaddr.js": "1.9.1" } }, + "proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, "prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", diff --git a/web-src/package.json b/web-src/package.json index 62cbb500..7d54c86a 100644 --- a/web-src/package.json +++ b/web-src/package.json @@ -4,7 +4,7 @@ "private": true, "dependencies": { "ace-builds": "^1.11.2", - "axios": "^0.27.2", + "axios": "^1.6.0", "brace": "^0.11.1", "codemirror": "^5.65.9", "core-js": "^3.25.3", From e44f10ce6f971d6a85f97c992afa6d04548fcda4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Nov 2023 11:06:29 +0000 Subject: [PATCH 354/398] Bump the 
npm_and_yarn at /web-src security update group Bumps the npm_and_yarn at /web-src security update group in /web-src with 1 update: [rss-parser](https://github.com/bobby-brennan/rss-parser). - [Commits](https://github.com/bobby-brennan/rss-parser/compare/v3.12.0...v3.13.0) --- updated-dependencies: - dependency-name: rss-parser dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index e7354e3a..0dcf8ecb 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -18968,13 +18968,13 @@ } }, "node_modules/rss-parser": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/rss-parser/-/rss-parser-3.12.0.tgz", - "integrity": "sha512-aqD3E8iavcCdkhVxNDIdg1nkBI17jgqF+9OqPS1orwNaOgySdpvq6B+DoONLhzjzwV8mWg37sb60e4bmLK117A==", + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/rss-parser/-/rss-parser-3.13.0.tgz", + "integrity": "sha512-7jWUBV5yGN3rqMMj7CZufl/291QAhvrrGpDNE4k/02ZchL0npisiYYqULF71jCEKoIiHvK/Q2e6IkDwPziT7+w==", "dev": true, "dependencies": { "entities": "^2.0.3", - "xml2js": "^0.4.19" + "xml2js": "^0.5.0" } }, "node_modules/run-async": { @@ -24850,9 +24850,9 @@ } }, "node_modules/xml2js": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", - "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", "dev": true, "dependencies": { "sax": ">=0.6.0", @@ -40092,13 +40092,13 @@ } }, "rss-parser": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/rss-parser/-/rss-parser-3.12.0.tgz", - "integrity": 
"sha512-aqD3E8iavcCdkhVxNDIdg1nkBI17jgqF+9OqPS1orwNaOgySdpvq6B+DoONLhzjzwV8mWg37sb60e4bmLK117A==", + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/rss-parser/-/rss-parser-3.13.0.tgz", + "integrity": "sha512-7jWUBV5yGN3rqMMj7CZufl/291QAhvrrGpDNE4k/02ZchL0npisiYYqULF71jCEKoIiHvK/Q2e6IkDwPziT7+w==", "dev": true, "requires": { "entities": "^2.0.3", - "xml2js": "^0.4.19" + "xml2js": "^0.5.0" } }, "run-async": { @@ -44857,9 +44857,9 @@ "requires": {} }, "xml2js": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", - "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", "dev": true, "requires": { "sax": ">=0.6.0", From d587f0e25e1b34921adb64fa69c2c99f9dc270b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 21:02:03 +0000 Subject: [PATCH 355/398] Bump @adobe/css-tools from 4.0.1 to 4.3.2 in /web-src Bumps [@adobe/css-tools](https://github.com/adobe/css-tools) from 4.0.1 to 4.3.2. - [Changelog](https://github.com/adobe/css-tools/blob/main/History.md) - [Commits](https://github.com/adobe/css-tools/commits) --- updated-dependencies: - dependency-name: "@adobe/css-tools" dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index e7354e3a..39ca2a70 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -75,9 +75,9 @@ } }, "node_modules/@adobe/css-tools": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.0.1.tgz", - "integrity": "sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.3.2.tgz", + "integrity": "sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw==", "dev": true }, "node_modules/@akryum/winattr": { @@ -25136,9 +25136,9 @@ } }, "@adobe/css-tools": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.0.1.tgz", - "integrity": "sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.3.2.tgz", + "integrity": "sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw==", "dev": true }, "@akryum/winattr": { From ccf3dc6c93343537ac9ac43826f0eae49b829075 Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 1 Dec 2023 11:02:01 +0100 Subject: [PATCH 356/398] fixed mjs files compilation --- web-src/vue.config.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/web-src/vue.config.js b/web-src/vue.config.js index 71527dd1..9de5836d 100644 --- a/web-src/vue.config.js +++ b/web-src/vue.config.js @@ -59,6 +59,8 @@ module.exports = { const IS_VENDOR = /[\\/]node_modules[\\/]/; + config.resolve.extensions.prepend('.mjs') + // ATTENTION! 
do not use minSize/maxSize until vue-cli moved to the 4th version of html-webpack-plugin // Otherwise plugin won't be able to find split packages config.optimization From 7ea018023532d42b101fef2ebf7b49ded2ec6b1a Mon Sep 17 00:00:00 2001 From: yshepilov Date: Fri, 1 Dec 2023 11:49:57 +0100 Subject: [PATCH 357/398] fixed mjs files compilation --- web-src/package-lock.json | 12 ++++++------ web-src/vue.config.js | 4 +--- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index 39ca2a70..e7354e3a 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -75,9 +75,9 @@ } }, "node_modules/@adobe/css-tools": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.3.2.tgz", - "integrity": "sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.0.1.tgz", + "integrity": "sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==", "dev": true }, "node_modules/@akryum/winattr": { @@ -25136,9 +25136,9 @@ } }, "@adobe/css-tools": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.3.2.tgz", - "integrity": "sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.0.1.tgz", + "integrity": "sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==", "dev": true }, "@akryum/winattr": { diff --git a/web-src/vue.config.js b/web-src/vue.config.js index 9de5836d..93b805e1 100644 --- a/web-src/vue.config.js +++ b/web-src/vue.config.js @@ -59,8 +59,6 @@ module.exports = { const IS_VENDOR = /[\\/]node_modules[\\/]/; - config.resolve.extensions.prepend('.mjs') - // ATTENTION! 
do not use minSize/maxSize until vue-cli moved to the 4th version of html-webpack-plugin // Otherwise plugin won't be able to find split packages config.optimization @@ -109,7 +107,7 @@ module.exports = { 'karma-webpack', 'karma-mocha', 'karma-mocha-reporter', - 'karma-allure-reporter', + 'karma-allure-reporter' ] } } From 9a88f7664e08ce44254016648379dac931b56f07 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Jan 2024 06:44:22 +0000 Subject: [PATCH 358/398] Bump follow-redirects from 1.15.2 to 1.15.4 in /web-src Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.2 to 1.15.4. - [Release notes](https://github.com/follow-redirects/follow-redirects/releases) - [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.15.2...v1.15.4) --- updated-dependencies: - dependency-name: follow-redirects dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index e7354e3a..aec692fc 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -11038,9 +11038,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", "funding": [ { "type": "individual", @@ -33789,9 +33789,9 @@ } }, "follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": 
"sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==" + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==" }, "for-each": { "version": "0.3.3", From 5a1b1730a88840ce100436679b3b69c0d65dd813 Mon Sep 17 00:00:00 2001 From: Yogendra Singh Date: Fri, 19 Jan 2024 16:05:38 +0530 Subject: [PATCH 359/398] fix: XSS attack via next login parameter. --- web-src/src/login/login.js | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index f466ca4f..6ce28f1d 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -34,6 +34,13 @@ function checkRedirectReason() { return redirectReason; } +function validateURL(url) { + if (!url || url.startsWith('http') || url.startsWith('/')) { + return url; + } + return '/'; +} + function onLoad() { axiosInstance.get('auth/config').then(({data: config}) => { const loginContainer = document.getElementById('login-content-container'); @@ -109,7 +116,7 @@ function setupOAuth(loginContainer, authConfig, templateName, buttonId) { 'token': token, 'urlFragment': window.location.hash }; - localState[NEXT_URL_KEY] = getQueryParameter(NEXT_URL_KEY); + localState[NEXT_URL_KEY] = validateURL(getQueryParameter(NEXT_URL_KEY)); saveState(localState); @@ -138,7 +145,7 @@ function processCurrentOauthState() { return; } - var nextUrl = oauthState[NEXT_URL_KEY]; + var nextUrl = validateURL(oauthState[NEXT_URL_KEY]); var urlFragment = oauthState['urlFragment']; var previousLocation = getUnparameterizedUrl(); @@ -177,7 +184,7 @@ function getLoginButton() { function sendLoginRequest(formData) { - var nextUrl = getQueryParameter(NEXT_URL_KEY); + var nextUrl = validateURL(getQueryParameter(NEXT_URL_KEY)); var nextUrlFragment = window.location.hash; if 
(nextUrl) { From b077c7fbf17557cb13c7a1de755fa5d014f24b08 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Feb 2024 03:47:19 +0000 Subject: [PATCH 360/398] Bump ip from 1.1.8 to 1.1.9 in /web-src Bumps [ip](https://github.com/indutny/node-ip) from 1.1.8 to 1.1.9. - [Commits](https://github.com/indutny/node-ip/compare/v1.1.8...v1.1.9) --- updated-dependencies: - dependency-name: ip dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index aec692fc..65412c09 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -12668,9 +12668,9 @@ } }, "node_modules/ip": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz", - "integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz", + "integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ==", "dev": true }, "node_modules/ip-regex": { @@ -35050,9 +35050,9 @@ } }, "ip": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz", - "integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz", + "integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ==", "dev": true }, "ip-regex": { From 6ddf2462f94ebd021af3b8cbbdaf7d6f33d393a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Mar 2024 22:49:24 +0000 Subject: [PATCH 361/398] Bump follow-redirects from 1.15.4 to 1.15.6 in /web-src Bumps 
[follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.4 to 1.15.6. - [Release notes](https://github.com/follow-redirects/follow-redirects/releases) - [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.15.4...v1.15.6) --- updated-dependencies: - dependency-name: follow-redirects dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index 65412c09..337539f9 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -11038,9 +11038,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", - "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -33789,9 +33789,9 @@ } }, "follow-redirects": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", - "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==" + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==" }, "for-each": { "version": "0.3.3", From 6e8a21105f9d6dbdf83a49290b78b403c28951f3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 10:58:13 +0000 Subject: [PATCH 362/398] Bump express from 4.18.1 to 4.19.2 in 
/web-src Bumps [express](https://github.com/expressjs/express) from 4.18.1 to 4.19.2. - [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/master/History.md) - [Commits](https://github.com/expressjs/express/compare/4.18.1...4.19.2) --- updated-dependencies: - dependency-name: express dependency-type: indirect ... Signed-off-by: dependabot[bot] --- web-src/package-lock.json | 96 +++++++++++++++++++-------------------- 1 file changed, 48 insertions(+), 48 deletions(-) diff --git a/web-src/package-lock.json b/web-src/package-lock.json index 337539f9..ef55d1e9 100644 --- a/web-src/package-lock.json +++ b/web-src/package-lock.json @@ -6410,21 +6410,21 @@ "dev": true }, "node_modules/body-parser": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz", - "integrity": "sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==", + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", "dev": true, "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.10.3", - "raw-body": "2.5.1", + "qs": "6.11.0", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -8038,9 +8038,9 @@ ] }, "node_modules/content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": 
"sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", "dev": true, "engines": { "node": ">= 0.6" @@ -8056,9 +8056,9 @@ } }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "dev": true, "engines": { "node": ">= 0.6" @@ -10509,17 +10509,17 @@ } }, "node_modules/express": { - "version": "4.18.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.1.tgz", - "integrity": "sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dev": true, "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.0", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -10535,7 +10535,7 @@ "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", - "qs": "6.10.3", + "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", @@ -18297,9 +18297,9 @@ } }, "node_modules/qs": { - "version": "6.10.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", - "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", "dev": true, "dependencies": { "side-channel": "^1.0.4" @@ -18399,9 +18399,9 @@ } }, "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dev": true, "dependencies": { "bytes": "3.1.2", @@ -30056,21 +30056,21 @@ "dev": true }, "body-parser": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz", - "integrity": "sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==", + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", "dev": true, "requires": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.10.3", - "raw-body": "2.5.1", + "qs": "6.11.0", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -31372,9 +31372,9 @@ } }, "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": 
"sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", "dev": true }, "convert-source-map": { @@ -31387,9 +31387,9 @@ } }, "cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "dev": true }, "cookie-signature": { @@ -33368,17 +33368,17 @@ } }, "express": { - "version": "4.18.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.1.tgz", - "integrity": "sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dev": true, "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.0", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -33394,7 +33394,7 @@ "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", - "qs": "6.10.3", + "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", @@ -39570,9 +39570,9 @@ "dev": true }, "qs": { - "version": "6.10.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", - "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", "dev": true, "requires": { "side-channel": "^1.0.4" @@ -39639,9 +39639,9 @@ "dev": true }, "raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dev": true, "requires": { "bytes": "3.1.2", From 1a34ee877fbd9c9abc590b9f73fa5a9af6aec732 Mon Sep 17 00:00:00 2001 From: Lionel Zhang Date: Mon, 1 Apr 2024 14:56:34 -0700 Subject: [PATCH 363/398] feat: add support for azure ad oauth --- src/auth/auth_azure_ad_oauth.py | 34 +++++++++++++++++++++++++++ src/model/server_conf.py | 3 +++ src/tests/server_conf_test.py | 13 ++++++++++ web-src/public/login.html | 8 +++++++ web-src/src/assets/azure-ad-logo.png | Bin 0 -> 1379 bytes web-src/src/assets/css/index.css | 5 ++++ web-src/src/login/login.js | 10 ++++++++ 7 files changed, 73 insertions(+) create mode 100644 src/auth/auth_azure_ad_oauth.py create mode 100644 web-src/src/assets/azure-ad-logo.png diff --git a/src/auth/auth_azure_ad_oauth.py b/src/auth/auth_azure_ad_oauth.py new file mode 100644 index 00000000..5696e634 --- /dev/null +++ b/src/auth/auth_azure_ad_oauth.py @@ -0,0 +1,34 @@ +import logging + +import tornado.auth + +from auth.auth_abstract_oauth import AbstractOauthAuthenticator, _OauthUserInfo +from model import model_helper + +LOGGER = logging.getLogger('script_server.AzureADOauthAuthenticator') + + +class AzureAdOAuthAuthenticator(AbstractOauthAuthenticator): + def __init__(self, params_dict): + params_dict['group_support'] = False + self.auth_url = model_helper.read_obligatory(params_dict, 'auth_url', ' for OAuth') + self.token_url = 
model_helper.read_obligatory(params_dict, 'token_url', ' for OAuth') + + super().__init__( + self.auth_url, + self.token_url, + 'openid email profile', + params_dict, + ) + + async def fetch_user_info(self, access_token) -> _OauthUserInfo: + headers = {'Authorization': f'Bearer {access_token}'} + user_response = await self.http_client.fetch('https://graph.microsoft.com/v1.0/me', headers=headers) + if not user_response: + return None + + user_data = tornado.escape.json_decode(user_response.body) + return _OauthUserInfo(user_data.get('userPrincipalName'), True, user_data) + + async def fetch_user_groups(self, access_token): + return [] diff --git a/src/model/server_conf.py b/src/model/server_conf.py index b8175486..918a82f1 100644 --- a/src/model/server_conf.py +++ b/src/model/server_conf.py @@ -232,6 +232,9 @@ def create_authenticator(auth_object, temp_folder, process_invoker: ProcessInvok elif auth_type == 'google_oauth': from auth.auth_google_oauth import GoogleOauthAuthenticator authenticator = GoogleOauthAuthenticator(auth_object) + elif auth_type == 'azure_ad_oauth': + from auth.auth_azure_ad_oauth import AzureAdOAuthAuthenticator + authenticator = AzureAdOAuthAuthenticator(auth_object) elif auth_type == 'gitlab': from auth.auth_gitlab import GitlabOAuthAuthenticator authenticator = GitlabOAuthAuthenticator(auth_object) diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index 5e63f389..b23f4cb4 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -6,6 +6,7 @@ from auth.auth_gitlab import GitlabOAuthAuthenticator from auth.auth_google_oauth import GoogleOauthAuthenticator +from auth.auth_azure_ad_oauth import AzureAdOAuthAuthenticator from auth.auth_htpasswd import HtpasswdAuthenticator from auth.auth_ldap import LdapAuthenticator from auth.authorization import ANY_USER @@ -263,6 +264,18 @@ def test_google_oauth_without_allowed_users(self): _from_json({'auth': {'type': 'google_oauth', 'client_id': '1234', 
'secret': 'abcd'}}) + + def test_azure_ad_oauth(self): + config = _from_json({'auth': {'type': 'azure_ad_oauth', + 'auth_url': 'https://test.com/authorize', + 'token_url': 'https://test.com/token', + 'client_id': '1234', + 'secret': 'abcd'}}) + self.assertIsInstance(config.authenticator, AzureAdOAuthAuthenticator) + self.assertEquals('https://test.com/authorize', config.authenticator.auth_url) + self.assertEquals('https://test.com/token', config.authenticator.token_url) + self.assertEquals('1234', config.authenticator.client_id) + self.assertEquals('abcd', config.authenticator.secret) def test_gitlab_oauth(self): config = _from_json({ diff --git a/web-src/public/login.html b/web-src/public/login.html index 15c31cdd..9b81a5d3 100644 --- a/web-src/public/login.html +++ b/web-src/public/login.html @@ -45,6 +45,14 @@
    + + - \ No newline at end of file + + + + diff --git a/web-src/src/assets/authentik_icon.png b/web-src/src/assets/authentik_icon.png new file mode 100755 index 0000000000000000000000000000000000000000..a6109c915c5c0e7466205dcdbb62efc50b4a4b1a GIT binary patch literal 1686 zcmeAS@N?(olHy`uVBq!ia0vp^AwcZF!3-ofe_Bunq!^2X+?^P2p46!aaySb-B8!2F zuY)k7lg8`{1_mad0G|+7pyXd~-QkV?`za!O@V}=DG8Y&7zZbf)|6Z!N6cZBHA*29X z6nN=@bU{drIQj<_0jt4~MYWC?HP{@0Wb7Yr3@`YgYng3fX9G-6M@oYHf*IHi(%U9Y zX^c~6VN#oU`03~0e?C0e+bzYcviQ~iyUpUNo!?*Qv#$L5w%6pLoZ`l*Y%w>En$G(2 zQgF+Uewkyk#}}=*zvk+rZ|8-U_pE1pUV3FBqZVI#1xISrjOlsK{ER$-3oa@!{@v5@ z;?nk?WsD&^^tuk-6glC}xZ(P~ZGLY8G*5BW*!-%SzVW7q%?s{7^irp{uas>LRE3XXkHhzbtIFAnm)(PU4>`et0&c2s1 zjcE&_6mCYm{?2N&@m}owrv8|QxRDImXvL-!BcpG%^vxrwoZ=bK!mPe0I-+J%EF9xQkIr^HJTB{>?B4-P}K6hVE^hnvVNH@o;US%)sOE0^rheYZ2bY{40o3r=D+F6rty|^;x+`c*Q({9ec z5j)R(`iA#SY^GD^D=xUuByPE=^O1$nt9gHVlviC+pAe97Iw|*uMnJG{kLuQbv#Pzp zg$`dUe=jCD~0LTg4- zGtJ8MzkM5SamI7T)!(n%KlSI`Ukay=-CXx6{7hiU^3oq0Lx0cud24guKj%Me=7r$_ Usy`O&0A_avPgg&ebxsLQ0KQHazW@LL literal 0 HcmV?d00001 diff --git a/web-src/src/assets/css/index.css b/web-src/src/assets/css/index.css index 1aacc54a..d737158a 100644 --- a/web-src/src/assets/css/index.css +++ b/web-src/src/assets/css/index.css @@ -103,7 +103,8 @@ h6.header { #login-panel .login-google_oauth .login-info-label, #login-panel .login-azure_ad_oauth .login-info-label, #login-panel .login-gitlab .login-info-label, -#login-panel .login-keycloak .login-info-label { +#login-panel .login-keycloak .login-info-label, +#login-panel .login-authentik .login-info-label { margin-top: 16px; } @@ -166,3 +167,8 @@ h6.header { padding-left: 42px; background-image: url('../keycloak_icon.png'); } + +#login-authentik-button { + padding-left: 50px; + background-image: url('../authentik_icon.png'); +} diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index 2215ced3..ab3de274 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -3,7 +3,7 @@ import 
'@/common/materializecss/imports/cards'; import '@/common/materializecss/imports/input-fields'; import '@/common/style_imports'; import '@/common/style_imports.js'; -import {axiosInstance} from '@/common/utils/axios_utils' +import { axiosInstance } from '@/common/utils/axios_utils' import { addClass, contains, @@ -42,7 +42,7 @@ function validateURL(url) { } function onLoad() { - axiosInstance.get('auth/config').then(({data: config}) => { + axiosInstance.get('auth/config').then(({ data: config }) => { const loginContainer = document.getElementById('login-content-container'); if (config['type'] === 'google_oauth') { @@ -51,6 +51,8 @@ function onLoad() { setupAzureAdOAuth(loginContainer, config); } else if (config['type'] === 'keycloak_openid') { setupKeycloakOpenid(loginContainer, config); + } else if (config['type'] === 'authentik') { + setupAuthentikAuth(loginContainer, config); } else if (config['type'] === 'gitlab') { setupGitlabOAuth(loginContainer, config); } else { @@ -106,6 +108,14 @@ function setupKeycloakOpenid(loginContainer, authConfig) { 'login-keycloak-button') } +function setupAuthentikAuth(loginContainer, authConfig) { + setupOAuth( + loginContainer, + authConfig, + 'login-authentik-template', + 'login-authentik-button') +} + function setupGitlabOAuth(loginContainer, authConfig) { setupOAuth( loginContainer, @@ -160,7 +170,7 @@ function processCurrentOauthState() { var previousLocation = getUnparameterizedUrl(); if (nextUrl) { - previousLocation += '?' + toQueryArgs({'next': nextUrl}); + previousLocation += '?' 
+ toQueryArgs({ 'next': nextUrl }); } if (urlFragment) { previousLocation += urlFragment; @@ -239,7 +249,7 @@ function sendLoginRequest(formData) { const loginButton = getLoginButton(); loginButton.setAttribute('disabled', 'disabled'); - axiosInstance.post(loginUrl, formData, {maxRedirects: 0}) + axiosInstance.post(loginUrl, formData, { maxRedirects: 0 }) .then(onSuccess) .catch(onError) } From 7f3729d0f5dc2771bf5c530575537ecb2b3d86d2 Mon Sep 17 00:00:00 2001 From: knom Date: Mon, 5 May 2025 11:02:23 +0200 Subject: [PATCH 376/398] Changes from the PR review --- src/auth/auth_authentik_openid.py | 2 -- web-src/src/login/login.js | 8 ++++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/auth/auth_authentik_openid.py b/src/auth/auth_authentik_openid.py index 0b9ade21..75256293 100644 --- a/src/auth/auth_authentik_openid.py +++ b/src/auth/auth_authentik_openid.py @@ -19,10 +19,8 @@ def __init__(self, params_dict): authenitk_url = authenitk_url + '/' self._authenitk_url = authenitk_url - # (oauth_authorize_url, oauth_token_url, oauth_scope, params_dict): super().__init__(authenitk_url + 'application/o/authorize/', authenitk_url + 'application/o/token/', - # "openid" scope is needed since version 20: 'email openid profile', params_dict) diff --git a/web-src/src/login/login.js b/web-src/src/login/login.js index ab3de274..207a79a3 100644 --- a/web-src/src/login/login.js +++ b/web-src/src/login/login.js @@ -3,7 +3,7 @@ import '@/common/materializecss/imports/cards'; import '@/common/materializecss/imports/input-fields'; import '@/common/style_imports'; import '@/common/style_imports.js'; -import { axiosInstance } from '@/common/utils/axios_utils' +import {axiosInstance} from '@/common/utils/axios_utils' import { addClass, contains, @@ -42,7 +42,7 @@ function validateURL(url) { } function onLoad() { - axiosInstance.get('auth/config').then(({ data: config }) => { + axiosInstance.get('auth/config').then(({data: config}) => { const loginContainer = 
document.getElementById('login-content-container'); if (config['type'] === 'google_oauth') { @@ -170,7 +170,7 @@ function processCurrentOauthState() { var previousLocation = getUnparameterizedUrl(); if (nextUrl) { - previousLocation += '?' + toQueryArgs({ 'next': nextUrl }); + previousLocation += '?' + toQueryArgs({'next': nextUrl}); } if (urlFragment) { previousLocation += urlFragment; @@ -249,7 +249,7 @@ function sendLoginRequest(formData) { const loginButton = getLoginButton(); loginButton.setAttribute('disabled', 'disabled'); - axiosInstance.post(loginUrl, formData, { maxRedirects: 0 }) + axiosInstance.post(loginUrl, formData, {maxRedirects: 0}) .then(onSuccess) .catch(onError) } From 36bbf9e68cf6923a7e7b2eea74364ab7de135d54 Mon Sep 17 00:00:00 2001 From: Emmanuel Ferdman Date: Fri, 9 May 2025 16:13:17 -0700 Subject: [PATCH 377/398] migrate to `unittest.assertEqual` Signed-off-by: Emmanuel Ferdman --- src/tests/execution_logging_test.py | 10 +++--- src/tests/model_helper_test.py | 10 +++--- src/tests/server_conf_test.py | 48 ++++++++++++++--------------- src/tests/web/server_test.py | 2 +- 4 files changed, 35 insertions(+), 35 deletions(-) diff --git a/src/tests/execution_logging_test.py b/src/tests/execution_logging_test.py index 18be9c1d..79de102a 100644 --- a/src/tests/execution_logging_test.py +++ b/src/tests/execution_logging_test.py @@ -280,7 +280,7 @@ def test_get_history_entries_only_for_current_user(self, user_id): self.simulate_logging(execution_id='id4', user_id='userA') entries = self._get_entries_sorted(user_id) - self.assertEquals(2, len(entries)) + self.assertEqual(2, len(entries)) self.validate_history_entry(entry=entries[0], id='id1', user_id='userA') self.validate_history_entry(entry=entries[1], id='id4', user_id='userA') @@ -292,7 +292,7 @@ def test_get_history_entries_for_power_user(self): self.simulate_logging(execution_id='id4', user_id='userA') entries = self._get_entries_sorted('power_user') - self.assertEquals(4, len(entries)) + 
self.assertEqual(4, len(entries)) self.validate_history_entry(entry=entries[0], id='id1', user_id='userA') self.validate_history_entry(entry=entries[1], id='id2', user_id='userB') @@ -306,7 +306,7 @@ def test_get_history_entries_for_system_call(self): self.simulate_logging(execution_id='id4', user_id='userA') entries = self._get_entries_sorted('some user', system_call=True) - self.assertEquals(4, len(entries)) + self.assertEqual(4, len(entries)) self.validate_history_entry(entry=entries[0], id='id1', user_id='userA') self.validate_history_entry(entry=entries[1], id='id2', user_id='userB') @@ -357,7 +357,7 @@ def test_entry_with_user_id_name_different(self): entry = self.logging_service.find_history_entry('id1', '192.168.2.12') self.validate_history_entry(entry, id='id1', user_name='userX', user_id='192.168.2.12') - def test_find_entry_when_windows_line_seperator(self): + def test_find_entry_when_windows_line_separator(self): self.simulate_logging(execution_id='id1', user_name='userX', user_id='192.168.2.12') _replace_line_separators(self.get_log_files(), '\n', '\r\n') @@ -381,7 +381,7 @@ def test_find_entry_when_another_user_and_no_entry(self): entry = self.logging_service.find_history_entry('id2', 'userA') self.assertIsNone(entry) - def test_find_log_when_windows_line_seperator(self): + def test_find_log_when_windows_line_separator(self): self.simulate_logging(execution_id='id1', log_lines=['hello', 'wonderful', 'world']) _replace_line_separators(self.get_log_files(), '\n', '\r\n') diff --git a/src/tests/model_helper_test.py b/src/tests/model_helper_test.py index 94a22ce7..9def5ce2 100644 --- a/src/tests/model_helper_test.py +++ b/src/tests/model_helper_test.py @@ -552,7 +552,7 @@ def test_default_value_when_empty_string(self): class TestReadStrFromConfig(unittest.TestCase): def test_normal_text(self): value = read_str_from_config({'key1': 'xyz'}, 'key1') - self.assertEquals('xyz', value) + self.assertEqual('xyz', value) def test_none_value_no_default(self): value 
= read_str_from_config({'key1': None}, 'key1') @@ -560,7 +560,7 @@ def test_none_value_no_default(self): def test_none_value_with_default(self): value = read_str_from_config({'key1': None}, 'key1', default='abc') - self.assertEquals('abc', value) + self.assertEqual('abc', value) def test_no_key_no_default(self): value = read_str_from_config({'key1': 'xyz'}, 'key2') @@ -568,11 +568,11 @@ def test_no_key_no_default(self): def test_no_key_with_default(self): value = read_str_from_config({'key1': 'xyz'}, 'key2', default='abc') - self.assertEquals('abc', value) + self.assertEqual('abc', value) def test_text_with_whitespaces(self): value = read_str_from_config({'key1': ' xyz \n'}, 'key1') - self.assertEquals(' xyz \n', value) + self.assertEqual(' xyz \n', value) def test_text_when_blank_to_none_and_none(self): value = read_str_from_config({'key1': None}, 'key1', blank_to_none=True) @@ -588,7 +588,7 @@ def test_text_when_blank_to_none_and_blank(self): def test_text_when_blank_to_none_and_blank_and_default(self): value = read_str_from_config({'key1': ' \t \n'}, 'key1', blank_to_none=True, default='abc') - self.assertEquals('abc', value) + self.assertEqual('abc', value) def test_text_when_int(self): self.assertRaisesRegex(InvalidValueTypeException, 'Invalid key1 value: string expected, but was: 5', diff --git a/src/tests/server_conf_test.py b/src/tests/server_conf_test.py index b23f4cb4..467fb583 100644 --- a/src/tests/server_conf_test.py +++ b/src/tests/server_conf_test.py @@ -256,8 +256,8 @@ def test_google_oauth(self): 'allowed_users': [] }}) self.assertIsInstance(config.authenticator, GoogleOauthAuthenticator) - self.assertEquals('1234', config.authenticator.client_id) - self.assertEquals('abcd', config.authenticator.secret) + self.assertEqual('1234', config.authenticator.client_id) + self.assertEqual('abcd', config.authenticator.secret) def test_google_oauth_without_allowed_users(self): with self.assertRaisesRegex(Exception, 'access.allowed_users field is mandatory for 
google_oauth'): @@ -272,10 +272,10 @@ def test_azure_ad_oauth(self): 'client_id': '1234', 'secret': 'abcd'}}) self.assertIsInstance(config.authenticator, AzureAdOAuthAuthenticator) - self.assertEquals('https://test.com/authorize', config.authenticator.auth_url) - self.assertEquals('https://test.com/token', config.authenticator.token_url) - self.assertEquals('1234', config.authenticator.client_id) - self.assertEquals('abcd', config.authenticator.secret) + self.assertEqual('https://test.com/authorize', config.authenticator.auth_url) + self.assertEqual('https://test.com/token', config.authenticator.token_url) + self.assertEqual('1234', config.authenticator.client_id) + self.assertEqual('abcd', config.authenticator.secret) def test_gitlab_oauth(self): config = _from_json({ @@ -297,18 +297,18 @@ def test_ldap(self): 'base_dn': 'dc=test', 'version': 3}}) self.assertIsInstance(config.authenticator, LdapAuthenticator) - self.assertEquals('http://test-ldap.net', config.authenticator.url) - self.assertEquals('|xyz|', config.authenticator.username_template.substitute(username='xyz')) - self.assertEquals('dc=test', config.authenticator._base_dn) - self.assertEquals(3, config.authenticator.version) + self.assertEqual('http://test-ldap.net', config.authenticator.url) + self.assertEqual('|xyz|', config.authenticator.username_template.substitute(username='xyz')) + self.assertEqual('dc=test', config.authenticator._base_dn) + self.assertEqual(3, config.authenticator.version) def test_ldap_multiple_urls(self): config = _from_json({'auth': {'type': 'ldap', 'url': ['http://test-ldap-1.net', 'http://test-ldap-2.net'], 'username_pattern': '|$username|'}}) self.assertIsInstance(config.authenticator, LdapAuthenticator) - self.assertEquals(['http://test-ldap-1.net', 'http://test-ldap-2.net'], config.authenticator.url) - self.assertEquals('|xyz|', config.authenticator.username_template.substitute(username='xyz')) + self.assertEqual(['http://test-ldap-1.net', 'http://test-ldap-2.net'], 
config.authenticator.url) + self.assertEqual('|xyz|', config.authenticator.username_template.substitute(username='xyz')) def test_htpasswd_auth(self): file = test_utils.create_file('some-path', text='user1:1yL79Q78yczsM') @@ -332,7 +332,7 @@ class TestSecurityConfig(unittest.TestCase): def test_default_config(self): config = _from_json({}) - self.assertEquals('token', config.xsrf_protection) + self.assertEqual('token', config.xsrf_protection) @parameterized.expand([ ('token',), @@ -344,7 +344,7 @@ def test_xsrf_protection(self, xsrf_protection): 'xsrf_protection': xsrf_protection }}) - self.assertEquals(xsrf_protection, config.xsrf_protection) + self.assertEqual(xsrf_protection, config.xsrf_protection) def test_xsrf_protection_when_unsupported(self): self.assertRaises(InvalidValueException, _from_json, {'security': { @@ -375,18 +375,18 @@ def tearDown(self): def test_default_config(self): config = _from_json({}) env_vars = config.env_vars.build_env_vars() - self.assertEquals(env_vars, os.environ) + self.assertEqual(env_vars, os.environ) def test_config_when_safe_env_variables_used(self): config = _from_json({'title': '$$VAR1', 'auth': {'type': 'ldap', 'url': '$$MY_SECRET'}}) env_vars = config.env_vars.build_env_vars() - self.assertEquals(env_vars, os.environ) + self.assertEqual(env_vars, os.environ) self.assertEqual('abcd', env_vars['VAR1']) self.assertEqual('qwerty', env_vars['MY_SECRET']) - self.assertEquals(config.title, '$$VAR1') - self.assertEquals(config.authenticator.url, '$$MY_SECRET') + self.assertEqual(config.title, '$$VAR1') + self.assertEqual(config.authenticator.url, '$$MY_SECRET') def test_config_when_unsafe_env_variables_used(self): config = _from_json({ @@ -410,18 +410,18 @@ def test_config_when_unsafe_env_variables_used(self): self.assertNotIn('EMAIL_PWD', env_vars) self.assertNotIn('EMAIL_PWD_2', env_vars) - self.assertEquals(config.title, '$$VAR1') - self.assertEquals(config.authenticator.secret, 'qwerty') + self.assertEqual(config.title, 
'$$VAR1') + self.assertEqual(config.authenticator.secret, 'qwerty') alert_destinations = AlertsService(config.alerts_config)._communication_service._destinations - self.assertEquals(alert_destinations[0]._communicator.password, '1234509') - self.assertEquals(alert_destinations[1]._communicator.password, '$VAR2') + self.assertEqual(alert_destinations[0]._communicator.password, '1234509') + self.assertEqual(alert_destinations[1]._communicator.password, '$VAR2') # noinspection PyTypeChecker callback_feature = ExecutionsCallbackFeature(None, config.callbacks_config, None) callback_destinations = callback_feature._communication_service._destinations - self.assertEquals(callback_destinations[0]._communicator.password, '007') - self.assertEquals(callback_destinations[1]._communicator.password, 'VAR1') + self.assertEqual(callback_destinations[0]._communicator.password, '007') + self.assertEqual(callback_destinations[1]._communicator.password, 'VAR1') def create_email_destination(self, password): return {'type': 'email', diff --git a/src/tests/web/server_test.py b/src/tests/web/server_test.py index 7a4abbbe..59fdceb6 100644 --- a/src/tests/web/server_test.py +++ b/src/tests/web/server_test.py @@ -253,7 +253,7 @@ def test_get_scripts_when_basic_auth_failure(self): test_utils.write_script_config({'name': 's1'}, 's1', self.runners_folder) response = requests.get('http://127.0.0.1:12345/scripts', auth=HTTPBasicAuth('normal_user', 'wrong_pass')) - self.assertEquals(401, response.status_code) + self.assertEqual(401, response.status_code) @staticmethod def get_xsrf_token(session): From 63b88ea46555895ced42a888e71bf75e05df2dba Mon Sep 17 00:00:00 2001 From: "daniel.engelmann" Date: Fri, 11 Jul 2025 08:24:14 +0200 Subject: [PATCH 378/398] Implemented History --- web-src/src/common/components/log_panel.vue | 26 +++ web-src/src/common/utils/parameterHistory.js | 108 +++++++++++ .../scripts/ParameterHistoryModal.vue | 170 ++++++++++++++++++ .../components/scripts/script-view.vue | 36 
+++- .../main-app/store/scriptExecutionManager.js | 6 +- web-src/src/main-app/store/scriptSetup.js | 34 +++- 6 files changed, 372 insertions(+), 8 deletions(-) create mode 100644 web-src/src/common/utils/parameterHistory.js create mode 100644 web-src/src/main-app/components/scripts/ParameterHistoryModal.vue diff --git a/web-src/src/common/components/log_panel.vue b/web-src/src/common/components/log_panel.vue index ba24165f..6bd713b4 100644 --- a/web-src/src/common/components/log_panel.vue +++ b/web-src/src/common/components/log_panel.vue @@ -9,6 +9,9 @@ content_copy + + arrow_downward +
    @@ -124,6 +127,19 @@ export default { copyToClipboard(this.output.element); }, + downloadLog: function () { + const content = this.output.element.innerText || this.output.element.textContent || ''; + const blob = new Blob([content], { type: 'text/plain' }); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = 'log-output.txt'; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + }, + renderOutputElement: function () { if (!this.output || !this.$el) { return @@ -233,6 +249,16 @@ export default { color: var(--font-color-disabled); } +.log-panel .download-text-button { + position: absolute; + right: 50px; + bottom: 4px; +} + +.log-panel .download-text-button i { + color: var(--font-color-disabled); +} + /*noinspection CssInvalidPropertyValue,CssOverwrittenProperties*/ .log-panel >>> .log-content { display: block; diff --git a/web-src/src/common/utils/parameterHistory.js b/web-src/src/common/utils/parameterHistory.js new file mode 100644 index 00000000..f28373e5 --- /dev/null +++ b/web-src/src/common/utils/parameterHistory.js @@ -0,0 +1,108 @@ +/** + * Parameter History Utility + * Manages saving and loading of parameter values to/from localStorage + */ + +const PARAMETER_HISTORY_PREFIX = 'script_server_param_history_'; +const MAX_HISTORY_ENTRIES = 10; + +/** + * Get the storage key for a specific script's parameter history + * @param {string} scriptName - The name of the script + * @returns {string} The storage key + */ +function getStorageKey(scriptName) { + return PARAMETER_HISTORY_PREFIX + scriptName; +} + +/** + * Save parameter values to localStorage for a specific script + * @param {string} scriptName - The name of the script + * @param {Object} parameterValues - The parameter values to save + */ +export function saveParameterHistory(scriptName, parameterValues) { + try { + const key = getStorageKey(scriptName); + const history = 
loadParameterHistory(scriptName); + + // check if parameterValues is empty + if (Object.keys(parameterValues).length === 0) { + return; + } + + // Add current values to history (avoid duplicates) + const newEntry = { + timestamp: Date.now(), + values: { ...parameterValues } + }; + + // Remove any existing entry with the same values + const filteredHistory = history.filter(entry => + JSON.stringify(entry.values) !== JSON.stringify(newEntry.values) + ); + + // Add new entry at the beginning + filteredHistory.unshift(newEntry); + + // Keep only the most recent entries + if (filteredHistory.length > MAX_HISTORY_ENTRIES) { + filteredHistory.splice(MAX_HISTORY_ENTRIES); + } + + localStorage.setItem(key, JSON.stringify(filteredHistory)); + } catch (error) { + console.warn('Failed to save parameter history:', error); + } +} + +/** + * Load parameter history from localStorage for a specific script + * @param {string} scriptName - The name of the script + * @returns {Array} Array of historical parameter entries + */ +export function loadParameterHistory(scriptName) { + try { + const key = getStorageKey(scriptName); + const stored = localStorage.getItem(key); + return stored ? JSON.parse(stored) : []; + } catch (error) { + console.warn('Failed to load parameter history:', error); + return []; + } +} + +/** + * Get the most recent parameter values for a script + * @param {string} scriptName - The name of the script + * @returns {Object|null} The most recent parameter values or null if no history + */ +export function getMostRecentValues(scriptName) { + const history = loadParameterHistory(scriptName); + return history.length > 0 ? 
history[0].values : null; +} + +/** + * Remove a specific parameter history entry for a script + * @param {string} scriptName - The name of the script + * @param {number} index - The index of the entry to remove (0-based) + */ +export function removeParameterHistoryEntry(scriptName, index) { + try { + const key = getStorageKey(scriptName); + const history = loadParameterHistory(scriptName); + + // Check if the index is valid + if (index < 0 || index >= history.length) { + console.warn(`Invalid index: ${index} for script: ${scriptName}`); + return; + } + + // Remove the entry at the specified index + history.splice(index, 1); + + // Save the updated history back to localStorage + localStorage.setItem(key, JSON.stringify(history)); + } catch (error) { + console.warn('Failed to remove parameter history entry:', error); + } +} \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue b/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue new file mode 100644 index 00000000..c4a8680d --- /dev/null +++ b/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue @@ -0,0 +1,170 @@ + + + + + \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/script-view.vue b/web-src/src/main-app/components/scripts/script-view.vue index 6288c991..fa662e92 100644 --- a/web-src/src/main-app/components/scripts/script-view.vue +++ b/web-src/src/main-app/components/scripts/script-view.vue @@ -19,6 +19,12 @@ @click="stopScript"> {{ stopButtonLabel }} +
    @@ -54,6 +60,7 @@ ref="scheduleHolder" :scriptConfigComponentsHeight="scriptConfigComponentsHeight" @close="scheduleMode = false"/> + @@ -64,6 +71,7 @@ import {deepCloneObject, forEachKeyValue, isEmptyObject, isEmptyString, isNull} import ScheduleButton from '@/main-app/components/scripts/ScheduleButton'; import ScriptLoadingText from '@/main-app/components/scripts/ScriptLoadingText'; import ScriptViewScheduleHolder from '@/main-app/components/scripts/ScriptViewScheduleHolder'; +import ParameterHistoryModal from '@/main-app/components/scripts/ParameterHistoryModal'; import DOMPurify from 'dompurify'; import {marked} from 'marked'; import {mapActions, mapState} from 'vuex' @@ -96,7 +104,8 @@ export default { LogPanel, ScriptParametersView, ScheduleButton, - ScriptViewScheduleHolder + ScriptViewScheduleHolder, + ParameterHistoryModal }, computed: { @@ -328,6 +337,17 @@ export default { appendLog: function (text) { this.$refs.logPanel.appendLog(text); + }, + + openParameterHistory() { + this.$refs.parameterHistoryModal.open(); + }, + + handleUseParameters(values) { + // Set all parameter values using the scriptSetup store + for (const [parameterName, value] of Object.entries(values)) { + this.$store.dispatch('scriptSetup/setParameterValue', { parameterName, value }); + } } }, @@ -492,6 +512,20 @@ export default { color: var(--font-on-primary-color-main) } +.button-history { + margin-left: 16px; + flex: 1 1 auto; + color: var(--primary-color); + border: 1px solid var(--outline-color); + display: flex; + align-items: center; + gap: 8px; +} + +.button-history i { + font-size: 18px; +} + .schedule-button { margin-left: 32px; flex: 1 0 auto; diff --git a/web-src/src/main-app/store/scriptExecutionManager.js b/web-src/src/main-app/store/scriptExecutionManager.js index 2d0a89a9..839b3983 100644 --- a/web-src/src/main-app/store/scriptExecutionManager.js +++ b/web-src/src/main-app/store/scriptExecutionManager.js @@ -10,7 +10,8 @@ import scriptExecutor, { STATUS_INITIALIZING 
} from './scriptExecutor'; import {parametersToFormData} from '@/main-app/store/mainStoreHelper'; -import axios from 'axios' +import axios from 'axios'; +import { saveParameterHistory } from '@/common/utils/parameterHistory'; export default { namespaced: true, @@ -122,6 +123,9 @@ export default { const parameterValues = clone(rootState.scriptSetup.parameterValues); const scriptName = rootState.scriptConfig.scriptConfig.name; + // Save parameter history when script is executed + saveParameterHistory(scriptName, parameterValues); + const formData = parametersToFormData(parameterValues); formData.append('__script_name', scriptName); diff --git a/web-src/src/main-app/store/scriptSetup.js b/web-src/src/main-app/store/scriptSetup.js index 2a10fd46..eae68dc2 100644 --- a/web-src/src/main-app/store/scriptSetup.js +++ b/web-src/src/main-app/store/scriptSetup.js @@ -10,6 +10,7 @@ import { import clone from 'lodash/clone'; import isEqual from 'lodash/isEqual'; import Vue from 'vue'; +import { getMostRecentValues } from '@/common/utils/parameterHistory'; export default { namespaced: true, @@ -73,13 +74,34 @@ export default { return; } - const values = {}; - for (const parameter of parameters) { - const defaultValue = !isNull(parameter.default) ? parameter.default : null; - values[parameter.name] = defaultValue; + // Try to load historical values first + const historicalValues = scriptConfig ? getMostRecentValues(scriptConfig.name) : null; + let values = {}; - if (!isNull(values[parameter.name])) { - commit('MEMORIZE_DEFAULT_VALUE', {parameterName: parameter.name, defaultValue}); + if (historicalValues) { + // Only use historical values for parameters that exist in current config + for (const parameter of parameters) { + const parameterName = parameter.name; + if (historicalValues.hasOwnProperty(parameterName)) { + values[parameterName] = historicalValues[parameterName]; + } else { + const defaultValue = !isNull(parameter.default) ? 
parameter.default : null; + values[parameterName] = defaultValue; + } + + if (!isNull(values[parameterName])) { + commit('MEMORIZE_DEFAULT_VALUE', {parameterName: parameter.name, defaultValue: values[parameterName]}); + } + } + } else { + // No historical values, use defaults + for (const parameter of parameters) { + const defaultValue = !isNull(parameter.default) ? parameter.default : null; + values[parameter.name] = defaultValue; + + if (!isNull(values[parameter.name])) { + commit('MEMORIZE_DEFAULT_VALUE', {parameterName: parameter.name, defaultValue}); + } } } From 5f949b7c4505b4d59429c03f79d5287aed49a7af Mon Sep 17 00:00:00 2001 From: "daniel.engelmann" Date: Fri, 11 Jul 2025 09:53:57 +0200 Subject: [PATCH 379/398] added favorite button --- web-src/src/common/utils/parameterHistory.js | 90 ++++++++++++++++--- .../scripts/ParameterHistoryModal.vue | 50 ++++++++++- 2 files changed, 126 insertions(+), 14 deletions(-) diff --git a/web-src/src/common/utils/parameterHistory.js b/web-src/src/common/utils/parameterHistory.js index f28373e5..1b7a5801 100644 --- a/web-src/src/common/utils/parameterHistory.js +++ b/web-src/src/common/utils/parameterHistory.js @@ -33,23 +33,41 @@ export function saveParameterHistory(scriptName, parameterValues) { // Add current values to history (avoid duplicates) const newEntry = { timestamp: Date.now(), - values: { ...parameterValues } + values: { ...parameterValues }, + favorite: false }; - // Remove any existing entry with the same values - const filteredHistory = history.filter(entry => - JSON.stringify(entry.values) !== JSON.stringify(newEntry.values) + // Check if an entry with the same values already exists + const existingEntryIndex = history.findIndex(entry => + JSON.stringify(entry.values) === JSON.stringify(newEntry.values) ); - // Add new entry at the beginning - filteredHistory.unshift(newEntry); + let filteredHistory; + if (existingEntryIndex !== -1) { + // If entry exists, preserve its favorite status and update timestamp + 
filteredHistory = [...history]; + filteredHistory[existingEntryIndex] = { + ...filteredHistory[existingEntryIndex], + timestamp: Date.now() + }; + } else { + // If no duplicate exists, add new entry at the beginning + filteredHistory = [newEntry, ...history]; + } + + // Keep only the most recent entries (excluding favorites) + const nonFavoriteEntries = filteredHistory.filter(entry => !entry.favorite); + const favoriteEntries = filteredHistory.filter(entry => entry.favorite); - // Keep only the most recent entries - if (filteredHistory.length > MAX_HISTORY_ENTRIES) { - filteredHistory.splice(MAX_HISTORY_ENTRIES); + // Limit non-favorite entries + if (nonFavoriteEntries.length > MAX_HISTORY_ENTRIES) { + nonFavoriteEntries.splice(MAX_HISTORY_ENTRIES); } - localStorage.setItem(key, JSON.stringify(filteredHistory)); + // Combine favorites first, then non-favorites + const finalHistory = [...favoriteEntries, ...nonFavoriteEntries]; + + localStorage.setItem(key, JSON.stringify(finalHistory)); } catch (error) { console.warn('Failed to save parameter history:', error); } @@ -64,7 +82,13 @@ export function loadParameterHistory(scriptName) { try { const key = getStorageKey(scriptName); const stored = localStorage.getItem(key); - return stored ? JSON.parse(stored) : []; + const history = stored ? 
JSON.parse(stored) : []; + + // Ensure all entries have the favorite property (for backward compatibility) + return history.map(entry => ({ + ...entry, + favorite: entry.favorite || false + })); } catch (error) { console.warn('Failed to load parameter history:', error); return []; @@ -97,6 +121,12 @@ export function removeParameterHistoryEntry(scriptName, index) { return; } + // Don't allow removal of favorite entries + if (history[index].favorite) { + console.warn('Cannot remove favorite entry'); + return; + } + // Remove the entry at the specified index history.splice(index, 1); @@ -105,4 +135,42 @@ export function removeParameterHistoryEntry(scriptName, index) { } catch (error) { console.warn('Failed to remove parameter history entry:', error); } +} + +/** + * Toggle favorite status of a parameter history entry + * @param {string} scriptName - The name of the script + * @param {number} index - The index of the entry to toggle (0-based) + */ +export function toggleFavoriteEntry(scriptName, index) { + try { + const key = getStorageKey(scriptName); + const history = loadParameterHistory(scriptName); + + // Check if the index is valid + if (index < 0 || index >= history.length) { + console.warn(`Invalid index: ${index} for script: ${scriptName}`); + return; + } + + // Toggle favorite status + history[index].favorite = !history[index].favorite; + + // Reorder entries: favorites first, then non-favorites + const favoriteEntries = history.filter(entry => entry.favorite); + const nonFavoriteEntries = history.filter(entry => !entry.favorite); + + // Limit non-favorite entries + if (nonFavoriteEntries.length > MAX_HISTORY_ENTRIES) { + nonFavoriteEntries.splice(MAX_HISTORY_ENTRIES); + } + + // Combine favorites first, then non-favorites + const finalHistory = [...favoriteEntries, ...nonFavoriteEntries]; + + // Save the updated history back to localStorage + localStorage.setItem(key, JSON.stringify(finalHistory)); + } catch (error) { + console.warn('Failed to toggle 
favorite entry:', error); + } } \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue b/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue index c4a8680d..f3462036 100644 --- a/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue +++ b/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue @@ -7,10 +7,16 @@
    -
    +
    {{ formatTimestamp(entry.timestamp) }}
    +
    @@ -37,7 +44,7 @@ \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/script-view.vue b/web-src/src/main-app/components/scripts/script-view.vue index fa662e92..85757937 100644 --- a/web-src/src/main-app/components/scripts/script-view.vue +++ b/web-src/src/main-app/components/scripts/script-view.vue @@ -19,12 +19,6 @@ @click="stopScript"> {{ stopButtonLabel }} -
    @@ -60,7 +54,6 @@ ref="scheduleHolder" :scriptConfigComponentsHeight="scriptConfigComponentsHeight" @close="scheduleMode = false"/> -
    @@ -71,7 +64,6 @@ import {deepCloneObject, forEachKeyValue, isEmptyObject, isEmptyString, isNull} import ScheduleButton from '@/main-app/components/scripts/ScheduleButton'; import ScriptLoadingText from '@/main-app/components/scripts/ScriptLoadingText'; import ScriptViewScheduleHolder from '@/main-app/components/scripts/ScriptViewScheduleHolder'; -import ParameterHistoryModal from '@/main-app/components/scripts/ParameterHistoryModal'; import DOMPurify from 'dompurify'; import {marked} from 'marked'; import {mapActions, mapState} from 'vuex' @@ -104,8 +96,7 @@ export default { LogPanel, ScriptParametersView, ScheduleButton, - ScriptViewScheduleHolder, - ParameterHistoryModal + ScriptViewScheduleHolder }, computed: { @@ -339,16 +330,7 @@ export default { this.$refs.logPanel.appendLog(text); }, - openParameterHistory() { - this.$refs.parameterHistoryModal.open(); - }, - handleUseParameters(values) { - // Set all parameter values using the scriptSetup store - for (const [parameterName, value] of Object.entries(values)) { - this.$store.dispatch('scriptSetup/setParameterValue', { parameterName, value }); - } - } }, watch: { @@ -512,20 +494,6 @@ export default { color: var(--font-on-primary-color-main) } -.button-history { - margin-left: 16px; - flex: 1 1 auto; - color: var(--primary-color); - border: 1px solid var(--outline-color); - display: flex; - align-items: center; - gap: 8px; -} - -.button-history i { - font-size: 18px; -} - .schedule-button { margin-left: 32px; flex: 1 0 auto; From 6a42a79955ec30b2d3d5f7204e6987304fa4036c Mon Sep 17 00:00:00 2001 From: "daniel.engelmann" Date: Fri, 1 Aug 2025 16:36:53 +0200 Subject: [PATCH 382/398] removed boarder and background of parameter history button --- web-src/src/main-app/components/scripts/ScriptHeader.vue | 2 -- 1 file changed, 2 deletions(-) diff --git a/web-src/src/main-app/components/scripts/ScriptHeader.vue b/web-src/src/main-app/components/scripts/ScriptHeader.vue index e4d62bfe..a79b8359 100644 --- 
a/web-src/src/main-app/components/scripts/ScriptHeader.vue +++ b/web-src/src/main-app/components/scripts/ScriptHeader.vue @@ -70,8 +70,6 @@ export default { .button-history { margin-right: 16px; flex: 0 0 auto; - color: var(--primary-color); - border: 1px solid var(--outline-color); display: flex; align-items: center; justify-content: center; From 6710ad006aefbf628ead6b6513aedaa4fa0e9d09 Mon Sep 17 00:00:00 2001 From: "daniel.engelmann" Date: Mon, 4 Aug 2025 10:09:35 +0200 Subject: [PATCH 383/398] styling and added toggle for preloading of history parameters --- web-src/src/common/components/log_panel.vue | 2 +- web-src/src/common/utils/parameterHistory.js | 14 ++++++++ .../scripts/ParameterHistoryModal.vue | 32 ++++++++++++++++--- .../components/scripts/ScriptHeader.vue | 8 ++--- web-src/src/main-app/store/scriptSetup.js | 4 +-- 5 files changed, 48 insertions(+), 12 deletions(-) diff --git a/web-src/src/common/components/log_panel.vue b/web-src/src/common/components/log_panel.vue index 6bd713b4..401b07b1 100644 --- a/web-src/src/common/components/log_panel.vue +++ b/web-src/src/common/components/log_panel.vue @@ -10,7 +10,7 @@ content_copy - arrow_downward + file_download
    diff --git a/web-src/src/common/utils/parameterHistory.js b/web-src/src/common/utils/parameterHistory.js index 1b7a5801..4a4b8f9b 100644 --- a/web-src/src/common/utils/parameterHistory.js +++ b/web-src/src/common/utils/parameterHistory.js @@ -173,4 +173,18 @@ export function toggleFavoriteEntry(scriptName, index) { } catch (error) { console.warn('Failed to toggle favorite entry:', error); } +} + +/** + * Check if historical values should be used for a script + * @param {string} scriptName - The name of the script + * @returns {boolean} True if historical values should be used, false otherwise + */ +export function shouldUseHistoricalValues(scriptName) { + try { + return localStorage.getItem(`useHistoricalValues_${scriptName}`) === 'true'; + } catch (error) { + console.warn('Failed to check historical values toggle:', error); + return false; + } } \ No newline at end of file diff --git a/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue b/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue index f3462036..38ac2501 100644 --- a/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue +++ b/web-src/src/main-app/components/scripts/ParameterHistoryModal.vue @@ -2,6 +2,17 @@