diff --git a/secheaders/constants.py b/secheaders/constants.py
index b7920e2..fb8f761 100644
--- a/secheaders/constants.py
+++ b/secheaders/constants.py
@@ -3,7 +3,7 @@
 DEFAULT_TIMEOUT = 10
 
 # Let's try to imitate a legit browser to avoid being blocked / flagged as web crawler
-REQUEST_HEADERS = {
+DEFAULT_GET_HEADERS = {
     'Accept': ('text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,'
                'application/signed-exchange;v=b3;q=0.9'),
     'Accept-Encoding': 'gzip, deflate, br',
@@ -13,6 +13,14 @@
                    'Chrome/106.0.0.0 Safari/537.36'),
 }
 
+PREFLIGHT_HEADERS = {
+    **DEFAULT_GET_HEADERS,
+    'Access-Control-Request-Method': 'GET',
+    'Origin': 'https://nonexistent.example.com',
+    'Cookie': 'SESSIONID=123',
+}
+
+
 EVAL_WARN = 0
 EVAL_OK = 1
 OK_COLOR = '\033[92m'
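
For context, `PREFLIGHT_HEADERS` extends the browser-like `DEFAULT_GET_HEADERS` with an `Origin`, an `Access-Control-Request-Method` and a dummy `Cookie`, so that a permissive server has something to reflect back in its CORS response headers. Below is a minimal sketch of how such a preflight-style request could be sent with the standard library alone; the host `example.com` is a placeholder, not part of this change.

```python
import http.client

from secheaders.constants import DEFAULT_TIMEOUT, PREFLIGHT_HEADERS

# Illustration only: send an OPTIONS request carrying the preflight headers
# and print whatever CORS-related response headers the server returns.
conn = http.client.HTTPSConnection("example.com", timeout=DEFAULT_TIMEOUT)
conn.request("OPTIONS", "/", headers=PREFLIGHT_HEADERS)
res = conn.getresponse()
for name, value in res.getheaders():
    if name.lower().startswith("access-control-"):
        print(f"{name}: {value}")
conn.close()
```
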
diff --git a/secheaders/eval_utils.py b/secheaders/eval_utils.py
index 53fdbd5..d8cd94b 100644
--- a/secheaders/eval_utils.py
+++ b/secheaders/eval_utils.py
@@ -1,7 +1,8 @@
 import re
 from typing import Tuple
 
-from .constants import EVAL_WARN, EVAL_OK, UNSAFE_CSP_RULES, RESTRICTED_PERM_POLICY_FEATURES, SERVER_VERSION_HEADERS
+from .constants import EVAL_WARN, EVAL_OK, UNSAFE_CSP_RULES, RESTRICTED_PERM_POLICY_FEATURES, SERVER_VERSION_HEADERS, \
+    PREFLIGHT_HEADERS
 from .exceptions import SecurityHeadersException
 
 
@@ -137,7 +138,87 @@ def permissions_policy_parser(contents: str) -> dict:
     return retval
 
 
-def analyze_headers(headers: dict) -> dict:
+def eval_coep(contents: str) -> Tuple[int, list]:
+    # Accept only recommended values as safe
+    safe_values = ['require-corp', 'unsafe-none']
+    notes = []
+
+    value = contents.strip().lower()
+    if value in safe_values:
+        return EVAL_OK, []
+
+    notes.append(f"Unrecognized or unsafe COEP value: {contents}")
+    return EVAL_WARN, notes
+
+
+def eval_coop(contents: str) -> Tuple[int, list]:
+    # Accept only recommended values as safe
+    safe_values = ['same-origin', 'same-origin-allow-popups', 'unsafe-none']
+    notes = []
+
+    value = contents.strip().lower()
+    if value in safe_values:
+        return EVAL_OK, []
+
+    notes.append(f"Unrecognized or unsafe COOP value: {contents}")
+    return EVAL_WARN, notes
+
+
+def eval_cors(cors_headers: dict) -> Tuple[int, list]:
+    contents = cors_headers.get('access-control-allow-origin')
+    allow_credentials = cors_headers.get('access-control-allow-credentials', False)
+
+    if not contents:
+        return EVAL_OK, []
+
+    notes = []
+    # Check that the CORS value is not reflected from our preflight request (risky practice)
+    if contents in PREFLIGHT_HEADERS.values():
+        notes.append("CORS header value is reflected back from the Origin or other request headers.")
+        if allow_credentials and allow_credentials.lower() == 'true':
+            notes.append("Access-Control-Allow-Credentials is set to true, which is unsafe with reflected origins.")
+        return EVAL_WARN, notes
+
+    # Allow specific origins (not wildcard)
+    # Match a valid http or https URL using regex
+    if re.match(r'^https?://[^\s/$.?#].[^\s]*$', contents):
+        return EVAL_OK, []
+
+    if contents == '*':
+        notes.append("Wildcard '*' allows cross-site requests from any origin.")
+        if allow_credentials and allow_credentials.lower() == 'true':
+            notes.append("Access-Control-Allow-Credentials is set to true, which is unsafe with wildcard origins.")
+            return EVAL_WARN, notes
+        return EVAL_OK, notes
+
+    notes.append(f"Unrecognized or unsafe CORS value: {contents}")
+
+    return EVAL_WARN, notes
+
+
+def eval_corp(contents: str) -> Tuple[int, list]:
+    # Accept only recommended values as safe
+    safe_values = ['same-origin', 'same-site']
+    valid_values = safe_values + ['cross-origin']
+
+    notes = []
+
+    value = contents.strip().lower()
+    if value in valid_values:
+        # Only 'same-origin' and 'same-site' are considered safe
+        if value in safe_values:
+            return EVAL_OK, []
+
+        notes.append(
+            f"Value '{contents}' is valid but less restrictive; consider using 'same-origin' or 'same-site'."
+        )
+        return EVAL_WARN, notes
+
+    notes.append(f"Unrecognized value: {contents}")
+    return EVAL_WARN, notes
+
+
+def analyze_headers(headers: dict, cors_headers: dict = None) -> dict:
     """ Default return array """
     retval = {}
 
@@ -170,6 +251,20 @@
         'permissions-policy': {
             'recommended': True,
             'eval_func': eval_permissions_policy,
+        },
+    }
+    cors_headers_mapping = {
+        'cross-origin-embedder-policy': {
+            'recommended': True,
+            'eval_func': eval_coep,
+        },
+        'cross-origin-opener-policy': {
+            'recommended': True,
+            'eval_func': eval_coop,
+        },
+        'cross-origin-resource-policy': {
+            'recommended': True,
+            'eval_func': eval_corp
         }
     }
 
@@ -192,6 +287,33 @@
             warn = settings.get('recommended')
             retval[header] = {'defined': False, 'warn': warn, 'contents': None, 'notes': []}
 
+    if cors_headers:
+        # access-control-allow-origin is a bit special as it depends on the presence of
+        # the access-control-allow-credentials header
+        res, notes = eval_cors(cors_headers)
+        retval['access-control-allow-origin'] = {
+            'defined': cors_headers.get('access-control-allow-origin') is not None,
+            'warn': res == EVAL_WARN,
+            'contents': cors_headers.get('access-control-allow-origin'),
+            'notes': notes,
+        }
+
+        for header, settings in cors_headers_mapping.items():
+            if header in cors_headers:
+                eval_func = settings.get('eval_func')
+                if not eval_func:
+                    raise SecurityHeadersException(f"No evaluation function found for header: {header}")
+                res, notes = eval_func(cors_headers[header])
+                retval[header] = {
+                    'defined': True,
+                    'warn': res == EVAL_WARN,
+                    'contents': cors_headers[header],
+                    'notes': notes,
+                }
+            else:
+                warn = settings.get('recommended')
+                retval[header] = {'defined': False, 'warn': warn, 'contents': None, 'notes': []}
+
     for header in SERVER_VERSION_HEADERS:
         if header in headers:
             res, notes = eval_version_info(headers[header])
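
As a rough illustration of how `eval_cors` is intended to classify responses (the sample header values below are made up; `EVAL_OK` and `EVAL_WARN` come from `constants.py`):

```python
from secheaders.constants import EVAL_OK, EVAL_WARN
from secheaders.eval_utils import eval_cors

# A fixed allow-listed origin is accepted as safe.
res, notes = eval_cors({'access-control-allow-origin': 'https://app.example.com'})
assert res == EVAL_OK

# The Origin from PREFLIGHT_HEADERS reflected back, combined with credentials,
# is flagged as a warning with explanatory notes.
res, notes = eval_cors({
    'access-control-allow-origin': 'https://nonexistent.example.com',
    'access-control-allow-credentials': 'true',
})
assert res == EVAL_WARN and notes

# A wildcard origin without credentials passes, but still carries a note.
res, notes = eval_cors({'access-control-allow-origin': '*'})
assert res == EVAL_OK and notes
```
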
diff --git a/secheaders/secheaders.py b/secheaders/secheaders.py
index 4d71cbc..8f701cb 100644
--- a/secheaders/secheaders.py
+++ b/secheaders/secheaders.py
@@ -3,6 +3,8 @@
 import json
 import sys
 
+from secheaders.constants import PREFLIGHT_HEADERS
+
 from . import cmd_utils
 from .exceptions import SecurityHeadersException, FailedToFetchHeaders
 
@@ -60,20 +62,22 @@ def async_scan_done(scan):
 def scan_target(url, args):
     https = None
     target = None
+    cors_headers = None
     if url:
         web_client = WebClient(url, args.max_redirects, args.insecure)
         headers = web_client.get_headers()
-        https = web_client.test_https()
-        target = web_client.get_full_url()
         if not headers:
             raise FailedToFetchHeaders("Failed to fetch headers")
+        https = web_client.test_https()
+        target = web_client.get_full_url()
+        cors_headers = web_client.get_headers(method='OPTIONS', headers=PREFLIGHT_HEADERS)
     elif args.file:
         headers = parse_file_input(args.file)
         target = args.file
     else:
         raise SecurityHeadersException("Failed to fetch headers, no sufficient input provided.")
 
-    analysis_result = analyze_headers(headers)
+    analysis_result = analyze_headers(headers, cors_headers)
 
     return {'target': target, 'headers': analysis_result, 'https': https}
 
diff --git a/secheaders/webclient.py b/secheaders/webclient.py
index 1e9acc1..6055122 100644
--- a/secheaders/webclient.py
+++ b/secheaders/webclient.py
@@ -5,7 +5,7 @@
 from typing import Union
 from urllib.parse import ParseResult, urlparse
 
-from .constants import DEFAULT_TIMEOUT, DEFAULT_URL_SCHEME, REQUEST_HEADERS, HEADER_STRUCTURED_LIST
+from .constants import DEFAULT_TIMEOUT, DEFAULT_URL_SCHEME, HEADER_STRUCTURED_LIST, DEFAULT_GET_HEADERS
 from .exceptions import InvalidTargetURL, UnableToConnect
 
 
@@ -53,7 +53,7 @@ def _follow_redirect_until_response(self, url, follow_redirects=5) -> ParseResult:
                 raise InvalidTargetURL("Unsupported protocol scheme")
 
             try:
-                conn.request('GET', temp_url.path, headers=REQUEST_HEADERS)
+                conn.request('GET', temp_url.path, headers=DEFAULT_GET_HEADERS)
                 res = conn.getresponse()
             except (socket.gaierror, socket.timeout, ConnectionRefusedError, UnicodeError) as e:
                 raise UnableToConnect(f"Connection failed {temp_url.netloc}") from e
@@ -98,13 +98,15 @@ def open_connection(self, target_url) -> Union[http.client.HTTPConnection, http.client.HTTPSConnection]:
 
         return conn
 
-    def get_headers(self) -> dict:
+    def get_headers(self, method='GET', headers=None) -> dict:
         """ Fetch headers from the target site """
         retval = {}
+        if not headers:
+            headers = DEFAULT_GET_HEADERS
 
         conn = self.open_connection(self.target_url)
         try:
-            conn.request('GET', self.target_url.path, headers=REQUEST_HEADERS)
+            conn.request(method=method, url=self.target_url.path, headers=headers)
             res = conn.getresponse()
         except (socket.gaierror, socket.timeout, ConnectionRefusedError, ssl.SSLError, UnicodeError) as e:
             raise UnableToConnect(f"Connection failed {self.target_url.hostname}") from e
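
Putting the pieces together, the new scan flow could be exercised roughly as follows, mirroring `scan_target()` above; the target URL, redirect limit and `insecure` flag are placeholder values, not part of this change.

```python
from secheaders.constants import PREFLIGHT_HEADERS
from secheaders.eval_utils import analyze_headers
from secheaders.webclient import WebClient

# Placeholder arguments: target URL, max_redirects=2, insecure=False.
client = WebClient("https://example.com", 2, False)

headers = client.get_headers()  # regular GET, as before
cors_headers = client.get_headers(method='OPTIONS', headers=PREFLIGHT_HEADERS)  # CORS preflight

result = analyze_headers(headers, cors_headers)
for name, info in result.items():
    if info['warn']:
        print(name, info['notes'])
```
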