10 changes: 9 additions & 1 deletion secheaders/constants.py
@@ -3,7 +3,7 @@
DEFAULT_TIMEOUT = 10

# Let's try to imitate a legit browser to avoid being blocked / flagged as web crawler
REQUEST_HEADERS = {
DEFAULT_GET_HEADERS = {
'Accept': ('text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,'
'application/signed-exchange;v=b3;q=0.9'),
'Accept-Encoding': 'gzip, deflate, br',
@@ -13,6 +13,14 @@
'Chrome/106.0.0.0 Safari/537.36'),
}

PREFLIGHT_HEADERS = {
**DEFAULT_GET_HEADERS,
'Access-Control-Request-Method': 'GET',
'Origin': 'https://nonexistent.example.com',
'Cookie': 'SESSIONID=123',
}


EVAL_WARN = 0
EVAL_OK = 1
OK_COLOR = '\033[92m'
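
A minimal sketch of what the dict unpacking above yields (import path as in this repository; purely illustrative):

from secheaders.constants import DEFAULT_GET_HEADERS, PREFLIGHT_HEADERS

# PREFLIGHT_HEADERS keeps every browser-imitating header and layers the
# preflight-specific entries on top of them.
assert set(DEFAULT_GET_HEADERS) <= set(PREFLIGHT_HEADERS)
assert PREFLIGHT_HEADERS['Origin'] == 'https://nonexistent.example.com'
assert PREFLIGHT_HEADERS['Access-Control-Request-Method'] == 'GET'
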
126 changes: 124 additions & 2 deletions secheaders/eval_utils.py
@@ -1,7 +1,8 @@
import re
from typing import Tuple

from .constants import EVAL_WARN, EVAL_OK, UNSAFE_CSP_RULES, RESTRICTED_PERM_POLICY_FEATURES, SERVER_VERSION_HEADERS
from .constants import EVAL_WARN, EVAL_OK, UNSAFE_CSP_RULES, RESTRICTED_PERM_POLICY_FEATURES, SERVER_VERSION_HEADERS, \
PREFLIGHT_HEADERS
from .exceptions import SecurityHeadersException


@@ -137,7 +138,87 @@ def permissions_policy_parser(contents: str) -> dict:
return retval


def analyze_headers(headers: dict) -> dict:
def eval_coep(contents: str) -> Tuple[int, list]:
# Accept only recommended values as safe
safe_values = ['require-corp', 'unsafe-none']
notes = []

value = contents.strip().lower()
if value in safe_values:
return EVAL_OK, []

notes.append(f"Unrecognized or unsafe COEP value: {contents}")
return EVAL_WARN, notes


def eval_coop(contents: str) -> Tuple[int, list]:
# Accept only recommended values as safe
safe_values = ['same-origin', 'same-origin-allow-popups', 'unsafe-none']
notes = []

value = contents.strip().lower()
if value in safe_values:
return EVAL_OK, []

notes.append(f"Unrecognized or unsafe COOP value: {contents}")
return EVAL_WARN, notes


def eval_cors(cors_headers: dict) -> Tuple[int, list]:
contents = cors_headers.get('access-control-allow-origin')
allow_credentials = cors_headers.get('access-control-allow-credentials', False)

if not contents:
return EVAL_OK, []

notes = []
# Check that the CORS value is not reflected from our preflight request (risky practice)
if contents in PREFLIGHT_HEADERS.values():
notes.append("CORS header value is reflected back from the Origin or other request headers.")
if allow_credentials and allow_credentials.lower() == 'true':
notes.append("Access-Control-Allow-Credentials is set to true, which is unsafe with reflected origins.")
return EVAL_WARN, notes

# Allow specific origins (not wildcard)
# Match a valid http or https URL using regex
if re.match(r'^https?://[^\s/$.?#].[^\s]*$', contents):
return EVAL_OK, []

if contents == '*':
notes.append("Wildcard '*' allows cross-site requests from any origin.")
if allow_credentials and allow_credentials.lower() == 'true':
notes.append("Access-Control-Allow-Credentials is set to true, which is unsafe with wildcard origins.")
return EVAL_WARN, notes
return EVAL_OK, notes

notes.append(f"Unrecognized or unsafe CORS value: {contents}")

return EVAL_WARN, notes


def eval_corp(contents: str) -> Tuple[int, list]:
# Accept only recommended values as safe
safe_values = ['same-origin', 'same-site']
valid_values = safe_values + ['cross-origin']

notes = []

value = contents.strip().lower()
if value in valid_values:
# Only 'same-origin' and 'same-site' are considered safe
if value in safe_values:
return EVAL_OK, []

notes.append(
f"Value '{contents}' is valid but less restrictive; consider using 'same-origin' or 'same-site'."
)
return EVAL_WARN, notes

notes.append(f"Unrecognized value: {contents}")
return EVAL_WARN, notes


def analyze_headers(headers: dict, cors_headers: dict = None) -> dict:
""" Default return array """
retval = {}

@@ -170,6 +251,20 @@ def analyze_headers(headers: dict) -> dict:
'permissions-policy': {
'recommended': True,
'eval_func': eval_permissions_policy,
},
}
cors_headers_mapping = {
'cross-origin-embedder-policy': {
'recommended': True,
'eval_func': eval_coep,
},
'cross-origin-opener-policy': {
'recommended': True,
'eval_func': eval_coop,
},
'cross-origin-resource-policy': {
'recommended': True,
'eval_func': eval_corp
}
}

@@ -192,6 +287,33 @@ def analyze_headers(headers: dict) -> dict:
warn = settings.get('recommended')
retval[header] = {'defined': False, 'warn': warn, 'contents': None, 'notes': []}

if cors_headers:
# access-control-allow-origin is a bit special as it depends on the presence of
# the access-control-allow-credentials header
res, notes = eval_cors(cors_headers)
retval['access-control-allow-origin'] = {
'defined': cors_headers.get('access-control-allow-origin') is not None,
'warn': res == EVAL_WARN,
'contents': cors_headers.get('access-control-allow-origin'),
'notes': notes,
}

for header, settings in cors_headers_mapping.items():
if header in cors_headers:
eval_func = settings.get('eval_func')
if not eval_func:
raise SecurityHeadersException(f"No evaluation function found for header: {header}")
res, notes = eval_func(cors_headers[header])
retval[header] = {
'defined': True,
'warn': res == EVAL_WARN,
'contents': cors_headers[header],
'notes': notes,
}
else:
warn = settings.get('recommended')
retval[header] = {'defined': False, 'warn': warn, 'contents': None, 'notes': []}

for header in SERVER_VERSION_HEADERS:
if header in headers:
res, notes = eval_version_info(headers[header])
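
A hedged usage sketch of the new evaluators (the response header values below are made up for demonstration; the functions and EVAL_* constants are the ones defined and imported above):

from secheaders.constants import EVAL_OK, EVAL_WARN
from secheaders.eval_utils import eval_cors, eval_corp

# A response that echoes the fake preflight Origin and allows credentials
# should be flagged by eval_cors().
reflected = {
    'access-control-allow-origin': 'https://nonexistent.example.com',
    'access-control-allow-credentials': 'true',
}
res, notes = eval_cors(reflected)
assert res == EVAL_WARN and len(notes) == 2

# Cross-Origin-Resource-Policy: restrictive values pass, 'cross-origin' warns.
assert eval_corp('same-origin') == (EVAL_OK, [])
res, notes = eval_corp('cross-origin')
assert res == EVAL_WARN
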
10 changes: 7 additions & 3 deletions secheaders/secheaders.py
@@ -3,6 +3,8 @@
import json
import sys

from secheaders.constants import PREFLIGHT_HEADERS


from . import cmd_utils
from .exceptions import SecurityHeadersException, FailedToFetchHeaders
@@ -60,20 +62,22 @@ def async_scan_done(scan):
def scan_target(url, args):
https = None
target = None
cors_headers = None
if url:
web_client = WebClient(url, args.max_redirects, args.insecure)
headers = web_client.get_headers()
https = web_client.test_https()
target = web_client.get_full_url()
if not headers:
raise FailedToFetchHeaders("Failed to fetch headers")
https = web_client.test_https()
target = web_client.get_full_url()
cors_headers = web_client.get_headers(method='OPTIONS', headers=PREFLIGHT_HEADERS)
elif args.file:
headers = parse_file_input(args.file)
target = args.file
else:
raise SecurityHeadersException("Failed to fetch headers, no sufficient input provided.")

analysis_result = analyze_headers(headers)
analysis_result = analyze_headers(headers, cors_headers)
return {'target': target, 'headers': analysis_result, 'https': https}


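
For context, a condensed sketch of the new two-request flow in scan_target() (the target URL and the max_redirects/insecure arguments are placeholders, and the import paths for WebClient and analyze_headers are assumed from the repository layout):

from secheaders.constants import PREFLIGHT_HEADERS
from secheaders.eval_utils import analyze_headers
from secheaders.webclient import WebClient

web_client = WebClient('https://example.com', 2, False)  # url, max_redirects, insecure
headers = web_client.get_headers()  # regular GET with DEFAULT_GET_HEADERS
cors_headers = web_client.get_headers(method='OPTIONS', headers=PREFLIGHT_HEADERS)
analysis_result = analyze_headers(headers, cors_headers)  # CORS entries share the existing result shape
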
10 changes: 6 additions & 4 deletions secheaders/webclient.py
@@ -5,7 +5,7 @@
from typing import Union
from urllib.parse import ParseResult, urlparse

from .constants import DEFAULT_TIMEOUT, DEFAULT_URL_SCHEME, REQUEST_HEADERS, HEADER_STRUCTURED_LIST
from .constants import DEFAULT_TIMEOUT, DEFAULT_URL_SCHEME, HEADER_STRUCTURED_LIST, DEFAULT_GET_HEADERS
from .exceptions import InvalidTargetURL, UnableToConnect


@@ -53,7 +53,7 @@ def _follow_redirect_until_response(self, url, follow_redirects=5) -> ParseResul
raise InvalidTargetURL("Unsupported protocol scheme")

try:
conn.request('GET', temp_url.path, headers=REQUEST_HEADERS)
conn.request('GET', temp_url.path, headers=DEFAULT_GET_HEADERS)
res = conn.getresponse()
except (socket.gaierror, socket.timeout, ConnectionRefusedError, UnicodeError) as e:
raise UnableToConnect(f"Connection failed {temp_url.netloc}") from e
@@ -98,13 +98,15 @@ def open_connection(self, target_url) -> Union[http.client.HTTPConnection, http.

return conn

def get_headers(self) -> dict:
def get_headers(self, method='GET', headers=None) -> dict:
""" Fetch headers from the target site """
retval = {}
if not headers:
headers = DEFAULT_GET_HEADERS

conn = self.open_connection(self.target_url)
try:
conn.request('GET', self.target_url.path, headers=REQUEST_HEADERS)
conn.request(method=method, url=self.target_url.path, headers=headers)
res = conn.getresponse()
except (socket.gaierror, socket.timeout, ConnectionRefusedError, ssl.SSLError, UnicodeError) as e:
raise UnableToConnect(f"Connection failed {self.target_url.hostname}") from e
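
For reference, the OPTIONS preflight that get_headers() now issues corresponds roughly to the raw http.client exchange below (host and path are placeholders; lower-casing the header names is an assumption that matches the lowercase lookups in eval_utils; error handling omitted):

import http.client

from secheaders.constants import DEFAULT_TIMEOUT, PREFLIGHT_HEADERS

conn = http.client.HTTPSConnection('example.com', timeout=DEFAULT_TIMEOUT)
conn.request(method='OPTIONS', url='/', headers=PREFLIGHT_HEADERS)
res = conn.getresponse()
cors_headers = {name.lower(): value for name, value in res.getheaders()}
conn.close()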