Merged
11 changes: 11 additions & 0 deletions static_global/css/style.css
@@ -105,6 +105,17 @@ margin:auto;
padding:1%;
}

.circle-small{
border-radius: 50%;
width: 30px;
height: 30px;
display: flex;
align-items: center;
justify-content: center;
background: white;
padding:1%;
}

.bi{
transition: transform 0.3s ease-in-out;
}
5 changes: 4 additions & 1 deletion static_global/js/collapsibleAnimation.js
@@ -14,4 +14,7 @@ const coll = document.getElementsByClassName("collapsible");
}
$('.accordion-button').click(function () {
$(this).find('.bi').toggleClass('rotate');
});
});
$('.list-group-item').click(function () {
$(this).find('.bi').toggleClass('rotate');
});
@@ -0,0 +1 @@
,rki,frostmourne,22.11.2024 10:08,file:///home/rki/.config/libreoffice/4;
Contributor:
files should not have spaces

Member Author:
Forgot to remove that from the tree. It is not used in the code; I'll remove it in a later update. And I didn't name that file myself :)

Binary file not shown.
Binary file not shown.
Binary file not shown.
98 changes: 91 additions & 7 deletions testing/helpers.py
@@ -4,7 +4,8 @@
import subprocess
import sys
import time
import datetime
from datetime import datetime, timezone, timedelta
from dateutil.relativedelta import relativedelta
from base64 import b64decode
from io import BytesIO
from typing import Any, Union, Dict
@@ -23,6 +24,7 @@
import hashlib
import base64
import smtplib as smtp
from collections import Counter
from concurrent.futures import ThreadPoolExecutor, as_completed
from urllib.parse import urljoin, urlparse
from Crypto.PublicKey import RSA
@@ -33,6 +35,7 @@
from testing import validators
from testing_platform.settings import PANDORA_ROOT_URL
from .cipher_scoring import load_cipher_info
from pyvulnerabilitylookup import PyVulnerabilityLookup
from pylookyloo import Lookyloo

logger = logging.getLogger(__name__)
@@ -352,18 +355,24 @@ def web_server_check(domain: str):
if port["state"] != "closed":
service = port.get("service", {})
vulners = port.get("scripts", [])
list_of_vulns = []
vuln_dict = {'cve': [], 'others': []}
if vulners:
vulners = vulners[0].get("data", {})
for vuln, vulndata in vulners.items():
try:
items = vulndata.get("children", [])
for vulnerability in items:
vulnerability['severity'] = cvss_rating(vulnerability['cvss'])
if vulnerability["type"] == "cve":
vulnerability["link"] = f"https://cvepremium.circl.lu/cve/{vulnerability['id']}"
vuln_info = lookup_cve(vulnerability['id'])
vulnerability["description"] = vuln_info['description']
vulnerability['cvss_details'] = vuln_info['cvss']
vulnerability['sightings'] = vuln_info['sightings']
vulnerability["link"] = f"https://vulnerability.circl.lu/vuln/{vulnerability['id']}"
vuln_dict['cve'].append(vulnerability)
else:
vulnerability["link"] = f"https://vulners.com/{vulnerability['type']}/{vulnerability['id']}"
list_of_vulns.append(vulnerability)
vuln_dict['others'].append(vulnerability)
except TypeError:
continue
except AttributeError:
@@ -373,17 +382,89 @@ def web_server_check(domain: str):
try:
vulnerabilities.append({
"service": f'{service.get("product", "Unknown")} - {service.get("name", "Unknown")}',
"vuln_list": list_of_vulns,
"vuln_dict": vuln_dict,
})
except KeyError:
continue

logger.info("server scan: Done!")
logger.info(json.dumps(vulnerabilities, indent=2))

return {"services": services, "vulnerabilities": vulnerabilities}
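The loop above now splits each service's findings into two buckets: CVEs, which get enriched via lookup_cve and linked to vulnerability.circl.lu, and everything else, which keeps a vulners.com link. A minimal, self-contained sketch of that grouping, using made-up findings and skipping the enrichment step:

# Hypothetical findings, shaped like the vulners NSE script output (assumption).
sample_findings = [
    {"id": "CVE-2023-44487", "type": "cve"},
    {"id": "EXPLOITPACK-EXAMPLE-1", "type": "exploitpack"},
]

vuln_dict = {"cve": [], "others": []}
for finding in sample_findings:
    if finding["type"] == "cve":
        # The real code also attaches description, CVSS details and sightings here.
        finding["link"] = f"https://vulnerability.circl.lu/vuln/{finding['id']}"
        vuln_dict["cve"].append(finding)
    else:
        finding["link"] = f"https://vulners.com/{finding['type']}/{finding['id']}"
        vuln_dict["others"].append(finding)

print(vuln_dict["cve"][0]["link"])     # https://vulnerability.circl.lu/vuln/CVE-2023-44487
print(vuln_dict["others"][0]["link"])  # https://vulners.com/exploitpack/EXPLOITPACK-EXAMPLE-1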


def cvss_rating(cvss_score):
    if float(cvss_score) >= 9:
        return "CRITICAL"
    elif 9 > float(cvss_score) >= 7:
        return "HIGH"
    elif 7 > float(cvss_score) >= 4:
        return "MEDIUM"
    else:
        return "LOW"
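The thresholds follow the usual CVSS qualitative bands: 9.0 and above is critical, 7.0 to 8.9 high, 4.0 to 6.9 medium, anything lower is low. A quick check, assuming cvss_rating above is in scope:

for score in ("9.8", "7.5", "5.3", "2.1"):
    print(score, cvss_rating(score))
# 9.8 CRITICAL
# 7.5 HIGH
# 5.3 MEDIUM
# 2.1 LOW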


def lookup_cve(vuln_id):
    vuln_lookup = PyVulnerabilityLookup('https://vulnerability.circl.lu')
    cve = {}
    sightings = {}
    if vuln_lookup.is_up:
        try:
            cve = vuln_lookup.get_vulnerability(vuln_id)
        except requests.exceptions.ConnectionError:
            cve = {}
        try:
            sightings = vuln_lookup.get_sightings(vuln_id=vuln_id, date_from=(datetime.now() - relativedelta(months=1)).date())
        except requests.exceptions.ConnectionError:
            sightings = {}

    containers = cve.get('containers', {})
    cna = containers.get('cna', {})
    adp = containers.get('adp', [{}])
    cve_info = {}

    # Description
    descriptions = cna.get('descriptions', [])
    for description in descriptions:
        if description.get('lang') == 'en':
            cve_info['description'] = description.get('value', 'N/A')
            break
    else:
        cve_info['description'] = 'N/A'

    # Severity
    metrics = cna.get('metrics', [])
    if not metrics:
        for item in adp:
            if 'metrics' in item:
                metrics = item['metrics']
                break

    if metrics:
        for metric in metrics:
            if 'cvssV3_1' in metric or 'cvssV4_0' in metric:
                cvss_data = metric.get('cvssV3_1', {}) or metric.get('cvssV4_0', {})
                cve_info['cvss'] = cvss_data
                break
    else:
        cve_info['cvss'] = {}

    if sightings:
        dates = [
            datetime.fromisoformat(sighting['creation_timestamp']).date()
            for sighting in sightings['data']
        ]
        date_counts = dict(Counter(dates))
        sightings['dates'] = [str(date) for date in date_counts.keys()]
        sightings['counts'] = list(date_counts.values())

    cve_info['sightings'] = {
        'total': sightings.get('metadata', {}).get('count', 0),
        'dates': sightings.get('dates', []),
        'counts': sightings.get('counts', [])
    }

    return cve_info
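The sightings handling above collapses the raw sighting records into two parallel lists, dates and per-day counts, presumably for charting. A standalone sketch of that aggregation with made-up records:

from collections import Counter
from datetime import datetime

# Hypothetical records, shaped like the sightings 'data' list (assumption).
records = [
    {"creation_timestamp": "2024-11-20T10:08:00+00:00"},
    {"creation_timestamp": "2024-11-20T15:30:00+00:00"},
    {"creation_timestamp": "2024-11-21T09:00:00+00:00"},
]

dates = [datetime.fromisoformat(r["creation_timestamp"]).date() for r in records]
date_counts = Counter(dates)

print([str(d) for d in date_counts])   # ['2024-11-20', '2024-11-21']
print(list(date_counts.values()))      # [2, 1]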


def web_server_check_no_raw_socket(hostname):
try:
validators.full_domain_validator(hostname)
@@ -587,6 +668,7 @@ def check_dnssec(domain):

return result


def check_mx(domain):
"""
Check MX records for the domain.
@@ -616,6 +698,7 @@ def check_mx(domain):

return result
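The body of check_mx is collapsed in this view. Purely as an illustration of what an MX lookup involves (dnspython is an assumption here, not necessarily what helpers.py uses):

import dns.resolver  # dnspython; assumed for illustration only

def mx_hosts(domain: str) -> list[str]:
    """Return the domain's MX exchanges, lowest preference first."""
    try:
        answers = dns.resolver.resolve(domain, "MX")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.NoNameservers):
        return []
    return [str(r.exchange).rstrip(".") for r in sorted(answers, key=lambda r: r.preference)]

print(mx_hosts("example.org"))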


def check_spf(domain):
"""
Check SPF record for the domain.
@@ -649,6 +732,7 @@ def check_spf(domain):

return result


def check_dmarc(domain: str) -> dict[str, bool | None | str | Any]:
"""
Check the DMARC record for a given domain.
@@ -1365,7 +1449,7 @@ def get_capture_result(lookyloo, capture_uuid):


def get_recent_captures(lookyloo):
ts = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(weeks=1)
ts = datetime.now(timezone.utc) - timedelta(weeks=1)
recent_captures = lookyloo.get_recent_captures(timestamp=ts)[:10]
print(recent_captures)
for i in range(len(recent_captures)):
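With the datetime names now imported directly, the one-week cutoff for recent captures is built as a timezone-aware timestamp. A standalone sketch of the same computation (the Lookyloo instance URL is a placeholder):

from datetime import datetime, timezone, timedelta
from pylookyloo import Lookyloo

# Timezone-aware "one week ago", as in get_recent_captures().
ts = datetime.now(timezone.utc) - timedelta(weeks=1)
print(ts.isoformat())

lookyloo = Lookyloo("https://lookyloo.example.org")  # placeholder instance URL
if lookyloo.is_up:
    recent = lookyloo.get_recent_captures(timestamp=ts)[:10]
    print(len(recent))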
183 changes: 0 additions & 183 deletions testing/templates/check_infra.html

This file was deleted.
