Generate a server wallpaper with IIS stats and suspicious IP addresses
#!/usr/bin/env python3
"""
IIS Wallpaper Updater
Discover IIS sites dynamically, parse recent log stats per site (views, errors,
and recent requests), render them as a table on an image, and set the image as
the Windows desktop wallpaper.
"""
import os
import glob
import argparse
import subprocess
import re
import ctypes
from collections import deque, defaultdict
from datetime import datetime, timedelta
from PIL import Image, ImageDraw, ImageFont

# Windows API constants for SystemParametersInfoW
SPI_SETDESKWALLPAPER = 20
SPIF_UPDATEINIFILE = 0x01
SPIF_SENDCHANGE = 0x02
# URL patterns that strongly suggest an exploitation attempt
critical_patterns = [
    r'\.git/', r'\.svn/', r'\.env\b', r'\.htaccess\b', r'\.htpasswd\b',
    r'etc/passwd', r'/cgi-bin/', r'/cgi/', r'cmd=', r'xmlrpc\.php',
]
# URL patterns that merely look like probing for admin panels, installers, or backups
suspicious_patterns = [
    r'\badmin\b', r'\bwp-admin\b', r'wpadmin', r'\bcpanel\b', r'\bwebadmin\b',
    r'\badministrator\b', r'\badminpanel\b', r'\blogin\b', r'\bbackend\b', r'\buserpanel\b',
    r'\bsetup\b', r'\binstall\b', r'\bupgrade\b', r'\bconfig\b', r'\bsettings\b',
    r'\bdrupal\b', r'\bjoomla\b', r'\bmagento\b', r'\bwordpress\b',
    r'\.php\b', r'\.asp\b', r'\.aspx\b', r'\.jsp\b', r'\.cgi\b',
    r'\.zip\b', r'\.tar\b', r'\.tar\.gz\b', r'\.bak\b', r'\.old\b', r'\.backup\b',
    r'passwd', r'wp-login',
]
critical_patterns = [re.compile(pat, re.I) for pat in critical_patterns]
suspicious_patterns = [re.compile(pat, re.I) for pat in suspicious_patterns]
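# For example, a request for '/.git/config' or '/xmlrpc.php' lands in the
# critical bucket, while '/administrator/index.php' or '/backup.zip' only
# counts as suspicious (illustrative URLs, not taken from real logs).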
# Requests matching these URL patterns are treated as noise (static assets,
# health checks, and similar) and excluded from the recent-request list.
filter_out_request_urls_patterns = [
    r'^/cdn-cgi/rum(?:\?.*)?$',
    r'^/css/(.*)$',
    r'^/js/(.*)$',
    r'^/images/(.*)$',
    r'^/fonts/(.*)$',
    r'\.css$',
    r'\.js$',
    r'\.ico$',
    r'\.png$',
    r'\.jpg$',
    r'\.jpeg$',
    r'\.gif$',
    r'\.svg$',
    r'^/favicon\.ico$',
    r'^/robots\.txt$',
    r'^/sitemap\.xml$',
    r'\.map$',
    r'\.woff2?$',
    r'\.ttf$',
    r'\.eot$',
    r'\.webp$',
    r'\.avif$',
    r'\.mp4$',
    r'\.webm$',
    r'\.ogg$',
    r'\.json$',
    r'\.xml$',
    r'\.txt$',
    r'^/static/.*$',
    r'^/assets/.*$',
    r'^/_next/.*$',
    r'^/vendor/.*$',
    r'^/health(?:check)?$',
    r'^/metrics$',
]
# compile all noise patterns once
filter_out_request_urls_patterns = [
    re.compile(pat, re.I)
    for pat in filter_out_request_urls_patterns
]
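# For example, '/assets/app.css', '/favicon.ico', and '/healthcheck' are all
# treated as noise: they still count toward per-site view totals but never
# appear in the recent-request or suspicious lists (illustrative URLs).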
# Hide the console window that would otherwise flash when appcmd is launched
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW


def get_site_mapping():
    """Map each IIS site ID to a display name taken from its first HTTP binding's host header."""
    mapping = {}
    appcmd = r"C:\Windows\System32\inetsrv\appcmd.exe"
    output = subprocess.check_output(
        [appcmd, 'list', 'site'],
        startupinfo=startupinfo,
        encoding='utf-8'
    )
    for line in output.splitlines():
        m = re.search(r'id:(\d+),bindings:(.*?),state:', line)
        if not m:
            continue
        sid = int(m.group(1))
        bindings = m.group(2)
        http_binding = None
        for bind in bindings.split(','):
            if bind.startswith('http/'):
                # binding format is protocol/IP:port:hostname; keep the hostname
                parts = bind.split(':')
                if len(parts) >= 2:
                    http_binding = parts[-1]
                break
        mapping[sid] = http_binding if http_binding else f'site_{sid}'
    return mapping
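# Example `appcmd list site` output line that the regex above expects (exact
# formatting can vary slightly between IIS versions):
#   SITE "Default Web Site" (id:1,bindings:http/*:80:example.local,state:Started)
# which yields mapping[1] == 'example.local'; a binding with an empty host
# header falls back to 'site_1'.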

def parse_logs(log_dir, window_hours, recent_count=13):
    """Scan each site's W3C logs for the last window_hours hours and collect
    per-site stats, per-IP request totals, and IPs hitting flagged URLs."""
    # Note: IIS W3C logs record timestamps in UTC by default; if the window
    # looks offset, use datetime.utcnow() here instead of local time.
    now = datetime.now()
    window_start = now - timedelta(hours=window_hours)
    site_map = get_site_mapping()
    stats = {}
    recent_ip_totals = defaultdict(int)
    evil_ips = defaultdict(list)
    for sid, name in site_map.items():
        # each site logs into a W3SVC<id> folder under the log root
        folder = os.path.join(log_dir, f'W3SVC{sid}')
        try:
            views = 0
            errors = 0
            recent = deque(maxlen=recent_count)
            for logfile in glob.glob(os.path.join(folder, '*.log')):
                with open(logfile, 'r', encoding='utf-8', errors='ignore') as f:
                    fields = []
                    idx_date = idx_time = idx_status = idx_ip = idx_url = None
                    for line in f:
                        # the #Fields: header declares the column layout for the lines that follow
                        if line.startswith('#Fields:'):
                            parts = line.strip().split()[1:]
                            fields = parts
                            if 'date' in parts and 'time' in parts and 'sc-status' in parts:
                                idx_date = parts.index('date')
                                idx_time = parts.index('time')
                                idx_status = parts.index('sc-status')
                            if 'c-ip' in parts:
                                idx_ip = parts.index('c-ip')
                            if 'cs-uri-stem' in parts:
                                idx_url = parts.index('cs-uri-stem')
                            continue
                        if not fields or line.startswith('#'):
                            continue
                        cols = line.split()
                        try:
                            t = datetime.strptime(cols[idx_date] + ' ' + cols[idx_time], '%Y-%m-%d %H:%M:%S')
                        except Exception:
                            continue
                        if t < window_start:
                            continue
                        views += 1
                        try:
                            if int(cols[idx_status]) >= 400:
                                errors += 1
                        except Exception:
                            pass
                        ip = cols[idx_ip] if idx_ip is not None else 'N/A'
                        url = cols[idx_url] if idx_url is not None else 'N/A'
                        recent_ip_totals[ip] += 1
                        blocked_url = False
                        for fo_p in filter_out_request_urls_patterns:
                            if fo_p.search(url):
                                blocked_url = True
                                break
                        if blocked_url:
                            continue
                        recent.append((ip, url))
                        # classify the URL: critical patterns win; otherwise check suspicious ones
                        for pat in critical_patterns:
                            if pat.search(url):
                                evil_ips[ip].append(('critical', url))
                                break
                        else:
                            for pat in suspicious_patterns:
                                if pat.search(url):
                                    evil_ips[ip].append(('suspicious', url))
                                    break
        except Exception as e:
            print('error parsing log', e)
        stats[name] = (views, errors, list(recent))
    evil_ips_deduped = {}
    for ip, entries in evil_ips.items():
        if entries:
            # sort so all criticals come first, then by URL
            deduped = sorted(
                {(sev, url) for sev, url in entries},
                key=lambda x: (0 if x[0] == 'critical' else 1, x[1])
            )
            evil_ips_deduped[ip] = deduped
    return stats, recent_ip_totals, evil_ips_deduped
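# Illustrative shapes of the three structures parse_logs returns (values made up):
#   stats            -> {'example.local': (142, 3, [('203.0.113.9', '/index'), ...])}
#   recent_ip_totals -> {'203.0.113.9': 57, ...}
#   evil_ips_deduped -> {'203.0.113.9': [('critical', '/.env'), ('suspicious', '/wp-login.php')]}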

def render_wallpaper(stats, recent_ips_totals, evil_ips, window_hours, output_path):
    """Draw the stats tables onto a 1920x1080 image and save it to output_path."""
    width, height = 1920, 1080
    bg = (30, 30, 30)
    fg = (230, 230, 230)
    fg_disabled = (120, 120, 120)
    errc = (200, 80, 80)
    rec_c = (150, 150, 150)
    evil_c = (255, 60, 60)
    img = Image.new('RGB', (width, height), bg)
    draw = ImageDraw.Draw(img)
    try:
        f_title = ImageFont.truetype('SegoeUI.ttf', 48)
        f_head = ImageFont.truetype('SegoeUI.ttf', 36)
        f_cell = ImageFont.truetype('SegoeUI.ttf', 32)
        f_small = ImageFont.truetype('SegoeUI.ttf', 24)
    except IOError:
        f_title = f_head = f_cell = f_small = ImageFont.load_default()
    title = f'IIS Stats (last {window_hours}h)'
    b = draw.textbbox((0, 0), title, font=f_title)
    draw.text(((width - (b[2] - b[0])) / 2, 40), title, fill=fg, font=f_title)
    px = 100
    y = 120
    site_names = list(stats.keys()) or ['Site']
    max_site_w = max((draw.textbbox((0, 0), n, font=f_cell)[2] - draw.textbbox((0, 0), n, font=f_cell)[0]) for n in site_names)
    col1 = px
    col2 = col1 + max_site_w + 80
    col3 = col2 + draw.textbbox((0, 0), 'Views', font=f_head)[2] + 80
    col5 = col3 + 500  # right-hand column for the IP summaries
    for text, x in [('Site', col1), ('Views', col2), ('Errors', col3)]:
        draw.text((x, y), text, fill=fg, font=f_head)
    draw.text((px, y + 40), 'Recent (IP -> URL)', fill=rec_c, font=f_head)
    y += (draw.textbbox((0, 0), 'Hg', font=f_head)[3] - draw.textbbox((0, 0), 'Hg', font=f_head)[1]) + 60
    for site, (v, e, recs) in sorted(stats.items()):
        draw_color = fg
        if not recs:
            draw_color = fg_disabled
        draw.text((col1, y), site, fill=draw_color, font=f_cell)
        draw.text((col2, y), str(v), fill=draw_color, font=f_cell)
        draw.text((col3, y), str(e), fill=errc, font=f_cell)
        y += (draw.textbbox((0, 0), 'Hg', font=f_cell)[3] - draw.textbbox((0, 0), 'Hg', font=f_cell)[1]) + 8
        # Group recs by IP
        ip_to_urls = {}
        for ip, url in reversed(recs):
            ip_to_urls.setdefault(ip, []).append(url)
        for ip, urls in ip_to_urls.items():
            draw.text((col1 + 40, y), ip, fill=rec_c, font=f_small)
            y += (draw.textbbox((0, 0), 'Hg', font=f_small)[3] - draw.textbbox((0, 0), 'Hg', font=f_small)[1]) + 2
            for url in urls:
                draw.text((col1 + 80, y), f'-> {url}', fill=rec_c, font=f_small)
                y += (draw.textbbox((0, 0), 'Hg', font=f_small)[3] - draw.textbbox((0, 0), 'Hg', font=f_small)[1]) + 2
        y += 10
    y = 120
    draw.text((col5, y), 'Recent IPs:', fill=fg, font=f_head)
    y += (draw.textbbox((0, 0), 'Hg', font=f_head)[3] - draw.textbbox((0, 0), 'Hg', font=f_head)[1]) + 8
    for ip, total in recent_ips_totals.items():
        draw.text((col5, y), f'{ip}: {total}', fill=fg, font=f_cell)
        y += (draw.textbbox((0, 0), 'Hg', font=f_cell)[3] - draw.textbbox((0, 0), 'Hg', font=f_cell)[1]) + 8
    y += 40
    draw.text((col5, y), 'Suspicious IPs:', fill=evil_c, font=f_head)
    y += (draw.textbbox((0, 0), 'Hg', font=f_head)[3] - draw.textbbox((0, 0), 'Hg', font=f_head)[1]) + 8

    # 1) Group IPs: any with critical hits first, then by total hits
    def severity_sort_key(item):
        ip, entries = item
        has_crit = any(sev == 'critical' for sev, _ in entries)
        return (not has_crit, -len(entries), ip)  # deterministic ordering

    # 2) Within each IP the entries are already sorted so critical URLs render before suspicious
    for ip, entries in sorted(evil_ips.items(), key=severity_sort_key):
        draw.text((col5, y), f'{ip} ({len(entries)})', fill=evil_c, font=f_cell)
        y += (draw.textbbox((0, 0), 'Hg', font=f_cell)[3] - draw.textbbox((0, 0), 'Hg', font=f_cell)[1]) + 2
        for sev, url in entries:
            color = evil_c if sev == 'critical' else (255, 165, 0)
            draw.text((col5 + 40, y), f'-> {url}', fill=color, font=f_small)
            # use the small font's line height since the URL is drawn with f_small
            y += (draw.textbbox((0, 0), 'Hg', font=f_small)[3] - draw.textbbox((0, 0), 'Hg', font=f_small)[1]) + 2
        y += 8
    img.save(output_path)
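# Note: the layout targets a fixed 1920x1080 canvas and does not paginate or
# clip, so very long IP lists can run past the bottom edge of the image.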

def set_wallpaper(path):
    ctypes.windll.user32.SystemParametersInfoW(SPI_SETDESKWALLPAPER, 0, path,
                                               SPIF_UPDATEINIFILE | SPIF_SENDCHANGE)


def main():
    p = argparse.ArgumentParser(description='Update Windows wallpaper with IIS stats')
    p.add_argument('--log-dir', default=r'C:\inetpub\logs\LogFiles', help='IIS log root')
    p.add_argument('--hours', type=int, default=6, help='Lookback hours')
    p.add_argument('--output', default=r'C:\Windows\Temp\iis_wallpaper.png', help='Output image')
    args = p.parse_args()
    stats, recent_ips_totals, evil_ips = parse_logs(args.log_dir, args.hours)
    render_wallpaper(stats, recent_ips_totals, evil_ips, args.hours, args.output)
    set_wallpaper(args.output)


if __name__ == '__main__':
    main()
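Usage: run it from an elevated prompt or a scheduled task, for example
python iis_wallpaper.py --log-dir "C:\inetpub\logs\LogFiles" --hours 6 --output "C:\Windows\Temp\iis_wallpaper.png"
(iis_wallpaper.py is just an assumed filename for the saved gist; the flags and paths shown are the script's own defaults, and reading the IIS log folder plus calling appcmd typically requires administrator rights.)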