# sdbs-infra/sdbs_infra/dashboard/views.py
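"""Dashboard index view for sdbs-infra.

Builds everything the single-page dashboard shows: quick links, self-hosted service
status, machine health (queried from the healthchecks.io API), RSS/Atom feed items,
and basic host statistics (load average, memory, disk, uptime).
"""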


import asyncio
import logging
import socket
import time
from collections import namedtuple
from datetime import datetime
from operator import itemgetter
from urllib.parse import urlparse

import aiohttp
import feedparser
import psutil
from bs4 import BeautifulSoup
from django.views.generic import TemplateView
from humanize import naturalsize

from sdbs_infra import settings
from sdbs_infra.dashboard.models import Service, Status, Link, Machine, Feed

logger = logging.getLogger(__name__)


class IndexView(TemplateView):
    template_name = "index.html"

    # aiohttp request timeouts, in seconds
    SERVICE_TIMEOUT = 2
    REQ_TIMEOUT = 5

    def get_context_data(self, **kwargs):
        return {
            'vps_stats': self.vps_stats(),
            **asyncio.run(self.process_all(list(Link.objects.all()),
                                           list(Service.objects.all()),
                                           list(Machine.objects.all()),
                                           list(Feed.objects.all())))
        }

    async def process_all(self, links, services, machines, feeds):
        links, services, machines, feeds = await asyncio.gather(
            self.process_links(links), self.process_services(services),
            self.process_machines(machines), self.process_feeds(feeds))

        return {
            'links': links,
            'services': services,
            'machines': machines,
            'feed_items': feeds,
        }

    async def process_links(self, links):
        result = []
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self.REQ_TIMEOUT, sock_connect=1))

        for link in links:
            index_text = None

            # Fetch the page only when no image is configured, so a favicon can be scraped from it.
            if not link.image:
                try:
                    async with session.get(link.url) as response:
                        index_text = await response.text()
                except Exception as exc:
                    logger.exception(exc)

            image = link.image.url if link.image else self.extract_favicon(link.url, index_text)

            result.append({
                'image_url': image,
                **vars(link)
            })

        await session.close()
        return result

    async def process_services(self, services):
        result = []
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self.SERVICE_TIMEOUT, sock_connect=1))

        for service in services:
            index_status, index_text = None, None

            # Fetch the page when there is no port to probe or no image configured;
            # the response feeds the HTTP status check and/or favicon scraping.
            if not service.port or not service.image:
                try:
                    async with session.get(service.url) as response:
                        index_status, index_text = response.status, await response.text()
                except Exception as exc:
                    logger.exception(exc)

            if service.port:
                # A configured port takes precedence: probe it with a plain TCP connect.
                a_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                location = ("localhost", service.port)
                result_of_check = a_socket.connect_ex(location)
                a_socket.close()

                if result_of_check == 0:
                    status = Status.OK
                else:
                    status = Status.DOWN
            elif index_status:
                status = Status.OK if index_status == 200 else Status.DOWN
            else:
                status = Status.UNKNOWN

            image = service.image.url if service.image else self.extract_favicon(service.url, index_text)

            result.append({
                'status': status.value,
                'image_url': image,
                **vars(service)
            })

        await session.close()
        return result

    async def process_machines(self, machines):
        result = []
        # healthchecks.io authenticates via the X-Api-Key header.
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self.REQ_TIMEOUT, sock_connect=1), headers={
            'X-Api-Key': settings.HEALTCHECKS_API_KEY
        })

        for machine in machines:
            status = Status.UNKNOWN
            last_ping = None

            if settings.HEALTCHECKS_API_KEY and machine.healthcheck_id:
                try:
                    async with session.get(
                            f"https://healthchecks.io/api/v1/checks/{machine.healthcheck_id}") as response:
                        check = await response.json()
                        status = {
                            'up': Status.OK,
                            'down': Status.DOWN
                        }.get(check.get('status'), Status.UNKNOWN)
                        last_ping = datetime.fromisoformat(check.get('last_ping'))
                except Exception as exc:
                    logger.exception(exc)

            result.append({
                'status': status.value,
                'last_ping': last_ping,
                **vars(machine)
            })

        await session.close()
        return result

    async def process_feeds(self, feeds):
        result = []
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self.REQ_TIMEOUT, sock_connect=1))

        for feed in feeds:
            try:
                async with session.get(feed.url) as response:
                    parsed_feed = feedparser.parse(await response.text())
                    entries = parsed_feed.entries
                    for entry in entries:
                        # Attach a plain datetime for the template.
                        entry.published_datetime = datetime(*entry.published_parsed[0:6])
                    result.extend(parsed_feed.entries)
            except Exception as exc:
                logger.exception(exc)
                continue

        await session.close()

        result.sort(key=itemgetter('published_parsed'), reverse=True)
        return result

    @staticmethod
    def extract_favicon(url, index_text):
        if not index_text:
            return None

        scheme, netloc, *_ = urlparse(url)
        base_url = (f"{scheme}://" if scheme else "") + netloc

        parsed_html = BeautifulSoup(index_text, features="html.parser")
        link_tags = parsed_html.find_all('link')

        # Prefer apple-touch-icon over shortcut/icon links; return the first match.
        for rel in ['apple-touch-icon', 'shortcut', 'icon']:
            for link_tag in link_tags:
                if rel in link_tag.attrs['rel']:
                    href = link_tag.attrs['href']

                    # Resolve relative hrefs against the page's base URL.
                    if netloc not in href and not (href.startswith("//") or href.startswith("http")):
                        image = base_url + (href if href.startswith("/") else f"/{href}")
                    else:
                        image = href

                    return image

    # noinspection PyListCreation
    @staticmethod
    def vps_stats():
        stats = []

        stats.append(f"<em>LOAD AVG:</em> {', '.join(map(str, psutil.getloadavg()))}")

        memory = psutil.virtual_memory()
        stats.append(
            f"<em>MEM:</em> {naturalsize(memory.used)}/{naturalsize(memory.total)} ({memory.percent}% USED)"
        )

        disk = psutil.disk_usage('/')
        stats.append(
            f"<em>DISK:</em> {naturalsize(disk.used)}/{naturalsize(disk.total)} ({disk.percent}% USED)"
        )

        disk = psutil.disk_usage('/mnt/nas')
        stats.append(
            f"<em>NAS:</em> {naturalsize(disk.used)}/{naturalsize(disk.total)} ({disk.percent}% USED)"
        )

        uptime = normalize_seconds(time.time() - psutil.boot_time())
        stats.append(
            f"<em>UPTIME:</em> {int(uptime.days)} days, {int(uptime.hours)} hours, {int(uptime.minutes)} minutes"
        )

        return " / ".join(map(lambda stat: stat.replace(" ", "&nbsp;"), stats))


def normalize_seconds(seconds: float):
    (days, remainder) = divmod(seconds, 86400)
    (hours, remainder) = divmod(remainder, 3600)
    (minutes, seconds) = divmod(remainder, 60)
    return namedtuple("_", ("days", "hours", "minutes", "seconds"))(days, hours, minutes, seconds)
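
# A minimal wiring sketch, for illustration only: the module path and route below
# (sdbs_infra/urls.py, the "" route) are assumptions, not taken from this repository.
# A Django URLconf would typically expose IndexView like this:
#
#     from django.urls import path
#     from sdbs_infra.dashboard.views import IndexView
#
#     urlpatterns = [
#         path("", IndexView.as_view(), name="index"),
#     ]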