sdbs-infra/sdbs_infra/dashboard/views.py

import asyncio
import socket
import time
from collections import namedtuple
from datetime import datetime
from urllib.parse import urlparse

import aiohttp
import psutil
from aiohttp import ClientConnectorError
from bs4 import BeautifulSoup
from django.views.generic import TemplateView
from humanize import naturalsize

from sdbs_infra import settings
from sdbs_infra.dashboard.models import Service, Status, Link, Machine


class IndexView(TemplateView):
    template_name = "index.html"

    def get_context_data(self, **kwargs):
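        """Build the dashboard context: links, services, machines, and host stats."""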
        return {
            'links': asyncio.run(self.process_links(list(Link.objects.all()))),
            'services': asyncio.run(self.process_services(list(Service.objects.all()))),
            'machines': asyncio.run(self.process_machines(list(Machine.objects.all()))),
            'vps_stats': self.vps_stats()
        }

    async def process_links(self, links):
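        """Resolve an image URL for each link, fetching its page to find a favicon when needed."""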
        result = []
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5, sock_connect=1))
        for link in links:
            index_text = None
            if not link.image:
                # Fetch the page body only when we need it to discover a favicon.
                try:
                    async with session.get(link.url) as response:
                        index_text = await response.text()
                except (asyncio.TimeoutError, ClientConnectorError):
                    pass

            image = link.image.url if link.image else self.extract_favicon(link.url, index_text)
            result.append({
                'image_url': image,
                **vars(link)
            })
        await session.close()
        return result

    async def process_services(self, services):
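        """Check each service via a local TCP port probe or an HTTP GET, and resolve its image."""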
        result = []
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5, sock_connect=1))
        for service in services:
            index_status, index_text = None, None
            if not service.port or not service.image:
                try:
                    async with session.get(service.url) as response:
                        index_status, index_text = response.status, await response.text()
                except (asyncio.TimeoutError, ClientConnectorError):
                    pass

            if service.port:
                # A configured port wins: probe it directly on localhost, with a
                # short timeout so a wedged port cannot stall the whole page.
                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as a_socket:
                    a_socket.settimeout(1)
                    result_of_check = a_socket.connect_ex(("localhost", service.port))
                status = Status.OK if result_of_check == 0 else Status.DOWN
            elif index_status:
                status = Status.OK if index_status == 200 else Status.DOWN
            else:
                status = Status.UNKNOWN

            image = service.image.url if service.image else self.extract_favicon(service.url, index_text)
            result.append({
                'status': status.value,
                'image_url': image,
                **vars(service)
            })
        await session.close()
        return result

    async def process_machines(self, machines):
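        """Pull each machine's status and last ping from the healthchecks.io Checks API."""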
        result = []
        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5, sock_connect=1), headers={
            'X-Api-Key': settings.HEALTCHECKS_API_KEY
        })
        for machine in machines:
            status = Status.UNKNOWN
            last_ping = None
            if settings.HEALTCHECKS_API_KEY and machine.healthcheck_id:
                try:
                    async with session.get(
                            f"https://healthchecks.io/api/v1/checks/{machine.healthcheck_id}") as response:
                        check = await response.json()
                        status = {
                            'up': Status.OK,
                            'down': Status.DOWN
                        }.get(check.get('status'), Status.UNKNOWN)
                        # last_ping may be null for a check that has never pinged.
                        if check.get('last_ping'):
                            last_ping = datetime.fromisoformat(check['last_ping'])
                except (asyncio.TimeoutError, ClientConnectorError):
                    pass

            result.append({
                'status': status.value,
                'last_ping': last_ping,
                **vars(machine)
            })
        await session.close()
        return result

    @staticmethod
    def extract_favicon(url, index_text):
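        """Find a favicon URL in the page's <link> tags, preferring apple-touch-icon."""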
        if not index_text:
            return None

        scheme, netloc, *_ = urlparse(url)
        base_url = (f"{scheme}://" if scheme else "") + netloc

        parsed_html = BeautifulSoup(index_text, features="html.parser")
        link_tags = parsed_html.find_all('link')
        for rel in ['apple-touch-icon', 'shortcut', 'icon']:
            for link_tag in link_tags:
                if rel in link_tag.attrs.get('rel', []):
                    href = link_tag.attrs['href']
                    # Absolutize relative hrefs; leave absolute and protocol-relative URLs as-is.
                    if netloc not in href and not (href.startswith("//") or href.startswith("http")):
                        image = base_url + (href if href.startswith("/") else f"/{href}")
                    else:
                        image = href
                    return image
        return None

    # noinspection PyListCreation
    @staticmethod
    def vps_stats():
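        """Summarize host load, memory, disk, and uptime as an HTML snippet."""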
        stats = []
        stats.append(f"<em>LOAD AVG:</em> {', '.join(map(str, psutil.getloadavg()))}")

        memory = psutil.virtual_memory()
        stats.append(
            f"<em>MEM:</em> {naturalsize(memory.used)}/{naturalsize(memory.total)} ({memory.percent}% USED)"
        )

        disk = psutil.disk_usage('/')
        stats.append(
            f"<em>DISK:</em> {naturalsize(disk.used)}/{naturalsize(disk.total)} ({disk.percent}% USED)"
        )

        uptime = normalize_seconds(time.time() - psutil.boot_time())
        stats.append(
            f"<em>UPTIME:</em> {int(uptime.days)} days, {int(uptime.hours)} hours, {int(uptime.minutes)} minutes"
        )

        # Non-breaking spaces keep each stat from wrapping in the template.
        return " / ".join(map(lambda stat: stat.replace(" ", "&nbsp;"), stats))


def normalize_seconds(seconds: float):
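    """Break a duration in seconds into days, hours, minutes, and seconds."""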
    (days, remainder) = divmod(seconds, 86400)
    (hours, remainder) = divmod(remainder, 3600)
    (minutes, seconds) = divmod(remainder, 60)
    return namedtuple("_", ("days", "hours", "minutes", "seconds"))(days, hours, minutes, seconds)