import asyncio
import socket
import time
from collections import namedtuple

import aiohttp
import psutil
from bs4 import BeautifulSoup
from django.views.generic import TemplateView
from humanize import naturalsize

from sdbs_infra.dashboard.models import Service, ServiceStatus, Link


class IndexView(TemplateView):
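    """Dashboard index view: configured links, service statuses, and basic VPS stats."""
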
    template_name = "index.html"

    def get_context_data(self, **kwargs):
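        """Expose processed links, processed services, and host stats to the template."""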
        return {
            'links': asyncio.run(self.process_links(list(Link.objects.all()))),
            'services': asyncio.run(self.process_services(list(Service.objects.all()))),
            'vps_stats': self.vps_stats()
        }

    async def process_links(self, links):
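        """Build a template dict per link, fetching the page to scrape a favicon when no image is configured."""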
        result = []

        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5, sock_connect=1))

        for link in links:
            index_text = None

            if not link.image:
                try:
                    async with session.get(link.url) as response:
                        index_text = await response.text()
                except (asyncio.TimeoutError, aiohttp.ClientError):
                    # Unreachable links just end up without a favicon.
                    pass

            image = link.image.url if link.image else self.extract_favicon(link.url, index_text)

            result.append({
                'image_url': image,
                **vars(link)
            })

        await session.close()

        return result

    async def process_services(self, services):
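        """Build a template dict per service, with status from a local TCP port check or the index page's HTTP status."""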
        result = []

        session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5, sock_connect=1))

        for service in services:
            index_status, index_text = None, None

            if not service.port or not service.image:
                try:
                    async with session.get(service.url) as response:
                        index_status, index_text = response.status, await response.text()
                except (asyncio.TimeoutError, aiohttp.ClientError):
                    # Failed or timed-out requests leave index_status as None.
                    pass

            if service.port:
                # Prefer a plain TCP check when a port is configured.
                a_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                location = ("localhost", service.port)
                result_of_check = a_socket.connect_ex(location)
                a_socket.close()

                if result_of_check == 0:
                    status = ServiceStatus.OK
                else:
                    status = ServiceStatus.DOWN
            elif index_status:
                status = ServiceStatus.OK if index_status == 200 else ServiceStatus.DOWN
            else:
                status = ServiceStatus.UNKNOWN

            image = service.image.url if service.image else self.extract_favicon(service.url, index_text)

            result.append({
                'status': status.value,
                'image_url': image,
                **vars(service)
            })

        await session.close()

        return result

    @staticmethod
    def extract_favicon(url, index_text):
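        """Pull a favicon URL out of the page's <link> tags, preferring apple-touch-icon, then shortcut, then icon."""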
        if not index_text:
            return None

        parsed_html = BeautifulSoup(index_text, features="html.parser")
        link_tags = parsed_html.find_all('link')

        for rel in ['apple-touch-icon', 'shortcut', 'icon']:
            for link_tag in link_tags:
                if rel in link_tag.attrs.get('rel', []):
                    href = link_tag.attrs['href']

                    if url not in href:
                        # Relative href: glue it onto the link/service base URL.
                        image = url + (href if href.startswith("/") else f"/{href}")
                    else:
                        image = href

                    return image

    # noinspection PyListCreation
    @staticmethod
    def vps_stats():
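        """Render load average, memory, disk, and uptime into a single HTML stats line."""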
        stats = []

        stats.append(f"<em>LOAD AVG:</em> {', '.join(map(str, psutil.getloadavg()))}")

        memory = psutil.virtual_memory()
        stats.append(
            f"<em>MEM:</em> {naturalsize(memory.used)}/{naturalsize(memory.total)} ({memory.percent}% USED)"
        )

        disk = psutil.disk_usage('/')
        stats.append(
            f"<em>DISK:</em> {naturalsize(disk.used)}/{naturalsize(disk.total)} ({disk.percent}% USED)"
        )

        uptime = normalize_seconds(time.time() - psutil.boot_time())
        stats.append(
            f"<em>UPTIME:</em> {int(uptime.days)} days, {int(uptime.hours)} hours, {int(uptime.minutes)} minutes"
        )

        # Non-breaking spaces keep each stat on a single line in the template.
        return " / ".join(map(lambda stat: stat.replace(" ", "&nbsp;"), stats))


def normalize_seconds(seconds: float):
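    """Split a number of seconds into days, hours, minutes and seconds.

    For example, normalize_seconds(93784) returns (days=1, hours=2, minutes=3, seconds=4).
    """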
    (days, remainder) = divmod(seconds, 86400)
    (hours, remainder) = divmod(remainder, 3600)
    (minutes, seconds) = divmod(remainder, 60)

    return namedtuple("_", ("days", "hours", "minutes", "seconds"))(days, hours, minutes, seconds)