Merge pull request #12 from ctih1/main

commit 6004633966, 7 changed files with 152 additions and 62 deletions
.gitignore (vendored): 1 change

@@ -12,3 +12,4 @@ received_memory.json
 translation_report.txt
 translationcompleteness.py
 modules/volta
+log.txt
@@ -60,7 +60,7 @@ class GooberWeb(commands.Cog):
             try:
                 user = await self.bot.fetch_user(int(user_id))
                 blacklisted_users.append({
-                    "name": f"{user.name}#{user.discriminator}",
+                    "name": f"{user.name}",
                     "avatar_url": str(user.avatar.url) if user.avatar else str(user.default_avatar.url),
                     "id": user.id
                 })
@@ -158,7 +158,7 @@ class GooberWeb(commands.Cog):
         self._update_command_stats(command.name, interaction.user)

     def _update_command_stats(self, command_name, user):
-        self.last_command = f"{command_name} (by {user.name}#{user.discriminator})"
+        self.last_command = f"{command_name} (by {user.name})"
         self.last_command_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         if self.websockets:
             asyncio.create_task(self.update_clients())
@@ -380,7 +380,7 @@ class GooberWeb(commands.Cog):
         if owner_id:
             try:
                 owner = await self.bot.fetch_user(int(owner_id))
-                owner_username = f"{owner.name}#{owner.discriminator}"
+                owner_username = f"{owner.name}"
                 owner_pfp = str(owner.avatar.url) if owner and owner.avatar else ""
             except:
                 pass
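The hunks above drop the `#discriminator` suffix: since Discord's move to unique usernames, `User.discriminator` is the string "0" for migrated accounts, so the bare `user.name` is the sensible display value. A minimal sketch of a fallback helper that still shows a real tag when one exists (the helper name is illustrative, not part of this PR):

import discord

def display_tag(user: discord.abc.User) -> str:
    # Migrated accounts report discriminator "0"; bots and unmigrated
    # accounts may still carry a real four-digit tag worth showing.
    if user.discriminator not in (None, "0", "0000"):
        return f"{user.name}#{user.discriminator}"
    return user.name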
bot.py: 57 changes

@@ -11,9 +11,32 @@ import uuid
 import asyncio
 import sys
 from typing import List, Dict, Set, Optional, Tuple, Any, Union, Callable, Coroutine, TypeVar, Type

-import logging
 from modules.globalvars import *
 from modules.prestartchecks import start_checks
+from modules.logger import GooberFormatter
+import logging
+
+logger = logging.getLogger("goober")
+logger.setLevel(logging.DEBUG)
+
+console_handler = logging.StreamHandler()
+console_handler.setLevel(logging.DEBUG)
+console_handler.setFormatter(GooberFormatter())
+
+file_handler = logging.FileHandler("log.txt", mode="w+", encoding="UTF-8")
+file_handler.setLevel(logging.DEBUG)
+file_handler.setFormatter(GooberFormatter(colors=False))
+
+logger.addHandler(console_handler)
+logger.addHandler(file_handler)
+
+logger.debug("Testing logging")
+logger.info("Testing logging")
+logger.warning("Testing logging")
+logger.error("Testing logging")
+logger.critical("Testing logging")
+
+
 # Print splash text and check for updates
 print(splashtext) # Print splash text (from modules/globalvars.py)
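For context, `logging.getLogger("goober")` always returns the same named logger, so the handlers attached here are shared by every module that asks for that name. A minimal sketch of how a hypothetical module reuses it with no extra configuration:

import logging

logger = logging.getLogger("goober")  # same logger object configured in bot.py

def example():
    # formatted by GooberFormatter and written to both the console and log.txt
    logger.info("doing something")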
@@ -65,7 +88,7 @@ bot: commands.Bot = commands.Bot(
 memory: List[str] = load_memory()
 markov_model: Optional[markovify.Text] = load_markov_model()
 if not markov_model:
-    print(f"{RED}{(_('markov_model_not_found'))}{RESET}")
+    logger.error(_('markov_model_not_found'))
     memory = load_memory()
     markov_model = train_markov_model(memory)

@@ -79,9 +102,9 @@ async def load_cogs_from_folder(bot, folder_name="assets/cogs"):
             module_path = folder_name.replace("/", ".").replace("\\", ".") + f".{cog_name}"
             try:
                 await bot.load_extension(module_path)
-                print(f"{GREEN}{(_('loaded_cog'))} {cog_name}{RESET}")
+                logger.info(f"{(_('loaded_cog'))} {cog_name}")
             except Exception as e:
-                print(f"{RED}{(_('cog_fail'))} {cog_name} {e}{RESET}")
+                logger.error(f"{(_('cog_fail'))} {cog_name} {e}")
                 traceback.print_exc()

 async def fetch_active_users() -> str:
@@ -92,7 +115,7 @@ async def fetch_active_users() -> str:
         else:
             return "?"
     except Exception as e:
-        print(f"{RED}{(_('error_fetching_active_users'))}{RESET} {e}")
+        logger.e(f"{_('error_fetching_active_users')} {RESET} {e}")
         return "?"

 async def send_alive_ping_periodically() -> None:
@@ -100,7 +123,7 @@ async def send_alive_ping_periodically() -> None:
         try:
             requests.post(f"{VERSION_URL}/aliveping", json={"name": NAME})
         except Exception as e:
-            print(f"{RED}{(_('error_sending_alive_ping'))}{RESET} {e}")
+            logger.error(f"{(_('error_sending_alive_ping'))}{RESET} {e}")
         await asyncio.sleep(60)

 # Event: Called when the bot is ready
@@ -117,21 +140,21 @@ async def on_ready() -> None:
     await load_cogs_from_folder(bot)
     try:
         synced: List[discord.app_commands.AppCommand] = await bot.tree.sync()
-        print(f"{GREEN}{_('synced_commands')} {len(synced)} {(_('synced_commands2'))} {RESET}")
+        logger.info(f"{_('synced_commands')} {len(synced)} {(_('synced_commands2'))}")
         slash_commands_enabled = True
         ping_server() # ping_server from modules/central.py

         active_users: str = await fetch_active_users()
-        print(f"{GREEN}{(_('active_users:'))} {active_users}{RESET}")
-        print(f"{GREEN}{(_('started')).format(name=NAME)}{RESET}")
+        logger.info(f"{(_('active_users:'))} {active_users}")
+        logger.info(f"{(_('started')).format(name=NAME)}")

         bot.loop.create_task(send_alive_ping_periodically())
     except discord.errors.Forbidden as perm_error:
-        print(f"{RED}Permission error while syncing commands: {perm_error}{RESET}")
-        print(f"{RED}Make sure the bot has the 'applications.commands' scope and is invited with the correct permissions.{RESET}")
+        logger.error(f"Permission error while syncing commands: {perm_error}")
+        logger.error("Make sure the bot has the 'applications.commands' scope and is invited with the correct permissions.")
         quit()
     except Exception as e:
-        print(f"{RED}{(_('fail_commands_sync'))} {e}{RESET}")
+        logger.error(f"{_('fail_commands_sync')} {e}")
         traceback.print_exc()
         quit()

@@ -219,7 +242,7 @@ async def talk(ctx: commands.Context, sentence_size: int = 5) -> None:
            combined_message: str = f"{coherent_response}\n[jif]({gif_url})"
        else:
            combined_message: str = coherent_response
-        print(combined_message)
+        logger.info(combined_message)
         os.environ['gooberlatestgen'] = combined_message
         await send_message(ctx, combined_message)
     else:
@@ -375,7 +398,7 @@ async def on_message(message: discord.Message) -> None:
         return

     if message.content.startswith((f"{PREFIX}talk", f"{PREFIX}mem", f"{PREFIX}help", f"{PREFIX}stats", f"{PREFIX}")):
-        print(f"{(_('command_ran')).format(message=message)}")
+        logger.info(f"{(_('command_ran')).format(message=message)}")
         await bot.process_commands(message)
         return

@@ -399,14 +422,14 @@ async def on_message(message: discord.Message) -> None:
         try:
             await message.add_reaction(emoji)
         except Exception as e:
-            print(f"Failed to react with emoji: {e}")
+            logger.info(f"Failed to react with emoji: {e}")

     await bot.process_commands(message)

 # Event: Called on every interaction (slash command, etc.)
 @bot.event
 async def on_interaction(interaction: discord.Interaction) -> None:
-    print(f"{(_('command_ran_s')).format(interaction=interaction)}{interaction.data['name']}")
+    logger.info(f"{(_('command_ran_s')).format(interaction=interaction)}{interaction.data['name']}")

 # Global check: Block blacklisted users from running commands
 @bot.check
@@ -483,7 +506,7 @@ async def mem(ctx: commands.Context) -> None:
         return
     command: str = """curl -F "reqtype=fileupload" -F "time=1h" -F "fileToUpload=@memory.json" https://litterbox.catbox.moe/resources/internals/api.php"""
     memorylitter: subprocess.CompletedProcess = subprocess.run(command, shell=True, capture_output=True, text=True)
-    print(memorylitter)
+    logger.debug(memorylitter)
     await send_message(ctx, memorylitter.stdout.strip())

 # Helper: Improve sentence coherence (simple capitalization fix)
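One note on the `fetch_active_users` hunk: the standard `logging.Logger` has no `e` method, so `logger.e(...)` would raise `AttributeError` if that except branch is ever hit. The intended call is presumably the level method:

logger.error(f"{_('error_fetching_active_users')} {e}")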
log.txt: 37 additions (new file)

@@ -0,0 +1,37 @@
+[ DEBUG ]: Testing logging [1;30m [07/08/25 17:01:45.827] (bot.py:<module>) [0m
+[ INFO ]: Testing logging [1;30m [07/08/25 17:01:45.828] (bot.py:<module>) [0m
+[ WARNING ]: Testing logging [1;30m [07/08/25 17:01:45.828] (bot.py:<module>) [0m
+[ ERROR ]: Testing logging [1;30m [07/08/25 17:01:45.829] (bot.py:<module>) [0m
+[ CRITICAL ]: Testing logging [1;30m [07/08/25 17:01:45.829] (bot.py:<module>) [0m
+[ INFO ]: Suoritetaan esikäynnistystarkistuksia... [1;30m [07/08/25 17:01:45.830] (prestartchecks.py:start_checks) [0m
+[ INFO ]: Model is installed. [1;30m [07/08/25 17:01:45.832] (prestartchecks.py:check_for_model) [0m
+[ INFO ]: OK aiohttp [1;30m [07/08/25 17:01:48.091] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK better-profanity [1;30m [07/08/25 17:01:48.091] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK discord.py [1;30m [07/08/25 17:01:48.092] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK discord.py [1;30m [07/08/25 17:01:48.092] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK python-dotenv [1;30m [07/08/25 17:01:48.093] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK markovify [1;30m [07/08/25 17:01:48.093] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK pillow [1;30m [07/08/25 17:01:48.093] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK psutil [1;30m [07/08/25 17:01:48.094] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK python-dotenv [1;30m [07/08/25 17:01:48.094] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK requests [1;30m [07/08/25 17:01:48.095] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK spacy [1;30m [07/08/25 17:01:48.095] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: OK spacytextblob [1;30m [07/08/25 17:01:48.095] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: Kaikki vaatimukset täyttyvät. [1;30m [07/08/25 17:01:48.096] (prestartchecks.py:check_requirements) [0m
+[ INFO ]: Ping osoitteeseen 1.1.1.1: 117.0 ms [1;30m [07/08/25 17:01:48.240] (prestartchecks.py:check_latency) [0m
+[ INFO ]: Muistin käyttö: 12.796562194824219 Gt / 15.846412658691406 Gt (80.753685205879%) [1;30m [07/08/25 17:01:48.248] (prestartchecks.py:check_memory) [0m
+[ INFO ]: Kokonaismuisti: 15.846412658691406 Gt [1;30m [07/08/25 17:01:48.249] (prestartchecks.py:check_memory) [0m
+[ INFO ]: Käytetty muisti: 12.796562194824219 Gt [1;30m [07/08/25 17:01:48.249] (prestartchecks.py:check_memory) [0m
+[ INFO ]: Muistitiedosto: 0.00022029876708984375 Mt [1;30m [07/08/25 17:01:48.250] (prestartchecks.py:check_memoryjson) [0m
+[ INFO ]: Mitataan suorittimen käyttöä ytimittäin... [1;30m [07/08/25 17:01:48.250] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 0: [██------------------] 14.1% [1;30m [07/08/25 17:01:49.251] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 1: [█-------------------] 9.4% [1;30m [07/08/25 17:01:49.252] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 2: [███-----------------] 17.2% [1;30m [07/08/25 17:01:49.252] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 3: [███-----------------] 18.8% [1;30m [07/08/25 17:01:49.253] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 4: [███████-------------] 35.4% [1;30m [07/08/25 17:01:49.253] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 5: [██████--------------] 31.8% [1;30m [07/08/25 17:01:49.253] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 6: [██████████----------] 54.4% [1;30m [07/08/25 17:01:49.254] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Ydin 7: [███████-------------] 39.1% [1;30m [07/08/25 17:01:49.254] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Kokonaisprosessorin käyttö: 27.525% [1;30m [07/08/25 17:01:49.255] (prestartchecks.py:check_cpu) [0m
+[ INFO ]: Jatketaan 5 sekunnin kuluttua... Paina mitä tahansa näppäintä ohittaaksesi. [1;30m [07/08/25 17:01:49.255] (prestartchecks.py:start_checks) [0m
+[ ERROR ]: [VOLTA] [33mMissing key: 'markov_model_not_found' in en.json![0m [1;30m [07/08/25 17:01:59.630] (bot.py:<module>) [0m
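The non-English lines above are the bot's Finnish-localized pre-start check output, kept verbatim since they are committed file content; roughly: running pre-start checks, all requirements satisfied, ping to 1.1.1.1, memory and per-core CPU usage figures, memory file size, and "continuing in 5 seconds, press any key to skip".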
@@ -10,6 +10,7 @@ ANSI = "\033["
 RED = f"{ANSI}31m"
 GREEN = f"{ANSI}32m"
 YELLOW = f"{ANSI}33m"
 PURPLE = f"{ANSI}35m"
+DEBUG = f"{ANSI}1;30m"
 RESET = f"{ANSI}0m"
 VERSION_URL = "https://goober.expect.ovh"
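The new DEBUG constant is the ANSI bold-black (dim grey) style that GooberFormatter uses for its timestamp block; a small sketch of how these escape constants compose:

ANSI = "\033["
DEBUG = f"{ANSI}1;30m"  # bold black / dim grey
RESET = f"{ANSI}0m"     # return the terminal to its default colour

print(f"{DEBUG}[07/08/25 17:01:45] (bot.py:<module>){RESET}")  # dimmed timestamp block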
modules/logger.py: 25 additions (new file)

@@ -0,0 +1,25 @@
+import logging
+from modules.globalvars import *
+
+class GooberFormatter(logging.Formatter):
+    def __init__(self, colors: bool = True): # Disable colors for TXT output
+        self.colors = colors
+
+        self._format = f"[ %(levelname)-8s ]: %(message)s {DEBUG} [%(asctime)s.%(msecs)03d] (%(filename)s:%(funcName)s) {RESET}"
+
+        self.FORMATS = {
+            logging.DEBUG: DEBUG + self._format + RESET,
+            logging.INFO: self._format.replace("%(levelname)-8s", f"{GREEN}%(levelname)-8s{RESET}"),
+            logging.WARNING: YELLOW + self._format + RESET,
+            logging.ERROR: RED + self._format + RESET,
+            logging.CRITICAL: PURPLE + self._format + RESET
+        }
+
+    def format(self, record: logging.LogRecord):
+        if self.colors:
+            log_fmt = self.FORMATS.get(record.levelno) # Add colors
+        else:
+            log_fmt = self._format # Just use the default format
+
+        formatter = logging.Formatter(log_fmt, datefmt="%m/%d/%y %H:%M:%S")
+        return formatter.format(record)
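A minimal usage sketch of the formatter, assuming modules.globalvars supplies the colour constants it interpolates. Note that `self._format` bakes the DEBUG/RESET escapes around the timestamp even when `colors=False`, which is why the committed log.txt above still contains the `[1;30m`/`[0m` sequences:

import logging
from modules.logger import GooberFormatter

record = logging.LogRecord("goober", logging.INFO, "example.py", 1, "hello", None, None)

print(GooberFormatter().format(record))              # level name wrapped in green ANSI codes
print(GooberFormatter(colors=False).format(record))  # level colours off, but the timestamp
                                                     # block still carries DEBUG/RESET escapes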
@@ -10,6 +10,9 @@ import json
 import re
 from spacy.util import is_package
 import importlib.metadata
+import logging
+
+logger = logging.getLogger("goober")

 # import shutil
 psutilavaliable = True
@@ -18,20 +21,20 @@ try:
     import psutil
 except ImportError:
     psutilavaliable = False
-    print(RED, _('missing_requests_psutil'), RESET)
+    logger.error(_('missing_requests_psutil'))

 def check_for_model():
     if is_package("en_core_web_sm"):
-        print("Model is installed.")
+        logger.info("Model is installed.")
     else:
-        print("Model is not installed.")
+        logger.info("Model is not installed.")


 def iscloned():
     if os.path.exists(".git"):
         return True
     else:
-        print(f"{RED}{(_('not_cloned'))}{RESET}")
+        logger.error(f"{_('not_cloned')}")
         sys.exit(1)

 def get_stdlib_modules():
@@ -63,7 +66,7 @@ def check_requirements():
     requirements_path = os.path.abspath(os.path.join(parent_dir, '..', 'requirements.txt'))

     if not os.path.exists(requirements_path):
-        print(f"{RED}{(_('requirements_not_found')).format(path=requirements_path)}{RESET}")
+        logger.error(f"{(_('requirements_not_found')).format(path=requirements_path)}")
         return

     with open(requirements_path, 'r') as f:
@@ -95,9 +98,9 @@ def check_requirements():
                     continue
                 requirements.add(pkg)
             except Exception as e:
-                print(f"{YELLOW}{(_('warning_failed_parse_imports')).format(filename=filename, error=e)}{RESET}")
+                logger.warning(f"{(_('warning_failed_parse_imports')).format(filename=filename, error=e)}")
     else:
-        print(f"{YELLOW}{(_('cogs_dir_not_found')).format(path=cogs_dir)}{RESET}")
+        logger.warning(f"{(_('cogs_dir_not_found')).format(path=cogs_dir)}")

     installed_packages = {dist.metadata['Name'].lower() for dist in importlib.metadata.distributions()}
     missing = []
@@ -110,16 +113,16 @@ def check_requirements():
         check_name = PACKAGE_ALIASES.get(req, req).lower()

         if check_name in installed_packages:
-            print(f"[ {GREEN}{(_('ok_installed')).format(package=check_name)}{RESET} ] {check_name}")
+            logger.info(f"{_('ok_installed').format(package=check_name)} {check_name}")
         else:
-            print(f"[ {RED}{(_('missing_package')).format(package=check_name)}{RESET} ] {check_name} {(_('missing_package2'))}")
+            logger.error(f"{(_('missing_package')).format(package=check_name)} {check_name} {(_('missing_package2'))}")
             missing.append(check_name)

     if missing:
-        print(RED, _('missing_packages_detected'), RESET)
+        logger.error(_('missing_packages_detected'))
         for pkg in missing:
             print(f" - {pkg}")
-        print((_('telling_goober_central')).format(url=VERSION_URL))
+        logger.info((_('telling_goober_central')).format(url=VERSION_URL))
         payload = {
             "name": NAME,
             "version": local_version,
@@ -129,10 +132,10 @@ def check_requirements():
         try:
             requests.post(VERSION_URL + "/ping", json=payload) # type: ignore
         except Exception as e:
-            print(f"{RED}{(_('failed_to_contact')).format(url=VERSION_URL, error=e)}{RESET}")
+            logger.error(f"{(_('failed_to_contact')).format(url=VERSION_URL, error=e)}")
             sys.exit(1)
     else:
-        print(_('all_requirements_satisfied'))
+        logger.info(_('all_requirements_satisfied'))

 def check_latency():
     host = "1.1.1.1"
@@ -158,16 +161,16 @@ def check_latency():
             match = re.search(latency_pattern, result.stdout)
             if match:
                 latency_ms = float(match.group(1))
-                print((_('ping_to')).format(host=host, latency=latency_ms))
+                logger.info((_('ping_to')).format(host=host, latency=latency_ms))
                 if latency_ms > 300:
-                    print(f"{YELLOW}{(_('high_latency'))}{RESET}")
+                    logger.warning(f"{(_('high_latency'))}")
             else:
-                print(f"{YELLOW}{(_('could_not_parse_latency'))}{RESET}")
+                logger.warning((_('could_not_parse_latency')))
         else:
            print(result.stderr)
-           print(f"{RED}{(_('ping_failed')).format(host=host)}{RESET}")
+           logger.error(f"{(_('ping_failed')).format(host=host)}{RESET}")
     except Exception as e:
-        print(f"{RED}{(_('error_running_ping')).format(error=e)}{RESET}")
+        logger.error((_('error_running_ping')).format(error=e))

 def check_memory():
     if psutilavaliable == False:
@@ -178,21 +181,21 @@ def check_memory():
         used_memory = memory_info.used / (1024 ** 3)
         free_memory = memory_info.available / (1024 ** 3)

-        print((_('memory_usage')).format(used=used_memory, total=total_memory, percent=(used_memory / total_memory) * 100))
+        logger.info((_('memory_usage')).format(used=used_memory, total=total_memory, percent=(used_memory / total_memory) * 100))
         if used_memory > total_memory * 0.9:
             print(f"{YELLOW}{(_('memory_above_90')).format(percent=(used_memory / total_memory) * 100)}{RESET}")
-        print((_('total_memory')).format(total=total_memory))
-        print((_('used_memory')).format(used=used_memory))
+        logger.info((_('total_memory')).format(total=total_memory))
+        logger.info((_('used_memory')).format(used=used_memory))
         if free_memory < 1:
-            print(f"{RED}{(_('low_free_memory')).format(free=free_memory)}{RESET}")
+            logger.warning(f"{(_('low_free_memory')).format(free=free_memory)}")
             sys.exit(1)
     except ImportError:
-        print(_('psutil_not_installed')) # todo: translate this into italian and put it in the translations "psutil is not installed. Memory check skipped."
+        logger.error(_('psutil_not_installed')) # todo: translate this into italian and put it in the translations "psutil is not installed. Memory check skipped."

 def check_cpu():
     if psutilavaliable == False:
         return
-    print((_('measuring_cpu')))
+    logger.info((_('measuring_cpu')))
     cpu_per_core = psutil.cpu_percent(interval=1, percpu=True) # type: ignore
     for idx, core_usage in enumerate(cpu_per_core):
         bar_length = int(core_usage / 5)
@@ -203,33 +206,33 @@ def check_cpu():
             color = YELLOW
         else:
             color = GREEN
-        print((_('core_usage')).format(idx=idx, bar=bar, usage=core_usage))
+        logger.info((_('core_usage')).format(idx=idx, bar=bar, usage=core_usage))
     total_cpu = sum(cpu_per_core) / len(cpu_per_core)
-    print((_('total_cpu_usage')).format(usage=total_cpu))
+    logger.info((_('total_cpu_usage')).format(usage=total_cpu))
     if total_cpu > 85:
-        print(f"{YELLOW}{(_('high_avg_cpu')).format(usage=total_cpu)}{RESET}")
+        logger.warning(f"{(_('high_avg_cpu')).format(usage=total_cpu)}")
     if total_cpu > 95:
-        print(f"{RED}{(_('really_high_cpu'))}{RESET}")
+        logger.error(_('really_high_cpu'))
         sys.exit(1)

 def check_memoryjson():
     try:
-        print((_('memory_file')).format(size=os.path.getsize(MEMORY_FILE) / (1024 ** 2)))
+        logger.info((_('memory_file')).format(size=os.path.getsize(MEMORY_FILE) / (1024 ** 2)))
         if os.path.getsize(MEMORY_FILE) > 1_073_741_824:
-            print(f"{YELLOW}{(_('memory_file_large'))}{RESET}")
+            logger.warning(f"{(_('memory_file_large'))}")
         try:
             with open(MEMORY_FILE, 'r', encoding='utf-8') as f:
                 json.load(f)
         except json.JSONDecodeError as e:
-            print(f"{RED}{(_('memory_file_corrupted')).format(error=e)}{RESET}")
-            print(f"{YELLOW}{(_('consider_backup_memory'))}{RESET}")
+            logger.error(f"{(_('memory_file_corrupted')).format(error=e)}")
+            logger.warning(f"{(_('consider_backup_memory'))}")
         except UnicodeDecodeError as e:
-            print(f"{RED}{(_('memory_file_encoding')).format(error=e)}{RESET}")
-            print(f"{YELLOW}{(_('consider_backup_memory'))}{RESET}")
+            logger.error(f"{(_('memory_file_encoding')).format(error=e)}")
+            logger.warning(f"{(_('consider_backup_memory'))}")
         except Exception as e:
-            print(f"{RED}{(_('error_reading_memory')).format(error=e)}{RESET}")
+            logger.error(f"{(_('error_reading_memory')).format(error=e)}")
     except FileNotFoundError:
-        print(f"{YELLOW}{(_('memory_file_not_found'))}{RESET}")
+        logger(f"{(_('memory_file_not_found'))}")

 def presskey2skip(timeout):
     if os.name == 'nt':
@@ -265,9 +268,9 @@ def presskey2skip(timeout):
     beta = beta
 def start_checks():
     if CHECKS_DISABLED == "True":
-        print(f"{YELLOW}{(_('checks_disabled'))}{RESET}")
+        logger.warning(f"{(_('checks_disabled'))}")
         return
-    print(_('running_prestart_checks'))
+    logger.info(_('running_prestart_checks'))
     check_for_model()
     iscloned()
     check_missing_translations()
@@ -279,13 +282,13 @@ def start_checks():
     if os.path.exists(".env"):
         pass
     else:
-        print(f"{YELLOW}{(_('env_file_not_found'))}{RESET}")
+        logger.warning(f"{(_('env_file_not_found'))}")
         sys.exit(1)
     if beta == True:
-        print(f"{YELLOW}this build isnt finished yet, some things might not work as expected{RESET}")
+        logger.warning(f"this build isnt finished yet, some things might not work as expected")
     else:
        pass
-    print(_('continuing_in_seconds').format(seconds=5))
+    logger.info(_('continuing_in_seconds').format(seconds=5))
    presskey2skip(timeout=5)
    os.system('cls' if os.name == 'nt' else 'clear')
    print(splashtext)
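One note on the `check_memoryjson` hunk: a `logging.Logger` instance is not callable, so `logger(f"...")` in the FileNotFoundError branch would itself raise `TypeError`; a level method is presumably what was meant, for example:

logger.warning(f"{(_('memory_file_not_found'))}")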