forked from gooberinc/goober
sigh
This commit is contained in:
parent 9a4ac706b9
commit b6449cfa28
6 changed files with 25 additions and 62 deletions
@@ -1,4 +1,6 @@
{
    "memory_file_valid": "The memory.json file is valid!",
    "file_aint_uft8": "File is not valid UTF-8 text. Might be binary or corrupted.",
    "psutil_not_installed": "Memory check skipped.",
    "not_cloned": "Goober is not cloned! Please clone it from GitHub.",
    "checks_disabled": "Checks are disabled!",

@@ -1,4 +1,6 @@
{
    "memory_file_valid": "memory.json on toimiva!",
    "file_aint_uft8": "Tiedosto ei ole UTF-8 tekstiä. Saattaa olla binääriä tai korruptoitunut.",
    "active_users:": "Aktiiviset käyttäjät:",
    "cog_fail2": "Moduulin lataaminen epäonnistui:",
    "command_ran_s": "Info: {interaction.user} suoritti",

@@ -1,4 +1,6 @@
{
    "memory_file_valid": "Il file JSON è valido!",
    "file_aint_utf8": "Il file non è un UTF-8 valido. Forse è binario?",
    "psutil_not_installed": "Controllo memoria saltato.",
    "not_cloned": "Goober non è stato clonato! Clonalo da GitHub.",
    "checks_disabled": "I controlli sono disabilitati!",

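All three hunks above add matching keys to per-language locale JSON files. For context, a minimal sketch of how such a file could back the _() helper that the Python hunks below call; the loader and the file path are assumptions for illustration, not goober's actual i18n code:

import json

def make_translator(path):
    # Load one locale file (key -> message) and fall back to the key itself.
    with open(path, encoding="utf-8") as f:
        strings = json.load(f)
    return lambda key: strings.get(key, key)

_ = make_translator("locales/en.json")  # hypothetical path
print(_("memory_file_valid"))           # "The memory.json file is valid!"
print(_("psutil_not_installed"))        # "Memory check skipped."
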
@@ -49,7 +49,7 @@ arch = platform.machine()
slash_commands_enabled = True # 100% broken, its a newer enough version so its probably enabled by default.... fix this at somepoint or hard code it in goober central code
launched = False
latest_version = "0.0.0"
local_version = "2.3.2"
local_version = "2.3.3"
os.environ['gooberlocal_version'] = local_version
REACT = os.getenv("REACT")
if get_git_branch() == "dev":

@@ -59,7 +59,8 @@ def check_requirements():
    PACKAGE_ALIASES = {
        "discord": "discord.py",
        "better_profanity": "better-profanity",
        "dotenv": "python-dotenv"
        "dotenv": "python-dotenv",
        "pil": "pillow"
    }

    parent_dir = os.path.dirname(os.path.abspath(__file__))

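The alias map exists because the module you import is not always the distribution pip installs: dotenv ships as python-dotenv, PIL as pillow. The trailing comma added to the "dotenv" entry is needed once the new "pil" entry follows it. A small illustration of the lookup, reusing the same dict shape; the extra "requests" name is only there as an example of an unaliased package:

PACKAGE_ALIASES = {
    "discord": "discord.py",
    "better_profanity": "better-profanity",
    "dotenv": "python-dotenv",
    "pil": "pillow",
}

for import_name in ("dotenv", "pil", "requests"):
    # Unknown names fall through unchanged.
    print(import_name, "->", PACKAGE_ALIASES.get(import_name, import_name))
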
@@ -71,36 +72,13 @@ def check_requirements():

    with open(requirements_path, 'r') as f:
        lines = f.readlines()
    requirements = {
        line.strip() for line in lines
        if line.strip() and not line.startswith('#')
    }

    cogs_dir = os.path.abspath(os.path.join(parent_dir, '..', 'assets', 'cogs'))
    if os.path.isdir(cogs_dir):
        for filename in os.listdir(cogs_dir):
            if filename.endswith('.py'):
                filepath = os.path.join(cogs_dir, filename)
                with open(filepath, 'r', encoding='utf-8') as f:
                    try:
                        tree = ast.parse(f.read(), filename=filename)
                        for node in ast.walk(tree):
                            if isinstance(node, ast.Import):
                                for alias in node.names:
                                    pkg = alias.name.split('.')[0]
                                    if pkg in STD_LIB_MODULES or pkg == 'modules':
                                        continue
                                    requirements.add(pkg)
                            elif isinstance(node, ast.ImportFrom):
                                if node.module:
                                    pkg = node.module.split('.')[0]
                                    if pkg in STD_LIB_MODULES or pkg == 'modules':
                                        continue
                                    requirements.add(pkg)
                    except Exception as e:
                        logger.warning(f"{(_('warning_failed_parse_imports')).format(filename=filename, error=e)}")
    else:
        logger.warning(f"{(_('cogs_dir_not_found')).format(path=cogs_dir)}")
    requirements = set()
    for line in lines:
        line = line.strip()
        if line and not line.startswith('#'):
            base_pkg = line.split('==')[0].lower()
            aliased_pkg = PACKAGE_ALIASES.get(base_pkg, base_pkg)
            requirements.add(aliased_pkg)

    installed_packages = {dist.metadata['Name'].lower() for dist in importlib.metadata.distributions()}
    missing = []

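This hunk drops the AST walk over assets/cogs/*.py, so requirements now come only from requirements.txt, normalized through PACKAGE_ALIASES and compared against installed distributions. A self-contained sketch of that new path, assuming a requirements.txt sits in the working directory; the alias dict here is abbreviated:

import importlib.metadata

PACKAGE_ALIASES = {"discord": "discord.py", "dotenv": "python-dotenv", "pil": "pillow"}

def read_requirements(path="requirements.txt"):
    requirements = set()
    with open(path, "r") as f:
        for line in f:
            line = line.strip()
            if line and not line.startswith("#"):
                base_pkg = line.split("==")[0].lower()  # drop a pinned version, if any
                requirements.add(PACKAGE_ALIASES.get(base_pkg, base_pkg))
    return requirements

installed = {dist.metadata["Name"].lower() for dist in importlib.metadata.distributions()}
missing = sorted(read_requirements() - installed)
print(missing if missing else "all requirements satisfied")

Note that splitting on "==" ignores other specifiers such as ">=" or extras; that limitation is inherited from the original loop.
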
@@ -110,7 +88,7 @@ def check_requirements():
            print((_('std_lib_local_skipped')).format(package=req))
            continue

        check_name = PACKAGE_ALIASES.get(req, req).lower()
        check_name = req.lower()

        if check_name in installed_packages:
            logger.info(f"{_('ok_installed').format(package=check_name)} {check_name}")

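Here the alias lookup is swapped for a plain req.lower(). That only stays correct because the earlier hunk now applies PACKAGE_ALIASES when the requirements set is built; if the set held raw import names, the plain lookup would miss renamed distributions. A tiny illustration of the difference, with assumed names:

PACKAGE_ALIASES = {"discord": "discord.py", "dotenv": "python-dotenv"}
installed_packages = {"discord.py", "python-dotenv"}

req = "discord"  # raw import name, not yet aliased
print(PACKAGE_ALIASES.get(req, req).lower() in installed_packages)  # True
print(req.lower() in installed_packages)                            # False
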
@@ -122,31 +100,25 @@ def check_requirements():
        logger.error(_('missing_packages_detected'))
        for pkg in missing:
            print(f" - {pkg}")
        logger.info((_('telling_goober_central')).format(url=VERSION_URL))
        payload = {
            "name": NAME,
            "version": local_version,
            "slash_commands": f"{slash_commands_enabled}\n\n**Error**\nMissing packages have been detected, Failed to start",
            "token": gooberTOKEN
        }
        try:
            requests.post(VERSION_URL + "/ping", json=payload) # type: ignore
        except Exception as e:
            logger.error(f"{(_('failed_to_contact')).format(url=VERSION_URL, error=e)}")
        sys.exit(1)
    else:
        logger.info(_('all_requirements_satisfied'))

def check_latency():
    host = "1.1.1.1"

    system = platform.system()

    if system == "Windows":
        cmd = ["ping", "-n", "1", "-w", "1000", host]
        latency_pattern = r"Average = (\d+)ms"

    elif system == "Darwin":
        cmd = ["ping", "-c", "1", host]
        latency_pattern = r"time=([\d\.]+) ms"

    else:
        cmd = ["ping", "-c", "1", "-W", "1", host]
        latency_pattern = r"time[=<]\s*([\d\.]+)\s*ms"
        latency_pattern = r"time=([\d\.]+) ms"

    try:
        result = subprocess.run(

@@ -157,7 +129,6 @@ def check_latency():
        )

        if result.returncode == 0:
            print(result.stdout)
            match = re.search(latency_pattern, result.stdout)
            if match:
                latency_ms = float(match.group(1))

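The two hunks above cover check_latency: one ping per platform, then a regex over the command's stdout. The old else-branch pattern r"time[=<]\s*([\d\.]+)\s*ms" also matched outputs such as "time<1 ms"; the replacement r"time=([\d\.]+) ms" appears not to, which may matter on very fast links. A sketch of the extraction step against illustrative (not captured) ping lines:

import re

samples = {
    "Windows": ("Average = 23ms", r"Average = (\d+)ms"),
    "Darwin": ("64 bytes from 1.1.1.1: icmp_seq=0 ttl=58 time=12.3 ms", r"time=([\d\.]+) ms"),
    "Linux": ("64 bytes from 1.1.1.1: icmp_seq=1 ttl=58 time=9.81 ms", r"time=([\d\.]+) ms"),
}

for system, (output, pattern) in samples.items():
    match = re.search(pattern, output)
    latency_ms = float(match.group(1)) if match else None
    print(system, latency_ms)
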
@@ -197,16 +168,6 @@ def check_cpu():
        return
    logger.info((_('measuring_cpu')))
    cpu_per_core = psutil.cpu_percent(interval=1, percpu=True) # type: ignore
    for idx, core_usage in enumerate(cpu_per_core):
        bar_length = int(core_usage / 5)
        bar = '█' * bar_length + '-' * (20 - bar_length)
        if core_usage > 85:
            color = RED
        elif core_usage > 60:
            color = YELLOW
        else:
            color = GREEN
        logger.info((_('core_usage')).format(idx=idx, bar=bar, usage=core_usage))
    total_cpu = sum(cpu_per_core) / len(cpu_per_core)
    logger.info((_('total_cpu_usage')).format(usage=total_cpu))
    if total_cpu > 85:

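Judging by the shrinking hunk header (-197,16 +168,6), this change trims check_cpu roughly down to the total-usage figure. For reference, a standalone sketch of the per-core bar rendering shown above, assuming psutil is installed (the real function skips itself when it is not):

import psutil  # third-party dependency

# Sample one second of per-core usage and render a 20-character bar per core.
cpu_per_core = psutil.cpu_percent(interval=1, percpu=True)
for idx, core_usage in enumerate(cpu_per_core):
    bar_length = int(core_usage / 5)  # 0-100% maps to 0-20 characters
    bar = "█" * bar_length + "-" * (20 - bar_length)
    print(f"core {idx}: [{bar}] {core_usage:.1f}%")

total_cpu = sum(cpu_per_core) / len(cpu_per_core)
print(f"total: {total_cpu:.1f}%")
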
@@ -70,9 +70,7 @@ def get_latest_version_info():
        return None

# Check if an update is available and perform update if needed
def check_for_update():
    if ALIVEPING != "True":
        return
def check_for_update():
    global latest_version, local_version, launched

    latest_version_info = get_latest_version_info()

@@ -87,12 +85,10 @@ def check_for_update():
    if not latest_version or not download_url:
        logger.error(f"{RED}{_('invalid_server')}{RESET}")
        return None, None

    # Check if local_version is valid
    if local_version == "0.0.0" or None:
        logger.error(f"{RED}{_('cant_find_local_version')}{RESET}")
        return

    # Compare local and latest versions
    if local_version < latest_version:
        logger.info(f"{YELLOW}{_('new_version').format(latest_version=latest_version, local_version=local_version)}{RESET}")

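Two details in this last hunk are worth flagging. In the guard local_version == "0.0.0" or None, the trailing "or None" is always falsy on its own, so the condition reduces to the equality test. And local_version < latest_version compares strings lexicographically, which mis-orders versions once a component reaches two digits. A minimal sketch of a numeric comparison, assuming plain dotted integers with no pre-release tags:

def as_tuple(version):
    # "2.3.3" -> (2, 3, 3)
    return tuple(int(part) for part in version.split("."))

local_version, latest_version = "2.3.3", "2.10.0"
print(local_version < latest_version)                        # False: string compare, "3" > "1"
print(as_tuple(local_version) < as_tuple(latest_version))    # True: numeric compare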