commit before utter laptop death

WhatDidYouExpect 2025-07-23 16:58:21 +02:00
parent 4e111b410d
commit d6b51c787a
11 changed files with 163 additions and 221 deletions


@@ -3,55 +3,75 @@ import json
 import markovify
 import pickle
 from modules.globalvars import *
-from modules.volta.main import _
 import logging
+from modules.volta.main import _
+from modules.settings import instance as settings_manager
+settings = settings_manager.settings
 logger = logging.getLogger("goober")
-def get_file_info(file_path: str) -> dict:
+# Get file size and line count for a given file path
+def get_file_info(file_path):
     try:
         file_size = os.path.getsize(file_path)
         with open(file_path, "r") as f:
             lines = f.readlines()
-        return {
-            "file_size_bytes": file_size,
-            "line_count": len(lines)
-        }
+        return {"file_size_bytes": file_size, "line_count": len(lines)}
     except Exception as e:
         return {"error": str(e)}
-def load_memory() -> list:
-    try:
-        with open(MEMORY_FILE, "r") as f:
-            return json.load(f)
-    except FileNotFoundError:
-        return []
-def save_memory(memory: list) -> None:
-    with open(MEMORY_FILE, "w") as f:
+# Load memory data from file, or use default dataset if not loaded yet
+def load_memory():
+    data = []
+    # Try to load data from MEMORY_FILE
+    try:
+        with open(settings["bot"]["active_memory"], "r") as f:
+            data = json.load(f)
+    except FileNotFoundError:
+        pass
+    return data
+# Save memory data to MEMORY_FILE
+def save_memory(memory):
+    with open(settings["bot"]["active_memory"], "w") as f:
         json.dump(memory, f, indent=4)
-def train_markov_model(memory: list, additional_data: list = None):
-    lines = [line for line in (memory or []) if isinstance(line, str)]
-    if additional_data:
-        lines.extend(line for line in additional_data if isinstance(line, str))
-    if not lines:
+def train_markov_model(memory, additional_data=None) -> markovify.NewlineText | None:
+    if not memory:
         return None
-    text = "\n".join(lines)
-    return markovify.NewlineText(text, state_size=2)
-def save_markov_model(model, filename: str = 'markov_model.pkl') -> None:
-    with open(filename, 'wb') as f:
+    filtered_memory = [line for line in memory if isinstance(line, str)]
+    if additional_data:
+        filtered_memory.extend(
+            line for line in additional_data if isinstance(line, str)
+        )
+    if not filtered_memory:
+        return None
+    text = "\n".join(filtered_memory)
+    model = markovify.NewlineText(text, state_size=2)
+    return model
+def save_markov_model(model, filename="markov_model.pkl"):
+    with open(filename, "wb") as f:
         pickle.dump(model, f)
     logger.info(f"Markov model saved to {filename}.")
-def load_markov_model(filename: str = 'markov_model.pkl'):
+def load_markov_model(filename="markov_model.pkl"):
     try:
-        with open(filename, 'rb') as f:
+        with open(filename, "rb") as f:
             model = pickle.load(f)
         logger.info(f"{_('model_loaded')} {filename}.{RESET}")
         return model
     except FileNotFoundError:
         logger.error(f"{filename} {_('not_found')}{RESET}")
         return None
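
For orientation, a minimal standalone sketch of the pipeline these helpers implement after this change: newline-joined memory strings go into a markovify model, the model is pickled to disk, then reloaded and sampled. The file names and sample memory below are illustrative assumptions, not part of the commit.

# Standalone sketch of the same pipeline; file names and sample data are made up.
import json
import pickle
import markovify

memory = [
    "the quick brown fox jumps over the lazy dog",
    "a lazy dog sleeps in the sun all day",
    "the quick fox is never lazy",
]

# Persist memory the way save_memory does: a JSON list of strings.
with open("memory.json", "w") as f:
    json.dump(memory, f, indent=4)

# Train the way train_markov_model does: keep only strings, join with newlines.
text = "\n".join(line for line in memory if isinstance(line, str))
model = markovify.NewlineText(text, state_size=2)

# Save and reload the model the way save_markov_model / load_markov_model do.
with open("markov_model.pkl", "wb") as f:
    pickle.dump(model, f)
with open("markov_model.pkl", "rb") as f:
    reloaded = pickle.load(f)

print(reloaded.make_sentence(tries=100))  # may print None on a corpus this small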