Merge pull request #7 from gooberinc/rewrite/spacy

merge it
This commit is contained in:
WhatDidYouExpect 2025-07-02 15:59:23 +02:00 committed by GitHub
commit 3f8db9e263
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 36 additions and 64 deletions

View file

@ -1,4 +1,7 @@
{
"active_users:": "Active users:",
"spacy_initialized": "spaCy and spacytextblob are ready.",
"spacy_model_not_found": "The spaCy model was not found! Downloading it....`",
"env_file_not_found": "The .env file was not found! Please create one with the required variables.",
"error_fetching_active_users": "Error fetching active users: {error}",
"error_sending_alive_ping": "Error sending alive ping: {error}",

View file

@ -1,4 +1,7 @@
{
"active_users:": "Utenti attivi:",
"spacy_initialized": "spaCy e spacytextblob sono pronti.",
"spacy_model_not_found": "Il modello spaCy non è stato trovato! Lo sto scaricando...",
"env_file_not_found": "Il file .env non è stato trovato! Crea un file con le variabili richieste.",
"error fetching_active_users": "Errore nel recupero degli utenti attivi:",
"error_sending_alive_ping": "Errore nell'invio di aliveping:",

6
bot.py
View file

@ -109,11 +109,11 @@ async def on_ready():
print(f"{GREEN}{get_translation(LOCALE, 'synced_commands')} {len(synced)} {get_translation(LOCALE, 'synced_commands2')} {RESET}")
slash_commands_enabled = True
ping_server() # ping_server from modules/central.py
# --- Show active users ---
# TODO: remove the leftover hard-coded Italian print below (superseded by the translated line)
active_users = await fetch_active_users()
print(f"{GREEN}Utenti attivi: {active_users}{RESET}")
print(f"{GREEN}{get_translation(LOCALE, 'active_users:')} {active_users}{RESET}")
print(f"{GREEN}{get_translation(LOCALE, 'started').format(name=NAME)}{RESET}")
# --- Start the periodic task ---
bot.loop.create_task(send_alive_ping_periodically())
except discord.errors.Forbidden as perm_error:
print(f"{RED}Permission error while syncing commands: {perm_error}{RESET}")

View file

@ -2,9 +2,6 @@ DISCORD_BOT_TOKEN=token
BOT_PREFIX="g."
PING_LINE="The Beretta fires fast and won't make you feel any better!"
BLACKLISTED_USERS=
cooldown=10800
hourlyspeak=1318263176134918246
ownerid=542701119948849163
USERTRAIN_ENABLED="true"
showmemenabled="true"
NAME="an instance of goober"

View file

@ -39,5 +39,5 @@ arch = platform.machine()
slash_commands_enabled = False
launched = False
latest_version = "0.0.0"
local_version = "1.0.6"
local_version = "1.0.7"
os.environ['gooberlocal_version'] = local_version

View file

@ -3,7 +3,7 @@ import re
from PIL import Image, ImageDraw, ImageFont
from modules.markovmemory import load_markov_model
from modules.sentenceprocessing import improve_sentence_coherence, rephrase_for_coherence
# TODO: add explanatory comments to this module
generated_sentences = set()
async def gen_image(input_image_path, sentence_size=5, max_attempts=10):

View file

@ -2,59 +2,43 @@ import re
from modules.globalvars import *
from modules.translations import *
import nltk
import nltk.data
import spacy
from spacy.tokens import Doc
from spacytextblob.spacytextblob import SpacyTextBlob
nlp = spacy.load("en_core_web_sm")
nlp.add_pipe("spacytextblob")
Doc.set_extension("polarity", getter=lambda doc: doc._.blob.polarity)
# Ensure required NLTK resources are available
def check_resources():
# Check for required NLTK resources and download if missing
resources = {
'vader_lexicon': 'sentiment/vader_lexicon',
'punkt_tab': 'tokenizers/punkt',
}
for resource, path in resources.items():
try:
nltk.data.find(path)
logger.info(f"{resource} is already installed.")
except Exception:
nltk.download(str(resource))
nlp = spacy.load("en_core_web_sm")
except OSError:
print(get_translation(LOCALE, 'spacy_model_not_found'))
spacy.cli.download("en_core_web_sm")
nlp = spacy.load("en_core_web_sm")
if "spacytextblob" not in nlp.pipe_names:
nlp.add_pipe("spacytextblob")
print(get_translation(LOCALE, 'spacy_initialized'))
check_resources()
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from nltk.tokenize import word_tokenize
# Initialize the sentiment analyzer
analyzer = SentimentIntensityAnalyzer()
def is_positive(sentence):
"""
Determines if the sentiment of the sentence is positive.
logger.infos debug information and returns True if sentiment score > 0.1.
"""
scores = analyzer.polarity_scores(sentence)
sentiment_score = scores['compound']
doc = nlp(sentence)
sentiment_score = doc._.polarity # from spacytextblob
# logger.info debug message with sentiment score
debug_message = f"{DEBUG}{get_translation(LOCALE, 'sentence_positivity')} {sentiment_score}{RESET}"
logger.info(debug_message)
print(debug_message)
return sentiment_score > 0.1
async def send_message(ctx, message=None, embed=None, file=None, edit=False, message_reference=None):
"""
Sends or edits a message in a Discord context.
Handles both slash command and regular command contexts.
"""
if edit and message_reference:
try:
# Editing the existing message
await message_reference.edit(content=message, embed=embed)
except Exception as e:
await ctx.send(f"{RED}{get_translation(LOCALE, 'edit_fail')} {e}{RESET}")
else:
if hasattr(ctx, "respond"):
# For slash command contexts
sent_message = None
if embed:
sent_message = await ctx.respond(embed=embed, ephemeral=False)
@ -63,7 +47,6 @@ async def send_message(ctx, message=None, embed=None, file=None, edit=False, mes
if file:
sent_message = await ctx.respond(file=file, ephemeral=False)
else:
# For regular command contexts
sent_message = None
if embed:
sent_message = await ctx.send(embed=embed)
@ -74,34 +57,19 @@ async def send_message(ctx, message=None, embed=None, file=None, edit=False, mes
return sent_message
def append_mentions_to_18digit_integer(message):
"""
Removes 18-digit integers from the message (commonly used for Discord user IDs).
"""
pattern = r'\b\d{18}\b'
return re.sub(pattern, lambda match: f"", message)
return re.sub(pattern, lambda match: "", message)
def preprocess_message(message):
"""
Preprocesses the message by removing 18-digit integers and non-alphanumeric tokens.
Returns the cleaned message as a string.
"""
message = append_mentions_to_18digit_integer(message)
tokens = word_tokenize(message)
tokens = [token for token in tokens if token.isalnum()]
doc = nlp(message)
tokens = [token.text for token in doc if token.is_alpha or token.is_digit]
return " ".join(tokens)
def improve_sentence_coherence(sentence):
"""
Improves sentence coherence by capitalizing isolated 'i' pronouns.
"""
sentence = sentence.replace(" i ", " I ")
return sentence
return re.sub(r'\bi\b', 'I', sentence)
def rephrase_for_coherence(sentence):
"""
Rephrases the sentence for coherence by joining words with spaces.
(Currently a placeholder function.)
"""
words = sentence.split()
coherent_sentence = " ".join(words)
return coherent_sentence

View file

@ -1,6 +1,7 @@
discord.py
markovify
nltk
spacy
spacytextblob
requests
psutil
better_profanity