added translations + finnish 1
This commit is contained in:
parent
59c7494675
commit
01ba29c944
12 changed files with 591 additions and 521 deletions
18 cogs/README.md Normal file
@@ -0,0 +1,18 @@
# goobers custom commands

[Hello World!](https://github.com/WhatDidYouExpect/goobercustomcommands/blob/main/customcommands/hello.py)
by expect

[WhoAmI (lists username and nickname)](https://github.com/WhatDidYouExpect/goober/blob/main/customcommands/whoami.py)
by PowerPCFan

[Cog Manager](https://github.com/WhatDidYouExpect/goober/blob/main/customcommands/cogmanager.py)
by expect

[TensorFlow integration](https://github.com/WhatDidYouExpect/goober/blob/main/customcommands/tf.py)
by SuperSilly2 (requires Python 3.7 - 3.10, tensorflow-metal/tensorflow-gpu and tensorflow/tensorflow-macos)

[Web Scraper](https://raw.githubusercontent.com/WhatDidYouExpect/goober/refs/heads/main/customcommands/webscraper.py)
by expect (requires goober version 0.11.7.2 or higher)

[Status Changer](https://raw.githubusercontent.com/WhatDidYouExpect/goober/refs/heads/main/customcommands/songchanger.py)
by expect (requires goober version 0.11.8 or higher)
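To install one of these, the raw file is saved into the bot's cogs/ folder and then loaded at runtime with the Cog Manager's load command from the next file. A rough sketch, assuming the cogs/ layout used in this commit and a "!" prefix (both assumptions, not confirmed by the README):

# sketch: install the Web Scraper custom command (target path and prefix are assumptions)
import urllib.request

RAW_URL = "https://raw.githubusercontent.com/WhatDidYouExpect/goober/refs/heads/main/customcommands/webscraper.py"
urllib.request.urlretrieve(RAW_URL, "cogs/webscraper.py")  # drop the cog into the bot's cogs/ folder
# then, from Discord (owner only): !load webscraper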
66 cogs/cogmanager.py Normal file
@@ -0,0 +1,66 @@
import discord
from discord.ext import commands
import os
from config import ownerid

class CogManager(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def load(self, ctx, cog_name: str = None):
        """Loads a cog from the cogs package by name (owner only)."""
        if ctx.author.id != ownerid:
            await ctx.send("You do not have permission to use this command.")
            return
        if cog_name is None:
            await ctx.send("Please provide the cog name to load.")
            return
        try:
            await self.bot.load_extension(f"cogs.{cog_name}")
            await ctx.send(f"Loaded cog `{cog_name}` successfully.")
        except Exception as e:
            await ctx.send(f"Error loading cog `{cog_name}`: {e}")

    @commands.command()
    async def unload(self, ctx, cog_name: str = None):
        """Unloads a cog by name (owner only)."""
        if ctx.author.id != ownerid:
            await ctx.send("You do not have permission to use this command.")
            return
        if cog_name is None:
            await ctx.send("Please provide the cog name to unload.")
            return
        try:
            await self.bot.unload_extension(f"cogs.{cog_name}")
            await ctx.send(f"Unloaded cog `{cog_name}` successfully.")
        except Exception as e:
            await ctx.send(f"Error unloading cog `{cog_name}`: {e}")

    @commands.command()
    async def reload(self, ctx, cog_name: str = None):
        """Reloads a cog by unloading and loading it again (owner only)."""
        if ctx.author.id != ownerid:
            await ctx.send("You do not have permission to use this command.")
            return
        if cog_name is None:
            await ctx.send("Please provide the cog name to reload.")
            return
        try:
            await self.bot.unload_extension(f"cogs.{cog_name}")
            await self.bot.load_extension(f"cogs.{cog_name}")
            await ctx.send(f"Reloaded cog `{cog_name}` successfully.")
        except Exception as e:
            await ctx.send(f"Error reloading cog `{cog_name}`: {e}")

    @commands.command()
    async def listcogs(self, ctx):
        """Lists all currently loaded cogs in an embed."""
        cogs = list(self.bot.cogs.keys())
        if not cogs:
            await ctx.send("No cogs are currently loaded.")
            return

        embed = discord.Embed(title="Loaded Cogs", description="Here is a list of all currently loaded cogs:")
        embed.add_field(name="Cogs", value="\n".join(cogs), inline=False)
        await ctx.send(embed=embed)

async def setup(bot):
    await bot.add_cog(CogManager(bot))
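For context, a minimal sketch of how a discord.py 2.x bot might load this cog at startup. The prefix, intents, and token variable are assumptions for illustration only; goober's actual entry point is not part of this commit.

# sketch only: assumed startup wiring, not goober's real main file
import os
import discord
from discord.ext import commands

intents = discord.Intents.default()
intents.message_content = True  # prefix commands need to read message text

class GooberBot(commands.Bot):
    async def setup_hook(self):
        # extensions are loaded before the bot connects to the gateway
        await self.load_extension("cogs.cogmanager")  # registers load/unload/reload/listcogs

bot = GooberBot(command_prefix="!", intents=intents)
bot.run(os.getenv("DISCORD_TOKEN"))  # token variable name is an assumption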
13 cogs/hello.py Normal file
@@ -0,0 +1,13 @@
import discord
from discord.ext import commands

class Hello(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def hello(self, ctx):
        await ctx.send("Hello, world!")

async def setup(bot):
    await bot.add_cog(Hello(bot))
22 cogs/slashcomandexample.py Normal file
@@ -0,0 +1,22 @@
import discord
from discord.ext import commands
from discord import app_commands

class Ping(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @app_commands.command(name="slashcommand", description="slashcommandexample")
    async def ping(self, interaction: discord.Interaction):
        await interaction.response.defer()
        exampleembed = discord.Embed(
            title="Pong!!",
            description="The Beretta fires fast and won't make you feel any better!",
            color=discord.Color.blue()
        )
        # display_avatar always resolves to a URL, even if the user has no custom avatar
        exampleembed.set_footer(text=f"Requested by {interaction.user.name}", icon_url=interaction.user.display_avatar.url)

        await interaction.followup.send(embed=exampleembed)

async def setup(bot):
    await bot.add_cog(Ping(bot))
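Worth noting: an app_commands command declared in a cog only appears in Discord after the application command tree is synced somewhere in the host bot, which this commit does not include. A minimal sketch, assuming the same startup wiring as in the earlier example:

# sketch only: sync the tree after loading the slash-command cog
import discord
from discord.ext import commands

class GooberBot(commands.Bot):
    async def setup_hook(self):
        await self.load_extension("cogs.slashcomandexample")
        synced = await self.tree.sync()  # without a sync, /slashcommand never shows up
        print(f"Synced {len(synced)} application command(s)")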
33 cogs/songchanger.py Normal file
@@ -0,0 +1,33 @@
import discord
from discord.ext import commands
from config import RED, GREEN, RESET, LOCAL_VERSION_FILE
import os

def get_local_version():
    """Reads the locally installed goober version, falling back to 0.0.0."""
    if os.path.exists(LOCAL_VERSION_FILE):
        with open(LOCAL_VERSION_FILE, "r") as f:
            return f.read().strip()
    return "0.0.0"

local_version = get_local_version()

class songchange(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def changesong(self, ctx):
        # compare the installed version, not the version file path, against the minimum
        if local_version < "0.11.8":
            await ctx.send(f"Goober is too old! You need version 0.11.8 or newer, but you have {local_version}.")
            return
        await ctx.send("Check the terminal! (this does not persist across restarts)")
        # input() blocks the event loop until something is typed in the terminal
        song = input("\nEnter a song:\n")
        try:
            await self.bot.change_presence(activity=discord.Activity(type=discord.ActivityType.listening, name=f"{song}"))
            print(f"{GREEN}Changed song to {song}{RESET}")
        except Exception as e:
            print(f"{RED}An error occurred while changing songs..: {str(e)}{RESET}")

async def setup(bot):
    await bot.add_cog(songchange(bot))
340 cogs/tf.py Normal file
@@ -0,0 +1,340 @@
import discord
from discord.ext import commands
import os
from typing import List, TypedDict
import numpy as np
import json
from time import strftime, localtime
import pickle
import functools
import re
import time
import asyncio

ready: bool = True
MODEL_MATCH_STRING = "[0-9]{2}_[0-9]{2}_[0-9]{4}-[0-9]{2}_[0-9]{2}"

try:
    import tensorflow as tf
    from tensorflow import keras
    from keras.preprocessing.text import Tokenizer
    from keras_preprocessing.sequence import pad_sequences
    from keras.models import Sequential
    from keras.layers import Embedding, LSTM, Dense
    from keras.models import load_model
    from keras.backend import clear_session
    tf.config.optimizer.set_jit(True)
except ImportError:
    print("ERROR: Failed to import Tensorflow. Here is a list of required dependencies:", (
        "tensorflow==2.10.0\n"
        "(for Nvidia users: tensorflow-gpu==2.10.0)\n"
        "(for macOS: tensorflow-metal==0.6.0, tensorflow-macos==2.10.0)\n"
        "numpy~=1.23"
    ))
    ready = False

class TFCallback(keras.callbacks.Callback):
    def __init__(self, bot, progress_embed: discord.Embed, message):
        self.embed: discord.Embed = progress_embed
        self.bot: commands.Bot = bot
        self.message = message
        self.times: List[float] = [time.time()]

    def on_train_begin(self, logs=None):
        pass

    async def send_message(self, message: str, description: str, **kwargs):
        if "epoch" in kwargs:
            self.times.append(time.time())
            average_epoch_time: float = np.average(np.diff(np.array(self.times)))
            description = f"ETA: {round(average_epoch_time)}s"
        self.embed.add_field(name=f"<t:{round(time.time())}:t> - {message}", value=description, inline=False)
        await self.message.edit(embed=self.embed)

    def on_train_end(self, logs=None):
        self.bot.loop.create_task(self.send_message("Training stopped", "training has been stopped."))

    def on_epoch_begin(self, epoch, logs=None):
        self.bot.loop.create_task(self.send_message(f"Starting epoch {epoch}", "This might take a while", epoch=True))

    def on_epoch_end(self, epoch, logs=None):
        self.bot.loop.create_task(self.send_message(f"Epoch {epoch} ended", f"Accuracy: {round(logs.get('accuracy', 0.0), 4)}"))


class Ai:
    def __init__(self):
        model_path = settings.get("model_path")
        if model_path:
            self.__load_model(model_path)
        self.is_loaded = model_path is not None
        self.batch_size = 64

    def get_model_name_from_path(self, path: str):
        # return only the matched model name, e.g. 05_01_2025-22_31
        match: re.Match = re.search(MODEL_MATCH_STRING, path)
        return match.group(0)

    def generate_model_name(self) -> str:
        return strftime('%d_%m_%Y-%H_%M', localtime())

    def generate_model_abs_path(self, name: str):
        name = name or self.generate_model_name()
        return os.path.join(".", "models", name, "model.h5")

    def generate_tokenizer_abs_path(self, name: str):
        name = name or self.generate_model_name()
        return os.path.join(".", "models", name, "tokenizer.pkl")

    def generate_info_abs_path(self, name: str):
        name = name or self.generate_model_name()
        return os.path.join(".", "models", name, "info.json")


    def save_model(self, model, tokenizer, history, _name: str = None):
        name: str = _name or self.generate_model_name()
        os.makedirs(os.path.join(".", "models", name), exist_ok=True)

        with open(self.generate_info_abs_path(name), "w") as f:
            json.dump(history.history, f)

        with open(self.generate_tokenizer_abs_path(name), "wb") as f:
            pickle.dump(tokenizer, f)

        model.save(self.generate_model_abs_path(name))


    def __load_model(self, model_path: str):
        clear_session()
        self.model = load_model(os.path.join(model_path, "model.h5"))

        model_name: str = self.get_model_name_from_path(model_path)

        try:
            with open(self.generate_tokenizer_abs_path(model_name), "rb") as f:
                self.tokenizer = pickle.load(f)
        except FileNotFoundError:
            print("Failed to load tokenizer for model... Using default")
            self.tokenizer = Tokenizer()

            with open("memory.json", "r") as f:
                self.tokenizer.fit_on_texts(json.load(f))
        self.is_loaded = True

    def reload_model(self):
        clear_session()
        model_path: str = settings.get("model_path")
        if model_path:
            self.__load_model(model_path)  # sets self.model and self.tokenizer
            self.is_loaded = True

    async def run_async(self, func, bot, *args, **kwargs):
        func = functools.partial(func, *args, **kwargs)
        return await bot.loop.run_in_executor(None, func)

class Learning(Ai):
    def __init__(self):
        super().__init__()

    def __generate_labels_and_inputs(self, memory: List[str], tokenizer=None) -> tuple:
        if not tokenizer:
            tokenizer = Tokenizer()
            tokenizer.fit_on_texts(memory)
        sequences = tokenizer.texts_to_sequences(memory)

        x = []
        y = []
        for seq in sequences:
            for i in range(1, len(seq)):
                x.append(seq[:i])
                y.append(seq[i])

        return x, y, tokenizer

    def create_model(self, memory: list, iters: int = 2):
        memory = memory[:2000]
        X, y, tokenizer = self.__generate_labels_and_inputs(memory)
        maxlen: int = max([len(x) for x in X])
        x_pad = pad_sequences(X, maxlen=maxlen, padding="pre")

        y = np.array(y)

        model = Sequential()
        model.add(Embedding(input_dim=VOCAB_SIZE, output_dim=128, input_length=maxlen))
        model.add(LSTM(64))
        model.add(Dense(VOCAB_SIZE, activation="softmax"))

        model.compile(optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"])
        history = model.fit(x_pad, y, epochs=iters, batch_size=64, callbacks=[tf_callback])
        self.save_model(model, tokenizer, history)
        return


    def add_training(self, memory: List[str], iters: int = 2):
        tokenizer_path = os.path.join(settings.get("model_path"), "tokenizer.pkl")
        with open(tokenizer_path, "rb") as f:
            tokenizer = pickle.load(f)

        X, y, _ = self.__generate_labels_and_inputs(memory, tokenizer)

        maxlen: int = max([len(x) for x in X])
        x_pad = pad_sequences(X, maxlen=maxlen, padding="pre")
        y = np.array(y)

        history = self.model.fit(x_pad, y, epochs=iters, validation_data=(x_pad, y), batch_size=64, callbacks=[tf_callback])  # Ideally, validation data would be separate from the training data
        self.save_model(self.model, tokenizer, history, self.get_model_name_from_path(settings.get("model_path")))
        return

class Generation(Ai):
    def __init__(self):
        super().__init__()

    def generate_sentence(self, word_amount: int, seed: str):
        if not self.is_loaded:
            return False
        for _ in range(word_amount):
            token_list = self.tokenizer.texts_to_sequences([seed])[0]
            token_list = pad_sequences([token_list], maxlen=self.model.layers[0].input_shape[1], padding="pre")

            output_word = ""  # Sometimes the model fails to predict a word, so keep an empty fallback

            predicted_probs = self.model.predict(token_list, verbose=0)
            predicted_word_index = np.argmax(predicted_probs, axis=-1)[0]

            for word, index in self.tokenizer.word_index.items():
                if index == predicted_word_index:
                    output_word = word
                    break

            seed += " " + output_word
        return seed


VOCAB_SIZE = 100_000
SETTINGS_TYPE = TypedDict("SETTINGS_TYPE", {
    "model_path": str,  # path to the base folder of the model, aka .../models/05-01-2025-22_31/
    "tokenizer_path": str,
})

tf_callback: TFCallback
model_dropdown_items = []
settings: SETTINGS_TYPE = {}

target_message: discord.Message
learning: Learning
generation: Generation

class Settings:
    def __init__(self):
        self.settings_path: str = os.path.join(".", "models", "settings.json")

    def load(self):
        global settings
        try:
            with open(self.settings_path, "r") as f:
                settings = json.load(f)
        except FileNotFoundError:
            with open(self.settings_path, "w") as f:
                json.dump({}, f)

    def change_model(self, new_model_base_path: str):
        global settings
        new_model_path = os.path.join(".", "models", new_model_base_path)

        with open(self.settings_path, "r") as f:
            settings = json.load(f)

        settings["model_path"] = new_model_path

        with open(self.settings_path, "w") as f:
            json.dump(settings, f)


class Dropdown(discord.ui.Select):
    def __init__(self, items: List[str]):
        global model_dropdown_items
        model_dropdown_items = []

        for item in items:
            model_dropdown_items.append(
                discord.SelectOption(label=item)
            )

        super().__init__(placeholder="Select model", options=model_dropdown_items)

    async def callback(self, interaction: discord.Interaction):
        if int(interaction.user.id) != int(os.getenv("ownerid")):
            await interaction.message.channel.send("You do not have permission to use this command.")
            return  # stop here so non-owners cannot change the model
        Settings().change_model(self.values[0])
        await interaction.message.channel.send(f"Changed model to {self.values[0]}")

class DropdownView(discord.ui.View):
    def __init__(self, timeout, models):
        super().__init__(timeout=timeout)
        self.add_item(Dropdown(models))


class Tf(commands.Cog):
    def __init__(self, bot):
        global learning, generation, ready
        os.makedirs(os.path.join(".", "models"), exist_ok=True)
        Settings().load()
        self.bot = bot
        learning = Learning()
        generation = Generation()

    @commands.command()
    async def start(self, ctx):
        await ctx.defer()
        await ctx.send("hi")

    @commands.command()
    async def generate(self, ctx, seed: str, word_amount: int = 5):
        await ctx.defer()
        await ctx.send(generation.generate_sentence(word_amount, seed))

    @commands.command()
    async def create(self, ctx: commands.Context, epochs: int = 3):
        global tf_callback
        await ctx.defer()
        with open("memory.json", "r") as f:
            memory: List[str] = json.load(f)
        await ctx.send("Initializing tensorflow")
        embed = discord.Embed(title="Creating a model...", description="Progress of creating a model")
        embed.set_footer(text="Note: Progress tracking might report delayed / wrong data, since the function is run asynchronously")
        target_message: discord.Message = await ctx.send(embed=embed)

        tf_callback = TFCallback(self.bot, embed, target_message)
        await learning.run_async(learning.create_model, self.bot, memory, epochs)
        embed = target_message.embeds[0]
        embed.add_field(name=f"<t:{round(time.time())}:t> Finished", value="Model saved.")
        await target_message.edit(embed=embed)


    @commands.command()
    async def train(self, ctx, epochs: int = 2):
        global tf_callback

        await ctx.defer()
        with open("memory.json", "r") as f:
            memory: List[str] = json.load(f)

        embed = discord.Embed(title="Training model...", description="Progress of training model")
        target_message = await ctx.send(embed=embed)
        tf_callback = TFCallback(self.bot, embed, target_message)

        await learning.run_async(learning.add_training, self.bot, memory, epochs)
        await ctx.send("Finished!")

    @commands.command()
    async def change(self, ctx, model: str = None):
        embed = discord.Embed(title="Change model", description="Which model would you like to use?")
        if model is None:
            models: List[str] = os.listdir(os.path.join(".", "models"))
            models = [folder for folder in models if re.match(MODEL_MATCH_STRING, folder)]
            if len(models) == 0:
                models = ["No models available."]
            await ctx.send(embed=embed, view=DropdownView(90, models))
        else:
            # a model name was passed directly, so switch to it without showing the dropdown
            Settings().change_model(model)
            await ctx.send(f"Changed model to {model}")
        learning.reload_model()
        generation.reload_model()

async def setup(bot):
    await bot.add_cog(Tf(bot))
113 cogs/webscraper.py Normal file
@@ -0,0 +1,113 @@
import discord
from discord.ext import commands
import aiohttp
from bs4 import BeautifulSoup
import json
import asyncio
from urllib.parse import urljoin
from config import ownerid

class WebScraper(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.visited_urls = set()

    async def fetch(self, session, url):
        """Fetch the HTML content of a URL."""
        try:
            async with session.get(url, timeout=10) as response:
                return await response.text()
        except Exception as e:
            print(f"Failed to fetch {url}: {e}")
            return None

    def extract_sentences(self, text):
        """Extract sentences from text."""
        sentences = text.split('.')
        return [sentence.strip() for sentence in sentences if sentence.strip()]

    def save_to_json(self, sentences):
        """Save sentences to memory.json."""
        try:
            try:
                with open("memory.json", "r") as file:
                    data = json.load(file)
            except (FileNotFoundError, json.JSONDecodeError):
                data = []
            data.extend(sentences)
            with open("memory.json", "w") as file:
                json.dump(data, file, indent=4)
        except Exception as e:
            print(f"Failed to save to JSON: {e}")

    def undo_last_scrape(self):
        """Undo the last scrape by removing the most recently saved sentence."""
        try:
            with open("memory.json", "r") as file:
                data = json.load(file)

            if not data:
                print("No data to undo.")
                return False

            data = data[:-1]

            with open("memory.json", "w") as file:
                json.dump(data, file, indent=4)

            return True
        except (FileNotFoundError, json.JSONDecodeError):
            print("No data to undo or failed to load JSON.")
            return False
        except Exception as e:
            print(f"Failed to undo last scrape: {e}")
            return False

    async def scrape_links(self, session, url, depth=2):
        # only the start page is scraped for now; depth is reserved for following links
        print(f"Scraping: {url}")
        self.visited_urls.add(url)

        html = await self.fetch(session, url)
        if not html:
            return

        soup = BeautifulSoup(html, "html.parser")

        for paragraph in soup.find_all('p'):
            sentences = self.extract_sentences(paragraph.get_text())
            self.save_to_json(sentences)


    @commands.command()
    async def start_scrape(self, ctx, start_url: str):
        """Command to start the scraping process."""
        if ctx.author.id != ownerid:
            await ctx.send("You do not have permission to use this command.")
            return

        if not start_url.startswith("http"):
            await ctx.send("Please provide a valid URL.")
            return

        await ctx.send(f"Starting scrape from {start_url}... This may take a while!")

        async with aiohttp.ClientSession() as session:
            await self.scrape_links(session, start_url)

        await ctx.send("Scraping complete! Sentences saved to memory.json.")

    @commands.command()
    async def undo_scrape(self, ctx):
        """Command to undo the last scrape."""
        if ctx.author.id != ownerid:
            await ctx.send("You do not have permission to use this command.")
            return

        success = self.undo_last_scrape()
        if success:
            await ctx.send("Last scrape undone successfully.")
        else:
            await ctx.send("No data to undo or an error occurred.")

async def setup(bot):
    await bot.add_cog(WebScraper(bot))
24 cogs/whoami.py Normal file
@@ -0,0 +1,24 @@
import discord
from discord.ext import commands

class whoami(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def whoami(self, ctx):
        user_id = ctx.author.id
        username = ctx.author.name
        nickname = ctx.author.display_name  # server nickname, falling back to the username

        embed = discord.Embed(
            title="User Information",
            description=f"Your User ID is: {user_id}\n"
                        f"Your username is: {username}\n"
                        f"Your nickname in this server is: {nickname}",
            color=discord.Color.blue()
        )

        await ctx.send(embed=embed)

async def setup(bot):
    await bot.add_cog(whoami(bot))