Merge pull request #99 from adator85/asyncio

Fix rehash command.
This commit is contained in:
adator
2025-11-24 01:54:40 +01:00
committed by GitHub
14 changed files with 256 additions and 117 deletions

View File

@@ -378,7 +378,7 @@ class Base:
         if run_once:
             for task in asyncio.all_tasks():
-                if task.get_name().lower() == async_name.lower():
+                if task.get_name().lower() == name.lower():
                     return None
         task = asyncio.create_task(func, name=name)

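The run_once guard above skips creating a task whenever another task already carries the same name. A minimal standalone sketch of that idea (the helper name and demo coroutine are illustrative, not from this repository):

import asyncio
from typing import Optional

def create_named_task_once(coro, name: str) -> Optional[asyncio.Task]:
    # Skip creation if a task with this name is already running.
    for task in asyncio.all_tasks():
        if task.get_name().lower() == name.lower():
            coro.close()  # silence the "coroutine was never awaited" warning
            return None
    return asyncio.create_task(coro, name=name)

async def main():
    async def worker():
        await asyncio.sleep(0.1)

    t1 = create_named_task_once(worker(), "heartbeat")
    t2 = create_named_task_once(worker(), "heartbeat")  # skipped, same name
    print(t1 is not None, t2 is None)  # True True
    await t1

asyncio.run(main())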
View File

@@ -244,7 +244,7 @@ class IProtocol(ABC):
""" """
@abstractmethod @abstractmethod
async def send_uid(self, nickname:str, username: str, hostname: str, uid:str, umodes: str, vhost: str, remote_ip: str, realname: str, print_log: bool = True) -> None: async def send_uid(self, nickname:str, username: str, hostname: str, uid:str, umodes: str, vhost: str, remote_ip: str, realname: str, geoip: str, print_log: bool = True) -> None:
"""Send UID to the server """Send UID to the server
- Insert User to User Object - Insert User to User Object
Args: Args:
@@ -256,6 +256,7 @@ class IProtocol(ABC):
             vhost (str): vhost of the client you want to create
             remote_ip (str): remote_ip of the client you want to create
             realname (str): realname of the client you want to create
+            geoip (str): The country code of the client you want to create
             print_log (bool, optional): print logs if true. Defaults to True.
         """

View File

@@ -1,9 +1,11 @@
 import asyncio
 import importlib
 import sys
-import time
+import threading
 from typing import TYPE_CHECKING
-import socket
+import core.module as module_mod
+from core.classes.modules import user, admin, client, channel, reputation, sasl
+from core.utils import tr

 if TYPE_CHECKING:
     from core.loader import Loader
@@ -12,9 +14,16 @@ if TYPE_CHECKING:
 REHASH_MODULES = [
     'core.definition',
     'core.utils',
-    'core.classes.modules.config',
     'core.base',
+    'core.module',
+    'core.classes.modules.config',
     'core.classes.modules.commands',
+    'core.classes.modules.user',
+    'core.classes.modules.admin',
+    'core.classes.modules.client',
+    'core.classes.modules.channel',
+    'core.classes.modules.reputation',
+    'core.classes.modules.sasl',
     'core.classes.modules.rpc.rpc_channel',
     'core.classes.modules.rpc.rpc_command',
     'core.classes.modules.rpc.rpc_user',
@@ -61,26 +70,30 @@ async def restart_service(uplink: 'Loader', reason: str = "Restarting with no re
     for module in uplink.ModuleUtils.model_get_loaded_modules().copy():
         await uplink.ModuleUtils.reload_one_module(module.module_name, uplink.Settings.current_admin)
-    uplink.Irc.signal = True
     await uplink.Irc.run()
     uplink.Config.DEFENDER_RESTART = 0

 async def rehash_service(uplink: 'Loader', nickname: str) -> None:
     need_a_restart = ["SERVEUR_ID"]
-    uplink.Settings.set_cache('db_commands', uplink.Commands.DB_COMMANDS)
+    uplink.Settings.set_cache('commands', uplink.Commands.DB_COMMANDS)
+    uplink.Settings.set_cache('users', uplink.User.UID_DB)
+    uplink.Settings.set_cache('clients', uplink.Client.CLIENT_DB)
+    uplink.Settings.set_cache('admins', uplink.Admin.UID_ADMIN_DB)
+    uplink.Settings.set_cache('reputations', uplink.Reputation.UID_REPUTATION_DB)
+    uplink.Settings.set_cache('channels', uplink.Channel.UID_CHANNEL_DB)
+    uplink.Settings.set_cache('sasl', uplink.Sasl.DB_SASL)
+    uplink.Settings.set_cache('modules', uplink.ModuleUtils.DB_MODULES)
+    uplink.Settings.set_cache('module_headers', uplink.ModuleUtils.DB_MODULE_HEADERS)
     await uplink.RpcServer.stop_rpc_server()
     restart_flag = False
     config_model_bakcup = uplink.Config
     mods = REHASH_MODULES
+    _count_reloaded_modules = len(mods)
     for mod in mods:
         importlib.reload(sys.modules[mod])
-        await uplink.Irc.Protocol.send_priv_msg(
-            nick_from=uplink.Config.SERVICE_NICKNAME,
-            msg=f'[REHASH] Module [{mod}] reloaded',
-            channel=uplink.Config.SERVICE_CHANLOG
-        )
     uplink.Utils = sys.modules['core.utils']
     uplink.Config = uplink.ConfModule.Configuration(uplink).configuration_model
     uplink.Config.HSID = config_model_bakcup.HSID
@@ -115,9 +128,27 @@ async def rehash_service(uplink: 'Loader', nickname: str) -> None:
     # Reload Main Commands Module
     uplink.Commands = uplink.CommandModule.Command(uplink)
-    uplink.Commands.DB_COMMANDS = uplink.Settings.get_cache('db_commands')
+    uplink.Commands.DB_COMMANDS = uplink.Settings.get_cache('commands')
     uplink.Base = uplink.BaseModule.Base(uplink)
+    uplink.User = user.User(uplink)
+    uplink.Client = client.Client(uplink)
+    uplink.Admin = admin.Admin(uplink)
+    uplink.Channel = channel.Channel(uplink)
+    uplink.Reputation = reputation.Reputation(uplink)
+    uplink.ModuleUtils = module_mod.Module(uplink)
+    uplink.Sasl = sasl.Sasl(uplink)
+
+    # Backup data
+    uplink.User.UID_DB = uplink.Settings.get_cache('users')
+    uplink.Client.CLIENT_DB = uplink.Settings.get_cache('clients')
+    uplink.Admin.UID_ADMIN_DB = uplink.Settings.get_cache('admins')
+    uplink.Channel.UID_CHANNEL_DB = uplink.Settings.get_cache('channels')
+    uplink.Reputation.UID_REPUTATION_DB = uplink.Settings.get_cache('reputations')
+    uplink.Sasl.DB_SASL = uplink.Settings.get_cache('sasl')
+    uplink.ModuleUtils.DB_MODULE_HEADERS = uplink.Settings.get_cache('module_headers')
+    uplink.ModuleUtils.DB_MODULES = uplink.Settings.get_cache('modules')
     uplink.Irc.Protocol = uplink.PFactory.get()
     uplink.Irc.Protocol.register_command()
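The pattern above survives the module reload by stashing the live state in the settings cache, rebuilding the objects from the freshly reloaded modules, and pouring the cached data back in. A minimal sketch of that round-trip with hypothetical stand-ins (SettingsCache and UserStore are not the project's real types):

from typing import Any

class SettingsCache:
    """Hypothetical stand-in for the Settings cache."""
    def __init__(self) -> None:
        self._cache: dict[str, Any] = {}

    def set_cache(self, key: str, value: Any) -> None:
        self._cache[key] = value

    def get_cache(self, key: str) -> Any:
        return self._cache.get(key)

class UserStore:
    """Hypothetical stand-in for the object holding UID_DB."""
    def __init__(self) -> None:
        self.UID_DB: list[dict] = []

settings = SettingsCache()
users = UserStore()
users.UID_DB.append({"uid": "001ABC", "nickname": "adator"})

# 1. Backup before the reload invalidates the old instances
settings.set_cache("users", users.UID_DB)

# 2. Rebuild the store from the reloaded module, then restore the data
users = UserStore()
users.UID_DB = settings.get_cache("users")
print(users.UID_DB)  # [{'uid': '001ABC', 'nickname': 'adator'}]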
@@ -128,6 +159,15 @@ async def rehash_service(uplink: 'Loader', nickname: str) -> None:
     for module in uplink.ModuleUtils.model_get_loaded_modules().copy():
         await uplink.ModuleUtils.reload_one_module(module.module_name, nickname)
+    color_green = uplink.Config.COLORS.green
+    color_reset = uplink.Config.COLORS.nogc
+    await uplink.Irc.Protocol.send_priv_msg(
+        uplink.Config.SERVICE_NICKNAME,
+        tr("[ %sREHASH INFO%s ] Rehash completed! %s modules reloaded.", color_green, color_reset, _count_reloaded_modules),
+        uplink.Config.SERVICE_CHANLOG
+    )
     return None

 async def shutdown(uplink: 'Loader') -> None:
@@ -141,6 +181,14 @@ async def shutdown(uplink: 'Loader') -> None:
     for module in uplink.ModuleUtils.model_get_loaded_modules().copy():
         await uplink.ModuleUtils.unload_one_module(module.module_name)
+    uplink.Logs.debug(f"=======> Closing all Sockets!")
+    for soc in uplink.Base.running_sockets:
+        soc.close()
+        while soc.fileno() != -1:
+            soc.close()
+        uplink.Base.running_sockets.remove(soc)
+        uplink.Logs.debug(f"> Socket ==> closed {str(soc.fileno())}")

     # Nettoyage des timers
     uplink.Logs.debug(f"=======> Closing all timers!")
     for timer in uplink.Base.running_timers:
@@ -158,25 +206,37 @@ async def shutdown(uplink: 'Loader') -> None:
         uplink.Logs.debug(f"> Cancelling {thread.name} {thread.native_id}")

     uplink.Logs.debug(f"=======> Closing all IO Threads!")
-    [th.thread_event.clear() for th in uplink.Base.running_iothreads]
+    [th.thread_event.clear() for th in uplink.Base.running_iothreads if isinstance(th.thread_event, threading.Event)]

     uplink.Logs.debug(f"=======> Closing all IO TASKS!")
-    try:
-        await asyncio.wait_for(asyncio.gather(*uplink.Base.running_iotasks), timeout=5)
-    except asyncio.exceptions.TimeoutError as te:
-        uplink.Logs.debug(f"Asyncio Timeout reached! {te}")
-        for task in uplink.Base.running_iotasks:
-            task.cancel()
-    except asyncio.exceptions.CancelledError as cerr:
-        uplink.Logs.debug(f"Asyncio CancelledError reached! {cerr}")
-
-    uplink.Logs.debug(f"=======> Closing all Sockets!")
-    for soc in uplink.Base.running_sockets:
-        soc.close()
-        while soc.fileno() != -1:
-            soc.close()
-        uplink.Base.running_sockets.remove(soc)
-        uplink.Logs.debug(f"> Socket ==> closed {str(soc.fileno())}")
+    t = None
+    for task in uplink.Base.running_iotasks:
+        if 'force_shutdown' == task.get_name():
+            t = task
+    if t:
+        uplink.Base.running_iotasks.remove(t)
+
+    task_already_canceled: list = []
+    for task in uplink.Base.running_iotasks:
+        try:
+            if not task.cancel():
+                print(task.get_name())
+                task_already_canceled.append(task)
+        except asyncio.exceptions.CancelledError as cerr:
+            uplink.Logs.debug(f"Asyncio CancelledError reached! {task}")
+
+    for task in task_already_canceled:
+        uplink.Base.running_iotasks.remove(task)
+
+    for task in uplink.Base.running_iotasks:
+        try:
+            await asyncio.wait_for(asyncio.gather(task), timeout=5)
+        except asyncio.exceptions.TimeoutError as te:
+            uplink.Logs.debug(f"Asyncio Timeout reached! {te} {task}")
+            for task in uplink.Base.running_iotasks:
+                task.cancel()
+        except asyncio.exceptions.CancelledError as cerr:
+            uplink.Logs.debug(f"Asyncio CancelledError reached! {cerr} {task}")

     uplink.Base.running_timers.clear()
     uplink.Base.running_threads.clear()
@@ -186,4 +246,10 @@ async def shutdown(uplink: 'Loader') -> None:
     uplink.Base.db_close()
     return None

+async def force_shutdown(uplink: 'Loader') -> None:
+    await asyncio.sleep(10)
+    uplink.Logs.critical("The system has been killed because something is blocking the loop")
+    uplink.Logs.critical(asyncio.all_tasks())
+    sys.exit('The system has been killed')
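force_shutdown acts as a watchdog: it is scheduled just before shutdown() is awaited (see the Irc hunk further down), and shutdown() drops the task named force_shutdown from the pending list so only the real workers are cancelled and awaited. A standalone sketch of that pairing, with illustrative names rather than the project's API:

import asyncio
import sys

async def force_shutdown(delay: float = 10.0) -> None:
    # Watchdog: if graceful shutdown is still blocked after `delay` seconds,
    # raise SystemExit; asyncio re-raises SystemExit out of a task, which
    # stops the loop and terminates the process.
    await asyncio.sleep(delay)
    print("Shutdown is blocked, exiting hard", file=sys.stderr)
    sys.exit(1)

async def shutdown(pending: list) -> None:
    # Never await the watchdog itself; it only exists to fire on a hang.
    pending = [t for t in pending if t.get_name() != "force_shutdown"]
    for task in pending:
        task.cancel()
    for task in pending:
        try:
            await asyncio.wait_for(asyncio.gather(task), timeout=5)
        except (asyncio.TimeoutError, asyncio.CancelledError):
            pass

async def main():
    watchdog = asyncio.create_task(force_shutdown(2.0), name="force_shutdown")
    worker = asyncio.create_task(asyncio.sleep(60), name="worker")
    await shutdown([watchdog, worker])
    watchdog.cancel()  # normal path: shutdown finished before the watchdog fired

asyncio.run(main())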

View File

@@ -510,7 +510,7 @@ class Unrealircd6(IProtocol):
         return None

     async def send_uid(self, nickname:str, username: str, hostname: str, uid:str, umodes: str,
-                       vhost: str, remote_ip: str, realname: str, print_log: bool = True) -> None:
+                       vhost: str, remote_ip: str, realname: str, geoip: str, print_log: bool = True) -> None:
         """Send UID to the server
         - Insert User to User Object
         Args:
@@ -535,7 +535,7 @@ class Unrealircd6(IProtocol):
             self._ctx.Definition.MUser(
                 uid=uid, nickname=nickname, username=username,
                 realname=realname,hostname=hostname, umodes=umodes,
-                vhost=vhost, remote_ip=remote_ip
+                vhost=vhost, remote_ip=remote_ip, geoip=geoip
             )
         )
@@ -1472,7 +1472,7 @@ class Unrealircd6(IProtocol):
                 sasl_obj.fingerprint = str(scopy[6])
                 await self.send2socket(f":{self._ctx.Config.SERVEUR_LINK} SASL {self._ctx.Settings.MAIN_SERVER_HOSTNAME} {sasl_obj.client_uid} C +")
-                self.on_sasl_authentication_process(sasl_obj)
+                await self.on_sasl_authentication_process(sasl_obj)
                 return sasl_obj
             case 'C':

View File

@@ -75,9 +75,6 @@ class Irc:
         self.ctx.Commands.build_command(1, 'core', 'deauth', 'Deauth from the irc service')
         self.ctx.Commands.build_command(1, 'core', 'checkversion', 'Check the version of the irc service')
         self.ctx.Commands.build_command(2, 'core', 'show_modules', 'Display a list of loaded modules')
-        self.ctx.Commands.build_command(2, 'core', 'show_timers', 'Display active timers')
-        self.ctx.Commands.build_command(2, 'core', 'show_threads', 'Display active threads in the system')
-        self.ctx.Commands.build_command(2, 'core', 'show_asyncio', 'Display active asyncio')
         self.ctx.Commands.build_command(2, 'core', 'show_channels', 'Display a list of active channels')
         self.ctx.Commands.build_command(2, 'core', 'show_users', 'Display a list of connected users')
         self.ctx.Commands.build_command(2, 'core', 'show_clients', 'Display a list of connected clients')
@@ -85,15 +82,18 @@ class Irc:
         self.ctx.Commands.build_command(2, 'core', 'show_configuration', 'Display the current configuration settings')
         self.ctx.Commands.build_command(2, 'core', 'show_cache', 'Display the current cache')
         self.ctx.Commands.build_command(2, 'core', 'clear_cache', 'Clear the cache!')
-        self.ctx.Commands.build_command(3, 'core', 'quit', 'Disconnect the bot or user from the server.')
-        self.ctx.Commands.build_command(3, 'core', 'restart', 'Restart the bot or service.')
         self.ctx.Commands.build_command(3, 'core', 'addaccess', 'Add a user or entity to an access list with specific permissions.')
         self.ctx.Commands.build_command(3, 'core', 'editaccess', 'Modify permissions for an existing user or entity in the access list.')
         self.ctx.Commands.build_command(3, 'core', 'delaccess', 'Remove a user or entity from the access list.')
         self.ctx.Commands.build_command(3, 'core', 'cert', 'Append your new fingerprint to your account!')
+        self.ctx.Commands.build_command(4, 'core', 'quit', 'Disconnect the bot or user from the server.')
         self.ctx.Commands.build_command(4, 'core', 'rehash', 'Reload the configuration file without restarting')
+        self.ctx.Commands.build_command(4, 'core', 'restart', 'Restart the bot or service.')
         self.ctx.Commands.build_command(4, 'core', 'raw', 'Send a raw command directly to the IRC server')
         self.ctx.Commands.build_command(4, 'core', 'print_vars', 'Print users in a file.')
+        self.ctx.Commands.build_command(4, 'core', 'show_timers', 'Display active timers')
+        self.ctx.Commands.build_command(4, 'core', 'show_threads', 'Display active threads in the system')
+        self.ctx.Commands.build_command(4, 'core', 'show_asyncio', 'Display active asyncio')
         self.ctx.Commands.build_command(4, 'core', 'start_rpc', 'Start defender jsonrpc server')
         self.ctx.Commands.build_command(4, 'core', 'stop_rpc', 'Stop defender jsonrpc server')
@@ -103,6 +103,7 @@ class Irc:
     async def run(self):
         try:
+            self.signal = True
             await self.connect()
             await self.listen()
         except asyncio.exceptions.IncompleteReadError as ie:
@@ -206,64 +207,52 @@ class Irc:
         return None

-    async def on_sasl_authentication_process(self, sasl_model: 'MSasl') -> bool:
-        s = sasl_model
-        if sasl_model:
-            async def db_get_admin_info(*, username: Optional[str] = None, password: Optional[str] = None, fingerprint: Optional[str] = None) -> Optional[dict[str, Any]]:
-                if fingerprint:
-                    mes_donnees = {'fingerprint': fingerprint}
-                    query = f"SELECT user, level, language FROM {self.ctx.Config.TABLE_ADMIN} WHERE fingerprint = :fingerprint"
-                else:
-                    mes_donnees = {'user': username, 'password': self.ctx.Utils.hash_password(password)}
-                    query = f"SELECT user, level, language FROM {self.ctx.Config.TABLE_ADMIN} WHERE user = :user AND password = :password"
-                result = await self.ctx.Base.db_execute_query(query, mes_donnees)
-                user_from_db = result.fetchone()
-                if user_from_db:
-                    return {'user': user_from_db[0], 'level': user_from_db[1], 'language': user_from_db[2]}
-                else:
-                    return None
-            if s.message_type == 'C' and s.mechanisme == 'PLAIN':
-                # Connection via PLAIN
-                admin_info = await db_get_admin_info(username=s.username, password=s.password)
-                if admin_info is not None:
-                    s.auth_success = True
-                    s.level = admin_info.get('level', 0)
-                    s.language = admin_info.get('language', 'EN')
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D S")
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 903 {s.username} :SASL authentication successful")
-                else:
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D F")
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 904 {s.username} :SASL authentication failed")
-            elif s.message_type == 'S' and s.mechanisme == 'EXTERNAL':
-                # Connection using fingerprints
-                admin_info = await db_get_admin_info(fingerprint=s.fingerprint)
-                if admin_info is not None:
-                    s.auth_success = True
-                    s.level = admin_info.get('level', 0)
-                    s.username = admin_info.get('user', None)
-                    s.language = admin_info.get('language', 'EN')
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D S")
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 903 {s.username} :SASL authentication successful")
-                else:
-                    # "904 <nick> :SASL authentication failed"
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D F")
-                    await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 904 {s.username} :SASL authentication failed")
+    # async def on_sasl_authentication_process(self, sasl_model: 'MSasl') -> bool:
+    #     s = sasl_model
+    #     if sasl_model:
+    #         async def db_get_admin_info(*, username: Optional[str] = None, password: Optional[str] = None, fingerprint: Optional[str] = None) -> Optional[dict[str, Any]]:
+    #             if fingerprint:
+    #                 mes_donnees = {'fingerprint': fingerprint}
+    #                 query = f"SELECT user, level, language FROM {self.ctx.Config.TABLE_ADMIN} WHERE fingerprint = :fingerprint"
+    #             else:
+    #                 mes_donnees = {'user': username, 'password': self.ctx.Utils.hash_password(password)}
+    #                 query = f"SELECT user, level, language FROM {self.ctx.Config.TABLE_ADMIN} WHERE user = :user AND password = :password"
+    #             result = await self.ctx.Base.db_execute_query(query, mes_donnees)
+    #             user_from_db = result.fetchone()
+    #             if user_from_db:
+    #                 return {'user': user_from_db[0], 'level': user_from_db[1], 'language': user_from_db[2]}
+    #             else:
+    #                 return None
+    #         if s.message_type == 'C' and s.mechanisme == 'PLAIN':
+    #             # Connection via PLAIN
+    #             admin_info = await db_get_admin_info(username=s.username, password=s.password)
+    #             if admin_info is not None:
+    #                 s.auth_success = True
+    #                 s.level = admin_info.get('level', 0)
+    #                 s.language = admin_info.get('language', 'EN')
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D S")
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 903 {s.username} :SASL authentication successful")
+    #             else:
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D F")
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 904 {s.username} :SASL authentication failed")
+    #         elif s.message_type == 'S' and s.mechanisme == 'EXTERNAL':
+    #             # Connection using fingerprints
+    #             admin_info = await db_get_admin_info(fingerprint=s.fingerprint)
+    #             if admin_info is not None:
+    #                 s.auth_success = True
+    #                 s.level = admin_info.get('level', 0)
+    #                 s.username = admin_info.get('user', None)
+    #                 s.language = admin_info.get('language', 'EN')
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D S")
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 903 {s.username} :SASL authentication successful")
+    #             else:
+    #                 # "904 <nick> :SASL authentication failed"
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} SASL {self.ctx.Settings.MAIN_SERVER_HOSTNAME} {s.client_uid} D F")
+    #                 await self.Protocol.send2socket(f":{self.ctx.Config.SERVEUR_LINK} 904 {s.username} :SASL authentication failed")
-    def get_defender_uptime(self) -> str:
-        """Savoir depuis quand Defender est connecté
-        Returns:
-            str: L'écart entre la date du jour et celle de la connexion de Defender
-        """
-        current_datetime = datetime.now()
-        diff_date = current_datetime - self.defender_connexion_datetime
-        uptime = timedelta(days=diff_date.days, seconds=diff_date.seconds)
-        return uptime
-
     def insert_db_admin(self, uid: str, account: str, level: int, language: str) -> None:
         user_obj = self.ctx.User.get_user(uid)
@@ -946,6 +935,7 @@ class Irc:
             try:
                 final_reason = ' '.join(cmd[1:])
                 self.hb_active = False
+                self.ctx.Base.create_asynctask(rehash.force_shutdown(self.ctx), run_once=True)
                 await rehash.shutdown(self.ctx)
                 self.ctx.Base.execute_periodic_action()
@@ -1135,7 +1125,7 @@ class Irc:
                 return None
             case 'uptime':
-                uptime = self.get_defender_uptime()
+                uptime = self.ctx.Utils.get_defender_uptime()
                 await self.Protocol.send_notice(
                     nick_from=dnickname,
                     nick_to=fromuser,

View File

@@ -22,6 +22,25 @@ class Module:
     def __init__(self, loader: 'Loader') -> None:
         self._ctx = loader

+    def is_module_compliant(self, obj: object) -> bool:
+        class_name = obj.__name__
+        is_compliant = True
+        attributs = {'MOD_HEADER', 'mod_config'}
+        methods = {'load', 'unload', 'create_tables', 'cmd', 'hcmds', 'ModConfModel'}
+        obj_attributs: set = set([attribut for attribut in dir(obj) if not callable(getattr(obj, attribut)) and not attribut.startswith('__')])
+        obj_methods: set = set([method for method in dir(obj) if callable(getattr(obj, method)) and not method.startswith('__')])
+
+        if not attributs.issubset(obj_attributs):
+            self._ctx.Logs.error(f'[{class_name}] Your module is not valid make sure you have implemented required attributes {attributs}')
+            raise AttributeError(f'[{class_name}] Your module is not valid make sure you have implemented required attributes {attributs}')
+
+        if not methods.issubset(obj_methods):
+            self._ctx.Logs.error(f'[{class_name}] Your module is not valid make sure you have implemented required methods {methods}')
+            raise AttributeError(f'[{class_name}] Your module is not valid make sure you have implemented required methods {methods}')
+
+        return is_compliant

     def get_all_available_modules(self) -> list[str]:
         """Get list of all main modules
         using this pattern mod_*.py
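Because is_module_compliant inspects the class object itself (dir(obj)), MOD_HEADER and mod_config have to be visible at class level, and load/unload/create_tables/cmd/hcmds/ModConfModel have to be callable attributes; anything missing raises AttributeError. A bare skeleton that would pass that check might look like the sketch below; the header dict and config fields are placeholders, not the project's real models:

from dataclasses import dataclass

class MyModule:
    """Minimal shape accepted by Module.is_module_compliant (sketch only)."""

    @dataclass
    class ModConfModel:
        param_exemple1: str = "default"
        param_exemple2: int = 0

    # Placeholders: the real project builds MOD_HEADER from its own header model.
    MOD_HEADER = {"name": "mod_example", "version": "1.0.0"}
    mod_config = ModConfModel()

    def __init__(self, ctx) -> None:
        self.ctx = ctx

    async def load(self) -> None: ...
    def unload(self) -> None: ...
    def create_tables(self) -> None: ...
    async def cmd(self, data: list) -> None: ...
    async def hcmds(self, user, channel, cmd: list, full_cmd: str = "") -> None: ...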
@@ -100,13 +119,14 @@ class Module:
                 channel=self._ctx.Config.SERVICE_CHANLOG
             )
             return False
-        return self.reload_one_module(module_name, nickname)
+        reload_mod = await self.reload_one_module(module_name, nickname)
+        return reload_mod

         # Charger le module
         try:
             loaded_module = importlib.import_module(f'mods.{module_folder}.{module_name}')
             my_class = getattr(loaded_module, class_name, None) # Récuperer le nom de classe
+            self.is_module_compliant(my_class)
             create_instance_of_the_class: 'IModule' = my_class(self._ctx) # Créer une nouvelle instance de la classe
             await create_instance_of_the_class.load() if self._ctx.Utils.is_coroutinefunction(create_instance_of_the_class.load) else create_instance_of_the_class.load()
             self.create_module_header(create_instance_of_the_class.MOD_HEADER)
@@ -118,17 +138,9 @@ class Module:
                 msg=tr("[%sMODULE ERROR%s] Module %s is facing issues! %s", red, nogc, module_name, attr),
                 channel=self._ctx.Config.SERVICE_CHANLOG
             )
-            self._ctx.Logs.error(msg=attr, exc_info=True)
-            return False
-        if not hasattr(create_instance_of_the_class, 'cmd'):
-            await self._ctx.Irc.Protocol.send_priv_msg(
-                nick_from=self._ctx.Config.SERVICE_NICKNAME,
-                msg=tr("cmd method is not available in the module (%s)", module_name),
-                channel=self._ctx.Config.SERVICE_CHANLOG
-            )
-            self._ctx.Logs.critical(f"The Module {module_name} has not been loaded because cmd method is not available")
+            self.drop_module_from_sys_modules(module_name)
             await self.db_delete_module(module_name)
+            self._ctx.Logs.error(msg=attr, exc_info=True)
             return False

         # Charger la nouvelle class dans la variable globale
@@ -184,6 +196,7 @@ class Module:
             the_module = sys.modules[f'mods.{module_folder}.{module_name}']
             importlib.reload(the_module)
             my_class = getattr(the_module, class_name, None)
+            self.is_module_compliant(my_class)
             new_instance: 'IModule' = my_class(self._ctx)
             await new_instance.load() if self._ctx.Utils.is_coroutinefunction(new_instance.load) else new_instance.load()
             self.create_module_header(new_instance.MOD_HEADER)
@@ -215,7 +228,9 @@ class Module:
                 msg=f"[RELOAD MODULE ERROR]: {err}",
                 channel=self._ctx.Config.SERVICE_CHANLOG
             )
+            self.drop_module_from_sys_modules(module_name)
             await self.db_delete_module(module_name)
+            return False

     def reload_all_modules(self) -> bool:
         ...
@@ -323,6 +338,24 @@ class Module:
         self._ctx.Logs.debug(f"[SYS MODULE] (mods.{module_folder}.{module_name}) not found in sys.modules")
         return False

+    def drop_module_from_sys_modules(self, module_name: str) -> bool:
+        """_summary_
+
+        Args:
+            module_name (str): _description_
+
+        Returns:
+            bool: _description_
+        """
+        module_folder, module_name, class_name = self.get_module_information(module_name)
+        full_module_name = "mods." + module_folder + "." + module_name
+        del sys.modules[full_module_name]
+
+        if not self.is_module_exist_in_sys_module(module_name):
+            return True
+
+        return False

     '''
     ALL METHODS RELATED TO THE MModule MODEL DATACLASS
     '''
@@ -343,6 +376,22 @@ class Module:
         self._ctx.Logs.debug(f"[MODEL MODULE GET] The module {module_name} not found in the model DB_MODULES")
         return None

+    def model_drop_module(self, module_name: str) -> bool:
+        """Drop a module model object from DB_MODULES
+
+        Args:
+            module_name (str): The module name you want to drop
+
+        Returns:
+            bool: True if the model has been dropped
+        """
+        module = self.model_get_module(module_name)
+        if module:
+            self.DB_MODULES.remove(module)
+            return True
+
+        return False

     def model_get_loaded_modules(self) -> list[MModule]:
         """Get the instance of DB_MODULES.
         Warning: You should use a copy if you want to loop through the list!

View File

@@ -5,9 +5,8 @@ import gc
 import ssl
 from pathlib import Path
 from re import match, sub
-import threading
 from typing import Literal, Optional, Any, TYPE_CHECKING
-from datetime import datetime
+from datetime import datetime, timedelta
 from time import time, sleep
 from random import choice
 from hashlib import md5, sha3_512
@@ -115,6 +114,18 @@ def get_ssl_context() -> ssl.SSLContext:
     ctx.verify_mode = ssl.CERT_NONE
     return ctx

+def get_defender_uptime(loader: 'Loader') -> str:
+    """Savoir depuis quand Defender est connecté
+
+    Returns:
+        str: L'écart entre la date du jour et celle de la connexion de Defender
+    """
+    current_datetime = datetime.now()
+    diff_date = current_datetime - loader.Irc.defender_connexion_datetime
+    uptime = timedelta(days=diff_date.days, seconds=diff_date.seconds)
+    return uptime

 def run_python_garbage_collector() -> int:
     """Run Python garbage collector

View File

@@ -11,7 +11,7 @@ from core import install
 #############################################
 async def main():
-    install.update_packages()
+    # install.update_packages()
     from core.loader import Loader
     loader = Loader()
     await loader.start()

View File

@@ -17,6 +17,7 @@ class MClone(MainModel):
     hostname: str = 'localhost'
     umodes: str = None
     remote_ip: str = '127.0.0.1'
-    group: str = 'Default'
+    group: str = 'Default',
+    geoip: str = 'XX'

 # DB_CLONES: list[MClone] = []

View File

@@ -28,7 +28,7 @@ async def coro_connect_clones(uplink: 'Clone',
             break

         if not clone.connected:
-            await uplink.ctx.Irc.Protocol.send_uid(clone.nickname, clone.username, clone.hostname, clone.uid, clone.umodes, clone.vhost, clone.remote_ip, clone.realname, print_log=False)
+            await uplink.ctx.Irc.Protocol.send_uid(clone.nickname, clone.username, clone.hostname, clone.uid, clone.umodes, clone.vhost, clone.remote_ip, clone.realname, clone.geoip, print_log=False)
             await uplink.ctx.Irc.Protocol.send_join_chan(uidornickname=clone.uid, channel=uplink.ctx.Config.CLONE_CHANNEL, password=uplink.ctx.Config.CLONE_CHANNEL_PASSWORD, print_log=False)
         await asyncio.sleep(interval)

View File

@@ -103,6 +103,17 @@ def generate_ipv4_for_clone(faker_instance: 'Faker', auto: bool = True) -> str:
""" """
return faker_instance.ipv4_private() if auto else '127.0.0.1' return faker_instance.ipv4_private() if auto else '127.0.0.1'
def generate_country_code_for_clone(faker_instance: 'Faker') -> str:
"""Generate the alpha-2 country code for clone
Args:
faker_instance (Faker): The Faker Instance
Returns:
str: The Country Code
"""
return faker_instance.country_code('alpha-2')
def generate_hostname_for_clone(faker_instance: 'Faker') -> str: def generate_hostname_for_clone(faker_instance: 'Faker') -> str:
"""Generate hostname for clone """Generate hostname for clone
@@ -143,6 +154,8 @@ def create_new_clone(uplink: 'Clone', faker_instance: 'Faker', group: str = 'Def
     hostname = generate_hostname_for_clone(faker)
     vhost = generate_vhost_for_clone(faker)
+    geoip = generate_country_code_for_clone(faker)

     checkNickname = uplink.Clone.nickname_exists(nickname)
     checkUid = uplink.Clone.uid_exists(uid=uid)
@@ -167,7 +180,8 @@ def create_new_clone(uplink: 'Clone', faker_instance: 'Faker', group: str = 'Def
         remote_ip=decoded_ip,
         vhost=vhost,
         group=group,
-        channels=[]
+        channels=[],
+        geoip=geoip
     )
     uplink.Clone.insert(clone)
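Taken together, the clone hunks above wire geoip end to end: a Faker-based generator, a call while building the clone, and the new field passed into MClone and send_uid. A small illustrative sketch of composing a clone identity with Faker providers I expect to exist (user_name, hostname, ipv4_private, country_code); the dict layout mirrors the MClone fields but is not the project's code:

from faker import Faker

fake = Faker()

# Illustrative clone identity; keys mirror the MClone fields shown earlier.
clone_identity = {
    'nickname': fake.user_name()[:12],
    'username': fake.user_name()[:10],
    'hostname': fake.hostname(),
    'vhost': fake.hostname(),
    'remote_ip': fake.ipv4_private(),
    'geoip': fake.country_code('alpha-2'),  # alpha-2 code, e.g. 'FR'
}
print(clone_identity)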

View File

@@ -227,6 +227,7 @@ async def coro_psutil_scan(uplink: 'Defender'):
     for user in uplink.Schemas.DB_PSUTIL_USERS:
         result = await uplink.ctx.Base.create_thread_io(uplink.mod_utils.action_scan_client_with_psutil, uplink, user)
         list_to_remove.append(user)
+
         if not result:
             continue

View File

@@ -534,8 +534,14 @@ def action_scan_client_with_abuseipdb(uplink: 'Defender', user_model: 'MUser') -
         'Accept': 'application/json',
         'Key': uplink.abuseipdb_key
     }

-    response = sess.request(method='GET', url=url, headers=headers, params=querystring, timeout=uplink.timeout)
+    try:
+        response = sess.request(method='GET', url=url, headers=headers, params=querystring, timeout=uplink.timeout)
+    except (requests.exceptions.ReadTimeout, requests.exceptions.ConnectTimeout) as err:
+        uplink.ctx.Logs.error(f"Time-out Error: {err}")
+        return None
+    except Exception as e:
+        uplink.ctx.Logs.error(f"Time-out Error: {e}")
+        return None

     if response.status_code != 200:
         uplink.ctx.Logs.warning(f'status code = {str(response.status_code)}')

View File

@@ -126,8 +126,8 @@ class Test(IModule):
         await self.ctx.Irc.Protocol.send_priv_msg(nick_from=dnickname, msg=f"This is private message to the sender ...", channel=c.name)

         # How to update your module configuration
-        self.update_configuration('param_exemple2', 7)
-        self.update_configuration('param_exemple1', 'my_value')
+        await self.update_configuration('param_exemple2', 7)
+        await self.update_configuration('param_exemple1', 'my_value')

         # Log if you want the result
         self.ctx.Logs.debug(f"Test logs ready")