Introduce full asyncio version (still some modules to migrate). Defender JSON-RPC server ready and running with uvicorn

This commit is contained in:
adator
2025-11-20 00:29:32 +01:00
parent 1b30b1ff4e
commit aa15aea749
34 changed files with 2533 additions and 2627 deletions

View File

@@ -173,7 +173,7 @@ class Admin:
return admin.language
def db_auth_admin_via_fingerprint(self, fp: str, uidornickname: str) -> bool:
async def db_auth_admin_via_fingerprint(self, fp: str, uidornickname: str) -> bool:
"""Check the fingerprint
Args:
@@ -188,7 +188,7 @@ class Admin:
query = f"SELECT user, level, language FROM {self.Config.TABLE_ADMIN} WHERE fingerprint = :fp"
data = {'fp': fp}
exe = self.Base.db_execute_query(query, data)
exe = await self.Base.db_execute_query(query, data)
result = exe.fetchone()
if result:
account = result[0]
@@ -204,7 +204,7 @@ class Admin:
return False
def db_is_admin_exist(self, admin_nickname: str) -> bool:
async def db_is_admin_exist(self, admin_nickname: str) -> bool:
"""Verify if the admin exist in the database!
Args:
@@ -216,7 +216,7 @@ class Admin:
mes_donnees = {'admin': admin_nickname}
query_search_user = f"SELECT id FROM {self.Config.TABLE_ADMIN} WHERE user = :admin"
r = self.Base.db_execute_query(query_search_user, mes_donnees)
r = await self.Base.db_execute_query(query_search_user, mes_donnees)
exist_user = r.fetchone()
if exist_user:
return True

View File

@@ -17,9 +17,7 @@ class Channel:
Args:
loader (Loader): The Loader Instance
"""
self.Logs = loader.Logs
self.Base = loader.Base
self.Utils = loader.Utils
self._ctx = loader
def insert(self, new_channel: 'MChannel') -> bool:
"""This method will insert a new channel and if the channel exist it will update the user list (uids)
@@ -34,14 +32,14 @@ class Channel:
exist = False
if not self.is_valid_channel(new_channel.name):
self.Logs.error(f"The channel {new_channel.name} is not valid, channel must start with #")
self._ctx.Logs.error(f"The channel {new_channel.name} is not valid, channel must start with #")
return False
for record in self.UID_CHANNEL_DB:
if record.name.lower() == new_channel.name.lower():
# If the channel exist, update the user list and do not go further
exist = True
# self.Logs.debug(f'{record.name} already exist')
# self._ctx.Logs.debug(f'{record.name} already exist')
for user in new_channel.uids:
record.uids.append(user)
@@ -49,7 +47,7 @@ class Channel:
# Supprimer les doublons
del_duplicates = list(set(record.uids))
record.uids = del_duplicates
# self.Logs.debug(f'Updating a new UID to the channel {record}')
# self._ctx.Logs.debug(f'Updating a new UID to the channel {record}')
return result
if not exist:
@@ -57,10 +55,10 @@ class Channel:
new_channel.name = new_channel.name.lower()
self.UID_CHANNEL_DB.append(new_channel)
result = True
# self.Logs.debug(f'New Channel Created: ({new_channel})')
# self._ctx.Logs.debug(f'New Channel Created: ({new_channel})')
if not result:
self.Logs.critical(f'The Channel Object was not inserted {new_channel}')
self._ctx.Logs.critical(f'The Channel Object was not inserted {new_channel}')
self.clean_channel()
@@ -103,7 +101,7 @@ class Channel:
return result
for userid in chan_obj.uids:
if self.Utils.clean_uid(userid) == self.Utils.clean_uid(uid):
if self._ctx.Utils.clean_uid(userid) == self._ctx.Utils.clean_uid(uid):
chan_obj.uids.remove(userid)
result = True
@@ -111,7 +109,7 @@ class Channel:
return result
except ValueError as ve:
self.Logs.error(f'{ve}')
self._ctx.Logs.error(f'{ve}')
return False
def delete_user_from_all_channel(self, uid:str) -> bool:
@@ -128,7 +126,7 @@ class Channel:
for record in self.UID_CHANNEL_DB:
for user_id in record.uids:
if self.Utils.clean_uid(user_id) == self.Utils.clean_uid(uid):
if self._ctx.Utils.clean_uid(user_id) == self._ctx.Utils.clean_uid(uid):
record.uids.remove(user_id)
result = True
@@ -136,7 +134,7 @@ class Channel:
return result
except ValueError as ve:
self.Logs.error(f'{ve}')
self._ctx.Logs.error(f'{ve}')
return False
def add_user_to_a_channel(self, channel_name: str, uid: str) -> bool:
@@ -154,7 +152,7 @@ class Channel:
if chan_obj is None:
# Create a new channel if the channel don't exist
self.Logs.debug(f"New channel will be created ({channel_name} - {uid})")
self._ctx.Logs.debug(f"New channel will be created ({channel_name} - {uid})")
return self.insert(MChannel(channel_name, uids=[uid]))
chan_obj.uids.append(uid)
@@ -163,7 +161,7 @@ class Channel:
return True
except Exception as err:
self.Logs.error(f'{err}')
self._ctx.Logs.error(f'{err}')
return False
def is_user_present_in_channel(self, channel_name: str, uid: str) -> bool:
@@ -180,9 +178,9 @@ class Channel:
if chan is None:
return False
clean_uid = self.Utils.clean_uid(uid=uid)
clean_uid = self._ctx.Utils.clean_uid(uid=uid)
for chan_uid in chan.uids:
if self.Utils.clean_uid(chan_uid) == clean_uid:
if self._ctx.Utils.clean_uid(chan_uid) == clean_uid:
return True
return False
@@ -197,7 +195,7 @@ class Channel:
return None
except Exception as err:
self.Logs.error(f'{err}')
self._ctx.Logs.error(f'{err}')
def get_channel(self, channel_name: str) -> Optional['MChannel']:
"""Get the channel object
@@ -237,13 +235,13 @@ class Channel:
else:
return True
except TypeError as te:
self.Logs.error(f'TypeError: [{channel_to_check}] - {te}')
self._ctx.Logs.error(f'TypeError: [{channel_to_check}] - {te}')
return False
except Exception as err:
self.Logs.error(f'Error Not defined: {err}')
self._ctx.Logs.error(f'Error Not defined: {err}')
return False
def db_query_channel(self, action: Literal['add','del'], module_name: str, channel_name: str) -> bool:
async def db_query_channel(self, action: Literal['add','del'], module_name: str, channel_name: str) -> bool:
"""You can add a channel or delete a channel.
Args:
@@ -256,39 +254,49 @@ class Channel:
"""
try:
channel_name = channel_name.lower() if self.is_valid_channel(channel_name) else None
core_table = self.Base.Config.TABLE_CHANNEL
core_table = self._ctx.Base.Config.TABLE_CHANNEL
if not channel_name:
self.Logs.warning(f'The channel [{channel_name}] is not correct')
self._ctx.Logs.warning(f'The channel [{channel_name}] is not correct')
return False
match action:
case 'add':
mes_donnees = {'module_name': module_name, 'channel_name': channel_name}
response = self.Base.db_execute_query(f"SELECT id FROM {core_table} WHERE module_name = :module_name AND channel_name = :channel_name", mes_donnees)
response = await self._ctx.Base.db_execute_query(f"SELECT id FROM {core_table} WHERE module_name = :module_name AND channel_name = :channel_name", mes_donnees)
is_channel_exist = response.fetchone()
if is_channel_exist is None:
mes_donnees = {'datetime': self.Utils.get_sdatetime(), 'channel_name': channel_name, 'module_name': module_name}
insert = self.Base.db_execute_query(f"INSERT INTO {core_table} (datetime, channel_name, module_name) VALUES (:datetime, :channel_name, :module_name)", mes_donnees)
mes_donnees = {'datetime': self._ctx.Utils.get_sdatetime(), 'channel_name': channel_name, 'module_name': module_name}
insert = await self._ctx.Base.db_execute_query(f"INSERT INTO {core_table} (datetime, channel_name, module_name) VALUES (:datetime, :channel_name, :module_name)", mes_donnees)
if insert.rowcount:
self.Logs.debug(f'Channel added to DB: channel={channel_name} / module_name={module_name}')
self._ctx.Logs.debug(f'Channel added to DB: channel={channel_name} / module_name={module_name}')
return True
else:
return False
case 'del':
mes_donnes = {'channel_name': channel_name, 'module_name': module_name}
response = self.Base.db_execute_query(f"DELETE FROM {core_table} WHERE channel_name = :channel_name AND module_name = :module_name", mes_donnes)
response = await self._ctx.Base.db_execute_query(f"DELETE FROM {core_table} WHERE channel_name = :channel_name AND module_name = :module_name", mes_donnes)
if response.rowcount > 0:
self.Logs.debug(f'Channel deleted from DB: channel={channel_name} / module: {module_name}')
self._ctx.Logs.debug(f'Channel deleted from DB: channel={channel_name} / module: {module_name}')
return True
else:
return False
except Exception as err:
self.Logs.error(err)
self._ctx.Logs.error(err)
return False
async def db_join_saved_channels(self) -> None:
    """Join every channel previously saved in the database.

    Reads the distinct channel names stored in ``Config.TABLE_CHANNEL``
    and sends an SJOIN for each one through the IRC protocol handler.
    """
    # Fetch all distinct saved channel names from the channel table.
    exec_query = await self._ctx.Base.db_execute_query(f'SELECT distinct channel_name FROM {self._ctx.Config.TABLE_CHANNEL}')
    result_query = exec_query.fetchall()
    if result_query:
        for chan_name in result_query:
            # Each row is a 1-tuple; the channel name is the first column.
            chan = chan_name[0]
            await self._ctx.Irc.Protocol.send_sjoin(channel=chan)

View File

@@ -201,7 +201,7 @@ class Client:
return True
def db_is_account_exist(self, account: str) -> bool:
async def db_is_account_exist(self, account: str) -> bool:
"""Check if the account exist in the database
Args:
@@ -213,7 +213,7 @@ class Client:
table_client = self.Base.Config.TABLE_CLIENT
account_to_check = {'account': account.lower()}
account_to_check_query = self.Base.db_execute_query(f"""
account_to_check_query = await self.Base.db_execute_query(f"""
SELECT id FROM {table_client} WHERE LOWER(account) = :account
""", account_to_check)

View File

@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING
import socket
if TYPE_CHECKING:
from core.irc import Irc
from core.loader import Loader
# Modules impacted by rehashing!
REHASH_MODULES = [
@@ -14,7 +14,10 @@ REHASH_MODULES = [
'core.classes.modules.config',
'core.base',
'core.classes.modules.commands',
'core.classes.modules.rpc',
'core.classes.modules.rpc.rpc_channel',
'core.classes.modules.rpc.rpc_command',
'core.classes.modules.rpc.rpc_user',
'core.classes.modules.rpc.rpc',
'core.classes.interfaces.iprotocol',
'core.classes.interfaces.imodule',
'core.classes.protocols.command_handler',
@@ -24,66 +27,62 @@ REHASH_MODULES = [
]
def restart_service(uplink: 'Irc', reason: str = "Restarting with no reason!") -> None:
async def restart_service(uplink: 'Loader', reason: str = "Restarting with no reason!") -> None:
"""
Args:
uplink (Irc): The Irc instance
reason (str): The reason of the restart.
"""
# reload modules.
# unload modules.
for module in uplink.ModuleUtils.model_get_loaded_modules().copy():
uplink.ModuleUtils.unload_one_module(uplink, module.module_name)
await uplink.ModuleUtils.unload_one_module(module.module_name)
uplink.Base.garbage_collector_thread()
uplink.Logs.debug(f'[{uplink.Config.SERVICE_NICKNAME} RESTART]: Reloading configuration!')
uplink.Protocol.send_squit(server_id=uplink.Config.SERVEUR_ID, server_link=uplink.Config.SERVEUR_LINK, reason=reason)
await uplink.Irc.Protocol.send_squit(server_id=uplink.Config.SERVEUR_ID, server_link=uplink.Config.SERVEUR_LINK, reason=reason)
uplink.Logs.debug('Restarting Defender ...')
uplink.IrcSocket.shutdown(socket.SHUT_RDWR)
uplink.IrcSocket.close()
while uplink.IrcSocket.fileno() != -1:
time.sleep(0.5)
uplink.Logs.warning('-- Waiting for socket to close ...')
# Reload configuration
uplink.Loader.Config = uplink.Loader.ConfModule.Configuration(uplink.Loader).configuration_model
uplink.Loader.Base = uplink.Loader.BaseModule.Base(uplink.Loader)
for mod in REHASH_MODULES:
importlib.reload(sys.modules[mod])
uplink.Protocol = uplink.Loader.PFactory.get()
uplink.Protocol.register_command()
# Reload configuration
uplink.Config = uplink.ConfModule.Configuration(uplink).configuration_model
uplink.Base = uplink.BaseModule.Base(uplink)
uplink.ModuleUtils.model_clear() # Clear loaded modules.
uplink.User.UID_DB.clear() # Clear User Object
uplink.Channel.UID_CHANNEL_DB.clear() # Clear Channel Object
uplink.Client.CLIENT_DB.clear() # Clear Client object
uplink.Irc.Protocol.Handler.DB_IRCDCOMMS.clear()
uplink.init_service_user()
uplink.Utils.create_socket(uplink)
uplink.Protocol.send_link()
# Reload Service modules
for module in uplink.ModuleUtils.model_get_loaded_modules().copy():
await uplink.ModuleUtils.reload_one_module(module.module_name, uplink.Settings.current_admin)
uplink.Irc.signal = True
await uplink.Irc.run()
uplink.Config.DEFENDER_RESTART = 0
def rehash_service(uplink: 'Irc', nickname: str) -> None:
async def rehash_service(uplink: 'Loader', nickname: str) -> None:
need_a_restart = ["SERVEUR_ID"]
uplink.Settings.set_cache('db_commands', uplink.Commands.DB_COMMANDS)
uplink.Loader.RpcServer.stop_server()
await uplink.RpcServer.stop_server()
restart_flag = False
config_model_bakcup = uplink.Config
mods = REHASH_MODULES
for mod in mods:
importlib.reload(sys.modules[mod])
uplink.Protocol.send_priv_msg(
await uplink.Irc.Protocol.send_priv_msg(
nick_from=uplink.Config.SERVICE_NICKNAME,
msg=f'[REHASH] Module [{mod}] reloaded',
channel=uplink.Config.SERVICE_CHANLOG
)
uplink.Utils = sys.modules['core.utils']
uplink.Config = uplink.Loader.Config = uplink.Loader.ConfModule.Configuration(uplink.Loader).configuration_model
uplink.Config = uplink.ConfModule.Configuration(uplink).configuration_model
uplink.Config.HSID = config_model_bakcup.HSID
uplink.Config.DEFENDER_INIT = config_model_bakcup.DEFENDER_INIT
uplink.Config.DEFENDER_RESTART = config_model_bakcup.DEFENDER_RESTART
@@ -96,7 +95,7 @@ def rehash_service(uplink: 'Irc', nickname: str) -> None:
for key, value in conf_bkp_dict.items():
if config_dict[key] != value and key != 'COLORS':
uplink.Protocol.send_priv_msg(
await uplink.Irc.Protocol.send_priv_msg(
nick_from=uplink.Config.SERVICE_NICKNAME,
msg=f'[{key}]: {value} ==> {config_dict[key]}',
channel=uplink.Config.SERVICE_CHANLOG
@@ -105,27 +104,28 @@ def rehash_service(uplink: 'Irc', nickname: str) -> None:
restart_flag = True
if config_model_bakcup.SERVICE_NICKNAME != uplink.Config.SERVICE_NICKNAME:
uplink.Protocol.send_set_nick(uplink.Config.SERVICE_NICKNAME)
await uplink.Irc.Protocol.send_set_nick(uplink.Config.SERVICE_NICKNAME)
if restart_flag:
uplink.Config.SERVEUR_ID = config_model_bakcup.SERVEUR_ID
uplink.Protocol.send_priv_msg(
await uplink.Irc.Protocol.send_priv_msg(
nick_from=uplink.Config.SERVICE_NICKNAME,
channel=uplink.Config.SERVICE_CHANLOG,
msg='You need to restart defender !')
# Reload Main Commands Module
uplink.Commands = uplink.Loader.CommandModule.Command(uplink.Loader)
uplink.Loader.RpcServer = uplink.Loader.RpcServerModule.JSONRPCServer(uplink.Loader)
uplink.Loader.RpcServer.start_server()
uplink.Commands = uplink.CommandModule.Command(uplink)
uplink.Commands.DB_COMMANDS = uplink.Settings.get_cache('db_commands')
uplink.Loader.Base = uplink.Loader.BaseModule.Base(uplink.Loader)
uplink.Protocol = uplink.Loader.PFactory.get()
uplink.Protocol.register_command()
uplink.Base = uplink.BaseModule.Base(uplink)
uplink.Irc.Protocol = uplink.PFactory.get()
uplink.Irc.Protocol.register_command()
uplink.RpcServer = uplink.RpcServerModule.JSonRpcServer(uplink)
uplink.Base.create_asynctask(uplink.RpcServer.start_server())
# Reload Service modules
for module in uplink.ModuleUtils.model_get_loaded_modules().copy():
uplink.ModuleUtils.reload_one_module(uplink, module.module_name, nickname)
await uplink.ModuleUtils.reload_one_module(module.module_name, nickname)
return None

View File

@@ -0,0 +1 @@
__version__ = '1.0.0'

View File

@@ -1,8 +1,11 @@
import base64
import json
import logging
from starlette.applications import Starlette
from starlette.responses import JSONResponse
from starlette.requests import Request
from starlette.routing import Route
import uvicorn
from enum import Enum
from http.server import BaseHTTPRequestHandler, HTTPServer
from typing import TYPE_CHECKING, Any, Optional
from core.classes.modules.rpc.rpc_user import RPCUser
from core.classes.modules.rpc.rpc_channel import RPCChannel
@@ -11,120 +14,101 @@ from core.classes.modules.rpc.rpc_command import RPCCommand
if TYPE_CHECKING:
from core.loader import Loader
ProxyLoader: Optional['Loader'] = None
class JSonRpcServer:
class RPCRequestHandler(BaseHTTPRequestHandler):
def __init__(self, context: 'Loader', *, hostname: str = 'localhost', port: int = 5000):
self._ctx = context
self.live: bool = False
self.host = hostname
self.port = port
self.routes: list[Route] = []
self.server: Optional[uvicorn.Server] = None
def log_message(self, format, *args):
pass
def do_POST(self):
logs = ProxyLoader.Logs
self.server_version = 'Defender6'
self.sys_version = ProxyLoader.Config.CURRENT_VERSION
content_length = int(self.headers['Content-Length'])
body = self.rfile.read(content_length)
request_data: dict = json.loads(body)
rip, rport = self.client_address
if not self.authenticate(request_data):
return None
response_data = {
'jsonrpc': '2.0',
'id': request_data.get('id', 123)
self.methods: dict = {
'user.list': RPCUser(context).user_list,
'user.get': RPCUser(context).user_get,
'channel.list': RPCChannel(context).channel_list,
'command.list': RPCCommand(context).command_list,
'command.get.by.name': RPCCommand(context).command_get_by_name,
'command.get.by.module': RPCCommand(context).command_get_by_module
}
method = request_data.get("method")
async def start_server(self):
if not self.live:
self.routes = [Route('/api', self.request_handler, methods=['POST'])]
self.app_jsonrpc = Starlette(debug=False, routes=self.routes)
config = uvicorn.Config(self.app_jsonrpc, host=self.host, port=self.port, log_level=self._ctx.Config.DEBUG_LEVEL)
self.server = uvicorn.Server(config)
self.live = True
await self._ctx.Irc.Protocol.send_priv_msg(
self._ctx.Config.SERVICE_NICKNAME,
"[DEFENDER JSONRPC SERVER] RPC Server started!",
self._ctx.Config.SERVICE_CHANLOG
)
await self.server.serve()
self._ctx.Logs.debug("Server is going to shutdown!")
else:
self._ctx.Logs.debug("Server already running")
async def stop_server(self):
if self.server:
self.server.should_exit = True
await self.server.shutdown()
self.live = False
self._ctx.Logs.debug("JSON-RPC Server off!")
await self._ctx.Irc.Protocol.send_priv_msg(
self._ctx.Config.SERVICE_NICKNAME,
"[DEFENDER JSONRPC SERVER] RPC Server Stopped!",
self._ctx.Config.SERVICE_CHANLOG
)
async def request_handler(self, request: Request) -> JSONResponse:
request_data: dict = await request.json()
method = request_data.get("method", None)
params: dict[str, Any] = request_data.get("params", {})
auth: JSONResponse = self.authenticate(request.headers, request_data)
if not json.loads(auth.body).get('result', False):
return auth
response_data = {
"jsonrpc": "2.0",
"id": request_data.get('id', 123)
}
response_data['method'] = method
rip = request.client.host
rport = request.client.port
http_code = 200
match method:
case 'user.list':
user = RPCUser(ProxyLoader)
response_data['result'] = user.user_list()
logs.debug(f'[RPC] {method} recieved from {rip}:{rport}')
del user
case 'user.get':
user = RPCUser(ProxyLoader)
uid_or_nickname = params.get('uid_or_nickname', None)
response_data['result'] = user.user_get(uid_or_nickname)
logs.debug(f'[RPC] {method} recieved from {rip}:{rport}')
del user
if method in self.methods:
response_data['result'] = self.methods[method](**params)
return JSONResponse(response_data, http_code)
case 'channel.list':
channel = RPCChannel(ProxyLoader)
response_data['result'] = channel.channel_list()
logs.debug(f'[RPC] {method} recieved from {rip}:{rport}')
del channel
response_data['error'] = create_error_response(JSONRPCErrorCode.METHOD_NOT_FOUND)
self._ctx.Logs.debug(f'[RPC ERROR] {method} recieved from {rip}:{rport}')
http_code = 404
return JSONResponse(response_data, http_code)
case 'command.list':
command = RPCCommand(ProxyLoader)
response_data['result'] = command.command_list()
logs.debug(f'[RPC] {method} recieved from {rip}:{rport}')
del command
def authenticate(self, headers: dict, body: dict) -> JSONResponse:
ok_auth = {
'jsonrpc': '2.0',
'id': body.get('id', 123),
'result': True
}
case 'command.get.by.module':
command = RPCCommand(ProxyLoader)
module_name = params.get('name', None)
response_data['result'] = command.command_get_by_module(module_name)
logs.debug(f'[RPC] {method} recieved from {rip}:{rport}')
del command
case 'command.get.by.name':
command = RPCCommand(ProxyLoader)
command_name = params.get('name', None)
response_data['result'] = command.command_get_by_name(command_name)
logs.debug(f'[RPC] {method} recieved from {rip}:{rport}')
del command
case _:
response_data['error'] = create_error_response(JSONRPCErrorCode.METHOD_NOT_FOUND)
logs.debug(f'[RPC ERROR] {method} recieved from {rip}:{rport}')
http_code = 404
self.send_response(http_code)
self.send_header('Content-Type', 'application/json')
self.end_headers()
self.wfile.write(json.dumps(response_data).encode('utf-8'))
return None
def do_GET(self):
self.server_version = 'Defender6'
self.sys_version = ProxyLoader.Config.CURRENT_VERSION
content_length = int(self.headers['Content-Length'])
body = self.rfile.read(content_length)
request_data: dict = json.loads(body)
if not self.authenticate(request_data):
return None
response_data = {'jsonrpc': '2.0', 'id': request_data.get('id', 321),
'error': create_error_response(JSONRPCErrorCode.INVALID_REQUEST)}
self.send_response(404)
self.send_header('Content-Type', 'application/json')
self.end_headers()
self.wfile.write(json.dumps(response_data).encode('utf-8'))
return None
def authenticate(self, request_data: dict) -> bool:
logs = ProxyLoader.Logs
auth = self.headers.get('Authorization', None)
if auth is None:
self.send_auth_error(request_data)
return False
logs = self._ctx.Logs
auth: str = headers.get('Authorization', '')
if not auth:
return self.send_auth_error(body)
# Authorization header format: Basic base64(username:password)
auth_type, auth_string = auth.split(' ', 1)
if auth_type.lower() != 'basic':
self.send_auth_error(request_data)
return False
return self.send_auth_error(body)
try:
# Decode the base64-encoded username:password
@@ -132,70 +116,25 @@ class RPCRequestHandler(BaseHTTPRequestHandler):
username, password = decoded_credentials.split(":", 1)
# Check the username and password.
for rpcuser in ProxyLoader.Irc.Config.RPC_USERS:
for rpcuser in self._ctx.Config.RPC_USERS:
if rpcuser.get('USERNAME', None) == username and rpcuser.get('PASSWORD', None) == password:
return True
return JSONResponse(ok_auth)
self.send_auth_error(request_data)
return False
return self.send_auth_error(body)
except Exception as e:
self.send_auth_error(request_data)
logs.error(e)
return False
return self.send_auth_error(body)
def send_auth_error(self, request_data: dict) -> None:
def send_auth_error(self, request_data: dict) -> JSONResponse:
response_data = {
'jsonrpc': '2.0',
'id': request_data.get('id', 123),
'error': create_error_response(JSONRPCErrorCode.AUTHENTICATION_ERROR)
}
return JSONResponse(response_data)
self.send_response(401)
self.send_header('WWW-Authenticate', 'Basic realm="Authorization Required"')
self.end_headers()
self.wfile.write(json.dumps(response_data).encode('utf-8'))
class JSONRPCServer:
def __init__(self, loader: 'Loader'):
global ProxyLoader
ProxyLoader = loader
self._Loader = loader
self._Base = loader.Base
self._Logs = loader.Logs
self.rpc_server: Optional[HTTPServer] = None
self.connected: bool = False
def start_server(self, server_class=HTTPServer, handler_class=RPCRequestHandler, *, hostname: str = 'localhost', port: int = 5000):
logging.getLogger('http.server').setLevel(logging.CRITICAL)
server_address = (hostname, port)
self.rpc_server = server_class(server_address, handler_class)
self._Logs.debug(f"Server ready on http://{hostname}:{port}...")
self._Base.create_thread(self.thread_start_rpc_server, (), True)
def thread_start_rpc_server(self) -> None:
self._Loader.Irc.Protocol.send_priv_msg(
self._Loader.Config.SERVICE_NICKNAME, "Defender RPC Server has started successfuly!", self._Loader.Config.SERVICE_CHANLOG
)
self.connected = True
self.rpc_server.serve_forever()
ProxyLoader.Logs.debug(f"RPC Server down!")
def stop_server(self):
self._Base.create_thread(self.thread_stop_rpc_server)
def thread_stop_rpc_server(self):
self.rpc_server.shutdown()
ProxyLoader.Logs.debug(f"RPC Server shutdown!")
self.rpc_server.server_close()
ProxyLoader.Logs.debug(f"RPC Server clean-up!")
self._Base.garbage_collector_thread()
self._Loader.Irc.Protocol.send_priv_msg(
self._Loader.Config.SERVICE_NICKNAME, "Defender RPC Server has stopped successfuly!", self._Loader.Config.SERVICE_CHANLOG
)
self.connected = False
class JSONRPCErrorCode(Enum):
PARSE_ERROR = -32700 # Syntax error in the request (malformed JSON)

View File

@@ -1,4 +1,4 @@
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from core.loader import Loader
@@ -8,5 +8,5 @@ class RPCChannel:
self._Loader = loader
self._Channel = loader.Channel
def channel_list(self) -> list[dict]:
def channel_list(self, **kwargs) -> list[dict]:
    """Return every tracked channel serialized as a dict.

    Args:
        **kwargs: Unused; accepted so the method matches the generic
            RPC dispatch signature (handlers are invoked with ``**params``).

    Returns:
        list[dict]: One dict per channel in the in-memory channel DB.
    """
    return [chan.to_dict() for chan in self._Channel.UID_CHANNEL_DB]

View File

@@ -8,14 +8,22 @@ class RPCCommand:
self._Loader = loader
self._Command = loader.Commands
def command_list(self) -> list[dict]:
def command_list(self, **kwargs) -> list[dict]:
return [command.to_dict() for command in self._Command.DB_COMMANDS]
def command_get_by_module(self, module_name: str) -> list[dict]:
def command_get_by_module(self, **kwargs) -> list[dict]:
    """Return all registered commands belonging to a given module.

    Args:
        **kwargs: RPC parameters; expects ``module_name`` (str).
            NOTE(review): the old RPC handler passed this value under the
            key ``name`` — confirm callers were updated to ``module_name``.

    Returns:
        list[dict]: Serialized commands whose ``module_name`` matches
        (case-insensitive); empty list when ``module_name`` is absent.
    """
    module_name = kwargs.get('module_name', None)
    # No module name supplied by the caller: nothing to look up.
    if module_name is None:
        return []
    # Case-insensitive match on the owning module's name.
    return [command.to_dict() for command in self._Command.DB_COMMANDS if command.module_name.lower() == module_name.lower()]
def command_get_by_name(self, command_name: str) -> dict:
def command_get_by_name(self, **kwargs) -> dict:
command_name: str = kwargs.get('command_name', '')
if not command_name:
return dict()
for command in self._Command.DB_COMMANDS:
if command.command_name.lower() == command_name.lower():
return command.to_dict()
return {}
return dict()

View File

@@ -6,11 +6,10 @@ if TYPE_CHECKING:
class RPCUser:
def __init__(self, loader: 'Loader'):
self._Loader = loader
self._User = loader.User
self._ctx = loader
def user_list(self) -> list[dict]:
users = self._User.UID_DB.copy()
def user_list(self, **kwargs) -> list[dict]:
users = self._ctx.User.UID_DB.copy()
copy_users: list['MUser'] = []
for user in users:
@@ -20,8 +19,9 @@ class RPCUser:
return [user.to_dict() for user in copy_users]
def user_get(self, uidornickname: str) -> Optional[dict]:
user = self._User.get_user(uidornickname)
def user_get(self, **kwargs) -> Optional[dict]:
uidornickname = kwargs.get('uid_or_nickname', None)
user = self._ctx.User.get_user(uidornickname)
if user:
user_copy = user.copy()
user_copy.connexion_datetime = user_copy.connexion_datetime.strftime('%d-%m-%Y')