Full completed project
8
server/.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
1
server/.idea/.name
generated
Normal file
@@ -0,0 +1 @@
content.py
6
server/.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
8
server/.idea/modules.xml
generated
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/server.iml" filepath="$PROJECT_DIR$/.idea/server.iml" />
    </modules>
  </component>
</project>
8
server/.idea/server.iml
generated
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
1
server/bugs.txt
Normal file
@@ -0,0 +1 @@
time slot bugs when ticking past 12:00 pm | status: Fixed | Tested: FALSE
BIN
server/builds/docker/beopen.tar
Normal file
6
server/builds/docker/docker-compose.yml
Normal file
@@ -0,0 +1,6 @@
version: "3"
services:
  beopen:
    image: python-beopen
    ports:
      - '9999:9999'
1
server/data/.cryptdatabase.db
Normal file
1
server/data/.salt.txt
Normal file
@@ -0,0 +1 @@
(binary salt data, not displayable as text)
40
server/data/config.ini
Normal file
@@ -0,0 +1,40 @@
[authorisation]
adminkey = secret
registrationkey = secret
usernamemaxlength = 20
usernameminlength = 5
passwordmaxlength = 30
passwordminlength = 5
tokenexpirytime = 2592000

[database]
path = data/database.db
encrypt = true
shamirsecretsharing = true
numberofshares = 6
minimumshares = 3
keypath = data/key.txt
encryptedpath = data/.cryptdatabase.db
encryptionconfigpath = data/encryptconfig.txt
saltpath = data/.salt.txt
sharespath = data/shares/

[user]
defaultlevel = member
defaultoccupationid = Null

[posts]
posttimelimit = 5
daystart = 9
dayend = 17

[notifications]
defaultexpiretime = 604800
ntfyurl = https://ntfy.example.com

[networking]
port = 9999

[miscellaneous]
servercode = 12345
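
Elsewhere in this diff the server reads these values through modules.data.config.read (aliased as config_read). That module is not shown here, so the sketch below is only an assumed illustration of how such a reader sits on top of Python's configparser; note that configparser lower-cases option names, which is why the docs refer to keys such as "MinimumShares" while this file spells them in lower case.

    import configparser

    def config_read(section, option, path="data/config.ini"):
        # option names are case-insensitive: "Port" and "port" resolve to the same entry
        parser = configparser.ConfigParser()
        parser.read(path)
        return parser.get(section, option)

    port = int(config_read("networking", "Port"))           # 9999
    sss = config_read("database", "ShamirSecretSharing")    # "true" (returned as a string, not a bool)
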
BIN: 11 image files added (sizes between 260 KiB and 382 KiB; file paths not captured in this view)
1
server/data/key.txt
Normal file
@@ -0,0 +1 @@
gAAAAABluqjkgZxWuY8Ne1R2Mm1z3iyu_13kMDotTQade2biMZ_OP0l2jlRBubZes7WIMC19OOWB4Sn2G-lul5YJmFtuJGuv8IgM0ocyth1nQj7zN0sTLScCAryIoedEeFJ9SsqxoWNt
1
server/data/log.txt
Normal file
5
server/data/shares/share-1.txt
Normal file
@@ -0,0 +1,5 @@
Share number: 1
Share secret: 13652
Minimum shares required: 3

IMPORTANT: Please remind your admin that it's their job to distribute and delete shares from the server
5
server/data/shares/share-2.txt
Normal file
@@ -0,0 +1,5 @@
Share number: 2
Share secret: 16091
Minimum shares required: 3

IMPORTANT: Please remind your admin that it's their job to distribute and delete shares from the server
5
server/data/shares/share-3.txt
Normal file
@@ -0,0 +1,5 @@
Share number: 3
Share secret: 19662
Minimum shares required: 3

IMPORTANT: Please remind your admin that it's their job to distribute and delete shares from the server
5
server/data/shares/share-4.txt
Normal file
@@ -0,0 +1,5 @@
Share number: 4
Share secret: 24365
Minimum shares required: 3

IMPORTANT: Please remind your admin that it's their job to distribute and delete shares from the server
5
server/data/shares/share-5.txt
Normal file
@@ -0,0 +1,5 @@
Share number: 5
Share secret: 30200
Minimum shares required: 3

IMPORTANT: Please remind your admin that it's their job to distribute and delete shares from the server
5
server/data/shares/share-6.txt
Normal file
@@ -0,0 +1,5 @@
Share number: 6
Share secret: 37167
Minimum shares required: 3

IMPORTANT: Please remind your admin that it's their job to distribute and delete shares from the server
6
server/dockerfile
Normal file
@@ -0,0 +1,6 @@
FROM python:3.11-alpine
WORKDIR /
ADD main.py .
ADD modules ./modules
# pathlib, configparser and datetime are part of the standard library, so they are not installed here
RUN pip install python-socketio eventlet pillow python-dotenv
CMD python -u ./main.py
44
server/docs/Guide to encrypting the database.md
Normal file
@@ -0,0 +1,44 @@
# Overview
This document guides an administrator through setting up encryption on their BeOpen database. Encryption improves security and makes it easier to respond to a breach: if database encryption is enabled, all you have to do in the event of a breach is shut down the server application, which encrypts the database immediately.

Remember that you can shut down the server from your settings panel while logged in on an admin account.

# Options
Before enabling encryption you should consider the options available to you. Standard encryption uses a single master password, which you can use to decrypt the database from any active client device while the server is in "decrypt" mode.

You also have the option of enabling Shamir secret sharing. This creates a number of "shares" that you can hand out to trusted colleagues or friends. If you, as the administrator, ever lose the master password, you can ask for a set number of these shares to be given back to you; entering them into the "decrypt" screen of the client will decrypt the database and reconstruct your master password.

You can decide how many shares are required to reconstruct your master password and how many shares you want to create; it's completely up to you. The only limitations are that the number of shares created must be less than 20 and the number of shares needed for reconstruction must be less than 7. These parameters can be changed in the configuration file, under the database section.

# Guide
1) Decide upon a master password. Note that your master password must be an integer; we recommend a significantly large one, as short or common integers may be easily guessed or cracked.
2) Create a text file at the path "data/encryptconfig.txt" (this path is configurable in the database section of your configuration file) and type your master password into this file.
3) Enable encryption in the configuration file by setting "EncryptDatabase" to "true".
4) If you want Shamir secret sharing, enable it in the same section of your configuration file by setting "ShamirSecretSharing" to "true". Additionally, change "MinimumShares" and "NumberOfShares" to your preferred values.
5) Launch the server. If all goes according to plan, the server will launch normally and you will be able to start any client and enter the decryption credentials.

## Distribution of shares
If you used Shamir secret sharing, your shares will now sit as a collection of text files in (by default) data/shares. These text files will NOT be deleted automatically; deleting them is left up to you as the administrator.

When you give someone a share, make sure they remember both their share number and their share secret. If you know your share secret but cannot remember your share number, it is not possible to use the share. The minimum number of shares required to reconstruct the master password is considered public, so it is included on every share; this number is also stored in the server's configuration file. A conceptual sketch of how a handful of shares reconstruct the password is given below.

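## How reconstruction works (sketch)
The server's own reconstruction is implemented in native code (see server/modules/algorithms/sss.cpp later in this diff), which solves a linear system built from the shares. The snippet below is only a conceptual illustration of why any three shares are enough: each share is a point on a hidden polynomial, and the secret is that polynomial's value at x = 0, recovered here with Lagrange interpolation. It is not the code the server runs.

```python
from fractions import Fraction

def reconstruct(shares):
    # shares: list of (share number, share secret) pairs; any k of them
    # determine the degree k-1 polynomial, and the secret is its value at x = 0
    secret = Fraction(0)
    for i, (xi, yi) in enumerate(shares):
        term = Fraction(yi)
        for j, (xj, _) in enumerate(shares):
            if i != j:
                term *= Fraction(0 - xj, xi - xj)
        secret += term
    return int(secret)

# for example, three of the share files shipped under data/shares/ in this diff
print(reconstruct([(1, 13652), (2, 16091), (4, 24365)]))   # prints the reconstructed master password
```
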
## Failure handling

### Encryption
If the encryption fails in any way, the server will log the problem and shut down. Have a good read of the server logs; the most common issue is that the generated shares could not reconstruct the original key. If this is the case, simply try again or use a shorter master password.

### Server shutdown (ungraceful)
If the server suddenly loses power or is unable to perform a graceful shutdown for any reason, the unencrypted database will be left on the system. This is done to avoid the risk of data loss. In this event:
1) Back up the encrypted database and the unencrypted database
2) Set encryption to false in the configuration file
3) Decrypt the database from any client
4) Shut down the server again (gracefully)
5) Delete the database in the server directory and replace it with the previous version you backed up
6) You can then re-enable encryption and go through the setup process again. If you use the same master password you do NOT have to redistribute the Shamir secret shares; however, a new set of shares may be generated, in which case simply delete those files.

## Dos and don'ts
If you use Shamir secret sharing, do NOT change the "MinimumShares" configuration, even after encryption has completed successfully and the shares have been generated. If for some reason this option does change, contact the share holders to see if they or anyone else knows the correct value; without it the master password cannot be reconstructed.

Do not manually change or alter any files unless instructed to do so by this guide. Changing configuration options while the server is running can lead to loss of data.

Do not share your master password with anyone else. If you wish to have a "backup", please use the Shamir secret sharing feature built into the server.
477
server/main.py
Normal file
@@ -0,0 +1,477 @@
|
||||
import socketio
|
||||
import eventlet
|
||||
|
||||
# SESSION
|
||||
class server_session():
|
||||
def __init__(self):
|
||||
self.clients = []
|
||||
self.logged_in = []
|
||||
self.accepting_clients = True
|
||||
|
||||
self.mode = "normal"
|
||||
self.flags = []
|
||||
|
||||
self.encrypt_on_shutdown = True
|
||||
self.db_encrypted = True
|
||||
self.password = None
|
||||
|
||||
# this is a class object shared across the server
|
||||
# it allows access to some basic information about the server's current status
|
||||
session = server_session()
|
||||
# SESSION
|
||||
|
||||
# STARTUP
|
||||
from modules.track.logging import log
|
||||
from modules.start.start import main as server_startup
|
||||
server_startup(session)
|
||||
# STARTUP
|
||||
|
||||
# MODULES
|
||||
from modules.auth import auth
|
||||
from modules.track import *
|
||||
send_status = logging.status.send_status
|
||||
|
||||
from modules.user import info as user_info
|
||||
from modules.handler.handler import *
|
||||
from modules.handler.tasks import user_service, server_service
|
||||
from modules.algorithms.univ import dict_key_verify
|
||||
from modules.data.datetime import timestamp
|
||||
from modules.data.config import read as config_read
|
||||
# MODULES
|
||||
|
||||
sio = socketio.Server()
|
||||
app = socketio.WSGIApp(sio)
|
||||
|
||||
|
||||
# CONNECT/DISCONNECT EVENTS
|
||||
@sio.event
|
||||
def connect(sid, environ, auth):
|
||||
if session.accepting_clients:
|
||||
sio.save_session(sid, {'id': None, 'level': None})
|
||||
log("INFO", f"client {sid} connected")
|
||||
session.clients.append(sid)
|
||||
else:
|
||||
# return status here, create interface etc
|
||||
sio.disconnect(sid)
|
||||
|
||||
@sio.event
|
||||
def disconnect(sid):
|
||||
log("INFO", f"client {sid} disconnected")
|
||||
session.clients.remove(sid)
|
||||
if sid in session.logged_in:
|
||||
session.logged_in.remove(sid)
|
||||
# CONNECT/DISCONNECT EVENTS
|
||||
|
||||
|
||||
# AUTH EVENTS
|
||||
@sio.event
|
||||
def login(sid, data):
|
||||
info = {'logged_in': False}
|
||||
status, user_id, level = auth.login(sio, sid, data)
|
||||
|
||||
with sio.session(sid) as client_session:
|
||||
        # saves some information to the sid of a connected client
|
||||
        # this sid can be passed to other functions to identify the client even if they haven't provided specific info to that event
|
||||
# as long as they have logged in
|
||||
client_session['id'] = user_id
|
||||
client_session['level'] = level
|
||||
|
||||
if auth.authorised(sio, sid, "member"):
|
||||
info['logged_in'] = True
|
||||
if sid not in session.logged_in:
|
||||
session.logged_in.append(sid)
|
||||
sio.start_background_task(user_service, sio, sid)
|
||||
|
||||
send_status(sio, sid, status)
|
||||
return True, info
|
||||
|
||||
# any event that returns information will return True as its first parameter
|
||||
# this is to let the client-side function know that the information being returned is a "callback" from the server
|
||||
# without this the client-side function would have no way of knowing if the function has been called by the server or the client itself
|
||||
|
||||
@sio.event
|
||||
def register(sid, data):
|
||||
status = auth.register(data)
|
||||
send_status(sio, sid, status)
|
||||
if status['level'] == "INFO":
|
||||
return True, {'is_registered': True}
|
||||
return True, {'is_registered': False}
|
||||
|
||||
@sio.event
|
||||
def admin_register(sid, data):
|
||||
status = auth.admin_register(data)
|
||||
send_status(sio, sid, status)
|
||||
if status['level'] == "INFO":
|
||||
return True, {'is_registered': True}
|
||||
return True, {'is_registered': False}
|
||||
|
||||
@sio.event
|
||||
def auth_get(sid, data=None):
|
||||
info, status = auth_handler(sio, sid, session, min_level='member', event_name='auth_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def auth_set(sid, data=None):
|
||||
info, status = auth_handler(sio, sid, session, min_level='member', event_name='auth_set').set(data)
|
||||
# AUTH EVENTS
|
||||
|
||||
|
||||
# PROFILE EVENTS
|
||||
@sio.event
|
||||
def profile_get(sid, data=None):
|
||||
info, status = profile_handler(sio, sid, session, min_level='member', event_name='profile_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def profile_get_permissions(sid, data=None):
|
||||
info, status = profile_handler(sio, sid, session, min_level='member', event_name='profile_get_permissions').get_permissions(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def profile_set(sid, data=None):
|
||||
info, status = profile_handler(sio, sid, session, min_level='member', event_name='profile_set').set(data)
|
||||
|
||||
@sio.event
|
||||
def profile_delete(sid, data=None):
|
||||
info, status = profile_handler(sio, sid, session, min_level='member', event_name='profile_delete').delete(data)
|
||||
# PROFILE EVENTS END
|
||||
|
||||
|
||||
# FRIEND EVENTS START
|
||||
@sio.event
|
||||
def friend_get(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def friend_get_requests(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_get_requests').get_requests(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def friend_get_recomendations(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_get_recomendations').get_recomendations(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def friend_add_request(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_add_request').add_request(data)
|
||||
|
||||
@sio.event
|
||||
def friend_approve_request(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_approve_request').approve_request(data)
|
||||
|
||||
@sio.event
|
||||
def friend_remove_request(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_remove_request').remove_request(data)
|
||||
|
||||
@sio.event
|
||||
def friend_reject_request(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='friend_reject_request').reject_request(data)
|
||||
|
||||
@sio.event
|
||||
def friend_remove(sid, data=None):
|
||||
info, status = friend_handler(sio, sid, session, min_level='member', event_name='remove').remove(data)
|
||||
# FRIEND EVENTS END
|
||||
|
||||
|
||||
# OCCUPATION EVENTS
|
||||
@sio.event
|
||||
def occupation_get(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def occupation_get_all(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_get_all').get_all(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def occupation_set(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_set').set(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_set_request(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_set_request').set_request(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_get_request(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_get_request').get_request(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def occupation_get_all_requests(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_get_all_request').get_all_request(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def occupation_delete_request(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='member', event_name='occupation_delete_request').delete_request(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_approve_request(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='management', event_name='occupation_approve_request').approve_request(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_reject_request(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='management', event_name='occupation_reject_request').reject_request(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_create(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='management', event_name='occupation_create').create(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_edit(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='management', event_name='occupation_edit').edit(data)
|
||||
|
||||
@sio.event
|
||||
def occupation_delete_occupation(sid, data=None):
|
||||
info, status = occupation_handler(sio, sid, session, min_level='management', event_name='occupation_delete_occupation').delete_occupation(data)
|
||||
# OCCUPATION EVENTS
|
||||
|
||||
|
||||
# TEAM EVENTS
|
||||
@sio.event
|
||||
def team_get(sid, data=None):
|
||||
info, status = team_handler(sio, sid, session, min_level='member', event_name='team_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def team_get_all(sid, data=None):
|
||||
info, status = team_handler(sio, sid, session, min_level='member', event_name='team_get_all').get_all(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def team_get_leaders(sid, data=None):
|
||||
info, status = team_handler(sio, sid, session, min_level='member', event_name='team_get_leaders').get_leaders(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def team_get_members(sid, data=None):
|
||||
info, status = team_handler(sio, sid, session, min_level='member', event_name='team_get_members').get_members(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def team_set(sid, data=None):
|
||||
info, status = team_handler(sio, sid, session, min_level='member', event_name='team_set').set(data)
|
||||
|
||||
@sio.event
|
||||
def team_delete_leaders(sid, data=None):
|
||||
info, status = team_handler(sio, sid, session, min_level='member', event_name='team_delete_leaders').delete_leaders(data)
|
||||
# TEAM EVENTS
|
||||
|
||||
|
||||
# POST EVENTS
|
||||
@sio.event
|
||||
def post_get_feed(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get_feed').get_feed(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_get(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_get_memories(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get').get_memories(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_get_user(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get_user').get_user(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_get_friends(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get_friends').get_friends(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_get_team(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get_team').get_team(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_get_permissions(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_get_permissions').get_permissions(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_set(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_set').set(data)
|
||||
|
||||
@sio.event
|
||||
def post_delete(sid, data=None):
|
||||
info, status = post_handler(sio, sid, session, min_level='member', event_name='post_delete').delete(data)
|
||||
# POST EVENTS
|
||||
|
||||
|
||||
# COMMENT EVENTS
|
||||
@sio.event
|
||||
def comment_get(sid, data=None):
|
||||
info, status = comment_handler(sio, sid, session, min_level='member', event_name='comment_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_get_post(sid, data=None):
|
||||
info, status = comment_handler(sio, sid, session, min_level='member', event_name='comment_get_post').get_post(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_get_permissions(sid, data=None):
|
||||
info, status = comment_handler(sio, sid, session, min_level='member', event_name='comment_get_permissions').get_permissions(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_set(sid, data=None):
|
||||
info, status = comment_handler(sio, sid, session, min_level='member', event_name='comment_set').set(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_delete(sid, data=None):
|
||||
info, status = comment_handler(sio, sid, session, min_level='member', event_name='comment_delete').delete(data)
|
||||
return True, info
|
||||
# COMMENT EVENTS
|
||||
|
||||
|
||||
# IMPRESSION EVENTS
|
||||
@sio.event
|
||||
def post_impression_get(sid, data=None):
|
||||
info, status = post_impression_handler(sio, sid, session, min_level='member', event_name='post_impression_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_impression_get_post(sid, data=None):
|
||||
info, status = post_impression_handler(sio, sid, session, min_level='member', event_name='post_impression_get_post').get_post(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_impression_count(sid, data=None):
|
||||
info, status = post_impression_handler(sio, sid, session, min_level='member', event_name='post_impression_count').count(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_impression_set(sid, data=None):
|
||||
info, status = post_impression_handler(sio, sid, session, min_level='member', event_name='post_impression_set').set(data)
|
||||
|
||||
@sio.event
|
||||
def post_impression_delete(sid, data=None):
|
||||
info, status = post_impression_handler(sio, sid, session, min_level='member', event_name='post_impression_delete').delete(data)
|
||||
|
||||
@sio.event
|
||||
def comment_impression_get(sid, data=None):
|
||||
info, status = comment_impression_handler(sio, sid, session, min_level='member', event_name='comment_impression_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_impression_get_comment(sid, data=None):
|
||||
info, status = comment_impression_handler(sio, sid, session, min_level='member', event_name='comment_impression_get_comment').get_comment(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_impression_count(sid, data=None):
|
||||
info, status = comment_impression_handler(sio, sid, session, min_level='member', event_name='comment_impression_count').count(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def comment_impression_set(sid, data=None):
|
||||
info, status = comment_impression_handler(sio, sid, session, min_level='member', event_name='comment_impression_set').set(data)
|
||||
|
||||
@sio.event
|
||||
def comment_impression_delete(sid, data=None):
|
||||
info, status = comment_impression_handler(sio, sid, session, min_level='member', event_name='comment_impression_delete').delete(data)
|
||||
# IMPRESSION EVENTS END
|
||||
|
||||
|
||||
# NOTIFICATION EVENTS START
|
||||
@sio.event
|
||||
def notification_get(sid, data=None):
|
||||
info, status = notification_handler(sio, sid, session, min_level='member', event_name='notification_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def notification_create(sid, data=None):
|
||||
status = notification_handler(sio, sid, session, min_level='member', event_name='notification_create').create(data)
|
||||
|
||||
@sio.event
|
||||
def notification_delete(sid, data=None):
|
||||
status = notification_handler(sio, sid, session, min_level='member', event_name='notification_delete').delete(data)
|
||||
|
||||
@sio.event
|
||||
def notification_remove(sid, data=None):
|
||||
status = notification_handler(sio, sid, session, min_level='member', event_name='notification_remove').remove(data)
|
||||
# NOTIFICATION EVENTS END
|
||||
|
||||
|
||||
# OTHER EVENTS
|
||||
@sio.event
|
||||
def get_ntfy_topic(sid, data=None):
|
||||
info = {'topic': None}
|
||||
if sio.get_session(sid)['level']:
|
||||
user_id = sio.get_session(sid)['id']
|
||||
username = user_info.auth(user_id=user_id).get()['username']
|
||||
|
||||
        ntfy_topic = f"{username}-{user_id[:8]}"
|
||||
        info['topic'] = ntfy_topic
|
||||
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def server_code_get(sid, data=None):
|
||||
code = config_read('miscellaneous', 'servercode')
|
||||
info = {'server_code': code}
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def is_post_slot(sid, data=None):
|
||||
info = None
|
||||
if timestamp().is_valid_time():
|
||||
info = {'is_post_slot': True}
|
||||
else:
|
||||
info = {'is_post_slot': False}
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def get_date(sid, data=None):
|
||||
info = {'date':timestamp().date}
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def post_slot_get(sid, data=None):
|
||||
info, status = post_slot_handler(sio, sid, session, min_level='member', event_name='post_slot_get').get(data)
|
||||
return True, info
|
||||
|
||||
@sio.event
|
||||
def shutdown(sid, data=None):
|
||||
info, status = server(sio, sid, session, min_level='admin', event_name='shutdown').shutdown(data)
|
||||
# OTHER EVENTS
|
||||
|
||||
# ENCRYPTION EVENTS START
|
||||
@sio.event
|
||||
def decrypt(sid, data=None):
|
||||
success = encryption_handler(session).decrypt(data)
|
||||
return True, {'success': success}
|
||||
|
||||
@sio.event
|
||||
def get_mode(sid, data=None):
|
||||
sss_enabled = config_read('database', 'ShamirSecretSharing')
|
||||
min_shares = config_read('database', 'MinimumShares')
|
||||
info = {'mode': session.mode, 'password': True, 'sss': sss_enabled, 'min_shares': min_shares}
|
||||
return True, info
|
||||
# ENCRYPTION EVENTS END
|
||||
|
||||
def test():
|
||||
from modules.algorithms.recomend import main as recommend
|
||||
recommend()
|
||||
|
||||
def main():
|
||||
# add mode check + while loop to background tasks
|
||||
sio.start_background_task(server_service, session)
|
||||
open_port = int(config_read('networking', 'Port'))
|
||||
eventlet.wsgi.server(eventlet.listen(('', open_port)), app)
|
||||
server(sio, None, session, min_level='admin', event_name='shutdown').internal_shutdown({'time': 0.1})
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
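
For readers unfamiliar with python-socketio, the (True, info) convention documented inside main.py looks like this from a client's point of view. This is an illustrative sketch only: the BeOpen client itself is not part of this diff, and the host, credentials and chosen events are placeholders.

    import socketio

    client = socketio.Client()
    client.connect('http://localhost:9999')   # port taken from [networking] in config.ini

    # call() waits for the handler's acknowledgement; handlers that return data
    # answer with (True, info), as explained in the comment block after login() in main.py
    is_callback, info = client.call('login', {'username': 'example', 'password': 'example'})
    if is_callback and info.get('logged_in'):
        print(client.call('post_get_feed')[1])

    client.disconnect()
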
1
server/modules/algorithms/__init__.py
Normal file
@@ -0,0 +1 @@
__all__ = ['uuid', 'univ', 'hash']
BIN
server/modules/algorithms/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/algorithms/__pycache__/hash.cpython-311.pyc
Normal file
BIN
server/modules/algorithms/__pycache__/recomend.cpython-311.pyc
Normal file
BIN
server/modules/algorithms/__pycache__/univ.cpython-311.pyc
Normal file
BIN
server/modules/algorithms/__pycache__/uuid.cpython-311.pyc
Normal file
22
server/modules/algorithms/hash.cpp
Normal file
@@ -0,0 +1,22 @@
#include <string.h>
#include <string>
#include <cmath>
typedef long long int Lint;

extern "C" Lint hash(char* str) {
    Lint m = std::pow(10,7) + 7;
    int p = 97;
    Lint total = 0;

    for (int i=0; i<strlen(str); i++) {
        total += (int(str[i]) - 32) * pow(p,i);
    }

    Lint result = total % m;
    return result;
}

extern "C" Lint printc(char* str) {
    int num = strlen(str);
    return num;
}
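
uuid.py (further down in this diff) calls this library through ctypes as libcpphash.so. As a readable reference, the same rolling polynomial hash can be restated in pure Python; note that the C++ version accumulates through floating-point pow and a fixed-width integer, so results can diverge from this sketch for long strings.

    def poly_hash(s, p=97, m=10**7 + 7):
        # mirrors hash() in hash.cpp: sum of (char - 32) * p**i, reduced mod m at the end
        total = 0
        for i, ch in enumerate(s):
            total += (ord(ch) - 32) * p**i
        return total % m

    print(poly_hash("hello"))
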
BIN
server/modules/algorithms/hash.o
Normal file
BIN
server/modules/algorithms/libcpphash.so
Normal file
155
server/modules/algorithms/recomend.py
Normal file
@@ -0,0 +1,155 @@
from modules.user import info as user_info
from modules.algorithms.univ import dict_key_verify
from modules.algorithms.uuid import hash_string


class User():
    def __init__(self, username, origin=False):
        self.username = username
        self.friends = user_info.friend(username=username)
        self.origin = origin

        self.exclude = []
        self.count = 1
        self.depth = 0
        self.score = 0

        self.friend_list = []

    def find_friends(self, exclude=[], **kwargs):
        self.exclude += exclude
        friends = self.friends.get()
        if dict_key_verify(friends, "friends"):
            self.__organise_friends(friends['friends'])

        self.__find_excluded()

    def __organise_friends(self, friends, **kwargs):
        # used to create the user objects of friends
        for friend in friends:
            if friend['username'] not in self.exclude:
                self.friend_list.append(User(friend['username']))

    def __find_excluded(self):
        # gathers the users to be excluded from the next node's neighbours and sets this list equal to self.exclude
        # this exclude list includes the previously passed exclude list
        if self.username not in self.exclude:
            self.exclude.append(self.username)
        if self.origin:
            self.exclude = self.exclude + [friend.username for friend in self.friend_list]
            requests = self.friends.get_requests()
            if dict_key_verify(requests, "requests"):
                self.exclude = self.exclude + [request for request in requests["requests"]]

    def __hash__(self):
        obj_hash = hash_string(self.username)
        return obj_hash


class Graph():
    def __init__(self, username):
        self.origin_user = User(username, True)
        self.graph = [[]] * (10**7+7)

        self.friend_directory = [None] * (10**7+7)
        self.friend_directory[hash(self.origin_user)] = self.origin_user
        self.exclude = []

    def generate(self, depth=1):
        self.origin_user.depth = depth-1
        self.__add_user_friends(self.origin_user, self.origin_user, depth)

    def __add_user_friends(self, origin, source, depth):
        origin.find_friends(self.exclude + [source.username])
        if hash(self.origin_user) == hash(origin):
            self.exclude += origin.exclude

        for friend in origin.friend_list:
            friend_hash = hash(friend)
            self.__add_edge(hash(origin), friend_hash)

            # if this user already exists in the graph add to their count in the user's object
            # this count keeps track of how many other users' friend lists a certain user is on
            if self.friend_directory[friend_hash]:
                self.friend_directory[friend_hash].count += 1
            else:
                self.friend_directory[friend_hash] = friend

            if depth-1 > 0:
                # recursively calls the function until the depth is 0.
                self.__add_user_friends(friend, origin, depth-1)

    def __add_edge(self, node, edge):
        # using the + operator rather than .append() because [[]] * n creates n references
        # to the same list, so appending in place would add the edge to every node at once
        self.graph[node] = self.graph[node] + [edge]

    def bft(self):
        self.visted = []
        # adds the hash of the selected origin user to the edge queue
        self.edge_queue = [hash(self.origin_user)]

        self.__visit(self.edge_queue[0])

    def __visit(self, origin):
        # the origin is a number and so can be used as an index for the graph array
        start_pos = self.graph[origin]
        self.__on_visit(origin)

        # adds the current node to the visited list and removes it from the queue
        self.edge_queue.pop(len(self.edge_queue)-1)
        self.visted.append(origin)

        for neigbour in start_pos:
            neigbour_obj = self.friend_directory[neigbour]
            origin_obj = self.friend_directory[origin]

            # checks if the node has been visited yet, if not adds it to the edge queue and assigns it a depth from the origin
            if neigbour not in self.visted and neigbour not in self.edge_queue:
                neigbour_obj.depth = origin_obj.depth - 1
                self.edge_queue = [neigbour] + self.edge_queue

        if len(self.edge_queue) > 0:
            # recursively calls this method until the edge_queue is empty
            self.__visit(self.edge_queue[len(self.edge_queue)-1])

    def __on_visit(self, origin):
        origin_obj = self.friend_directory[origin]
        # each node is only visited once in the graph so the count is calculated when constructing the graph
        origin_obj.score = origin_obj.depth * origin_obj.count

    def recomend_friends(self):
        self.recomendations = []

        # removing the user requesting the recommendations and their friends from the visited list
        # this is done so that the user, or people who are already friends of the user, don't get recommended
        possible = []
        for user in self.visted:
            user_obj = self.friend_directory[user]
            if user_obj.username not in self.exclude:
                possible = possible + [user]

        while len(self.recomendations) != len(possible):
            largest = User(username="")
            largest.score = -1
            for friend in possible:
                friend_obj = self.friend_directory[friend]
                if friend_obj not in self.recomendations and friend_obj.score > largest.score:
                    largest = friend_obj

            self.recomendations.append(largest)


def recomend_friend(username, amount=1, depth=1):
    if not (depth >= 1 and depth <= 4):
        depth = 4

    friend_graph = Graph(username)
    friend_graph.generate(depth)
    friend_graph.bft()
    friend_graph.recomend_friends()

    recomended = [{'username': recomended.username} for recomended in friend_graph.recomendations[:amount]]
    return recomended


def main():
    result = recomend_friend("Jack", 3, 4)


if __name__ == "__main__":
    main()
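
The ranking above boils down to score = depth * count: depth counts down the further a candidate sits from the requesting user, and count is how many of the traversed friend lists the candidate appears on. A toy ranking with made-up users (no database involved) shows the effect:

    candidates = [
        {'username': 'ben',  'depth': 3, 'count': 2},   # a friend-of-a-friend seen twice
        {'username': 'amy',  'depth': 3, 'count': 1},
        {'username': 'carl', 'depth': 2, 'count': 4},   # further away but heavily connected
    ]
    ranked = sorted(candidates, key=lambda c: c['depth'] * c['count'], reverse=True)
    print([c['username'] for c in ranked])   # ['carl', 'ben', 'amy']
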
390
server/modules/algorithms/sss.cpp
Normal file
@@ -0,0 +1,390 @@
|
||||
#include <cstdlib>
|
||||
# include<iostream>
|
||||
# include<string>
|
||||
# include<random>
|
||||
# include<cmath>
|
||||
# include<array>
|
||||
#include <fstream>
|
||||
using namespace std;
|
||||
|
||||
typedef long int Lint; // 64 bits
|
||||
typedef double Ldouble;
|
||||
struct security {
|
||||
int num_shares;
|
||||
int num_required;
|
||||
};
|
||||
|
||||
struct shareStruct {
|
||||
int x;
|
||||
Lint y;
|
||||
};
|
||||
|
||||
bool isPrime(Lint n) {
|
||||
int flag = 0;
|
||||
for (int i = 2; i <= n / i; ++i) {
|
||||
if (n % i == 0) {
|
||||
flag = 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (flag == 0) return true;
|
||||
else return false;
|
||||
}
|
||||
|
||||
Lint genRandInt(int n) {
|
||||
// Returns a random number
|
||||
// between 2**(n-1)+1 and 2**n-1
|
||||
//long max = (long)powl(2, n) - 1;
|
||||
//long min = (long)powl(2, n - 1) + 1;
|
||||
long max = (long)pow(2, n) - 1;
|
||||
long min = (long)pow(2, n - 1) + 1;
|
||||
Lint result = min + (rand() % ( max - min + 1 ) );
|
||||
return result;
|
||||
}
|
||||
|
||||
Lint genPrime() {
|
||||
Lint prime = 10;
|
||||
|
||||
while (isPrime(prime) == false) {
|
||||
int complexity = 50;
|
||||
prime = genRandInt(complexity);
|
||||
}
|
||||
return prime;
|
||||
}
|
||||
|
||||
int* encodeSecret(int* poly, const int secret, const int num_required) {
|
||||
poly[num_required-1] = secret;
|
||||
return poly;
|
||||
}
|
||||
|
||||
Lint getPolyY(const int* poly, int poly_len, int poly_x, const Lint prime) {
|
||||
Lint total = 0;
|
||||
Lint poly_y = 0;
|
||||
|
||||
for (int i=0; i<poly_len+1; i++) {
|
||||
int power = poly_len - i;
|
||||
int coefficient = poly[i];
|
||||
poly_y = coefficient * pow(poly_x, power);
|
||||
total = total + poly_y;
|
||||
}
|
||||
|
||||
return total;
|
||||
}
|
||||
|
||||
shareStruct* genShares(int num_shares, int num_required, const int* poly, const Lint prime){
|
||||
shareStruct* shares = new shareStruct[num_shares];
|
||||
for (int i=1; i<=num_shares; i++) {
|
||||
shareStruct share;
|
||||
share.x = i;
|
||||
share.y = getPolyY(poly, num_required-1, share.x, prime);
|
||||
shares[i-1] = share;
|
||||
}
|
||||
return shares;
|
||||
}
|
||||
|
||||
int* genPoly(int degree, const Lint prime, const Lint secret) {
|
||||
int* poly = new int[degree];
|
||||
|
||||
for (int i = 0; i < degree; i++) {
|
||||
int random_num = genRandInt(10);
|
||||
poly[i] = prime % random_num;
|
||||
}
|
||||
return poly;
|
||||
}
|
||||
|
||||
// solving polynomials
|
||||
struct inputStruct {
|
||||
int required;
|
||||
shareStruct* shares;
|
||||
};
|
||||
|
||||
struct polyTerm {
|
||||
Lint coefficient;
|
||||
int power;
|
||||
};
|
||||
|
||||
struct linearEquation {
|
||||
shareStruct point;
|
||||
polyTerm* terms;
|
||||
};
|
||||
|
||||
linearEquation* constructEquations(const int required, shareStruct shares[]) {
|
||||
linearEquation* equations = new linearEquation[required];
|
||||
shareStruct share;
|
||||
polyTerm term;
|
||||
|
||||
for (int i = 0; i < required; i++) {
|
||||
share = shares[i];
|
||||
linearEquation equation;
|
||||
polyTerm* terms = new polyTerm[required];
|
||||
|
||||
for (int j = 0; j < required; j++) {
|
||||
term.power = required - 1 - j;
|
||||
terms[j] = term;
|
||||
}
|
||||
|
||||
equation.terms = terms;
|
||||
equation.point.x = share.x;
|
||||
equation.point.y = share.y;
|
||||
|
||||
equations[i] = equation;
|
||||
        // don't delete terms from memory as it's referenced in equations
|
||||
}
|
||||
return equations;
|
||||
}
|
||||
|
||||
struct matrix{
|
||||
Lint** matrix;
|
||||
int dimension_x;
|
||||
int dimension_y;
|
||||
};
|
||||
struct matrix_system {
|
||||
matrix A;
|
||||
matrix B;
|
||||
matrix X;
|
||||
};
|
||||
|
||||
matrix_system formMatrix(const linearEquation* equations, int required) {
|
||||
Lint** matrixA = new Lint*[required];
|
||||
Lint** matrixB = new Lint*[required];
|
||||
|
||||
for (int i=0; i < required; i++) {
|
||||
linearEquation equation = equations[i];
|
||||
Lint* lineA = new Lint[required];
|
||||
for (int j=0; j < required; j++) {
|
||||
lineA[j] = pow(equation.point.x, equation.terms[j].power);
|
||||
}
|
||||
matrixA[i] = lineA;
|
||||
|
||||
Lint* lineB = new Lint[1];
|
||||
lineB[0] = equation.point.y;
|
||||
matrixB[i] = lineB;
|
||||
}
|
||||
|
||||
matrix matrixA_data; matrix matrixB_data;
|
||||
matrixA_data.matrix = matrixA; matrixB_data.matrix = matrixB;
|
||||
|
||||
matrixA_data.dimension_x = required; matrixB_data.dimension_x = 1;
|
||||
matrixA_data.dimension_y = required; matrixB_data.dimension_y = required;
|
||||
|
||||
matrix_system matricies;
|
||||
matricies.A = matrixA_data; matricies.B = matrixB_data;
|
||||
|
||||
return matricies;
|
||||
}
|
||||
|
||||
Lint** findMinor(Lint** matrixA, const int dimension, const int pos_x, const int pos_y) {
|
||||
Lint** matrixB = new Lint*[dimension-1];
|
||||
int matrixB_pos_x = 0; int matrixB_pos_y = 0;
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Lint* line = new Lint[dimension-1];
|
||||
for (int j=0; j<dimension; j++) {
|
||||
if (i != pos_y and j != pos_x) {
|
||||
line[matrixB_pos_x] = matrixA[i][j];
|
||||
matrixB_pos_x++;
|
||||
}
|
||||
}
|
||||
if (matrixB_pos_x != 0) {
|
||||
matrixB[matrixB_pos_y] = line;
|
||||
matrixB_pos_y++;
|
||||
}
|
||||
else {
|
||||
delete[] line;
|
||||
}
|
||||
matrixB_pos_x = 0;
|
||||
}
|
||||
|
||||
return matrixB;
|
||||
}
|
||||
|
||||
Lint findDet(Lint** matrixA, const int dimension) {
|
||||
Lint det = 0;
|
||||
if (dimension == 0) {
|
||||
det = 1;
|
||||
}
|
||||
else if (dimension == 1) {
|
||||
det = matrixA[0][0];
|
||||
}
|
||||
else if (dimension == 2) {
|
||||
det = matrixA[0][0] * matrixA[1][1] - matrixA[0][1] * matrixA[1][0];
|
||||
}
|
||||
else {
|
||||
for (int i=0; i<dimension; i++) {
|
||||
            // reuse formMatrix? potentially split it up into formMatrixA and formMatrixB?
|
||||
Lint** matrixB = findMinor(matrixA, dimension, i, 0);
|
||||
Lint matrixB_det = findDet(matrixB, dimension-1);
|
||||
Lint term = matrixA[0][i] * matrixB_det;
|
||||
|
||||
if ((i+1)%2 == 0) {
|
||||
term = 0-term;
|
||||
}
|
||||
det = det + term;
|
||||
}
|
||||
}
|
||||
|
||||
return det;
|
||||
}
|
||||
|
||||
matrix formMatrixCofactors(Lint** matrixA, const int dimension) {
|
||||
Lint** matrixB = new Lint*[dimension];
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Lint* line = new Lint[dimension];
|
||||
|
||||
int sign = 1;
|
||||
if ((i+1)%2 == 0) {
|
||||
sign = -1;
|
||||
}
|
||||
for (int j=0; j<dimension; j++) {
|
||||
Lint** minor = findMinor(matrixA, dimension, j, i);
|
||||
Lint cofactor = findDet(minor, dimension-1) * sign;
|
||||
sign = -sign;
|
||||
line[j] = cofactor;
|
||||
}
|
||||
matrixB[i] = line;
|
||||
}
|
||||
|
||||
matrix matrix_data; matrix_data.matrix = matrixB;
|
||||
matrix_data.dimension_x = dimension; matrix_data.dimension_y = dimension;
|
||||
return matrix_data;
|
||||
}
|
||||
|
||||
matrix transposeMatrix(Lint** cofactors, const int dimension) {
|
||||
Lint** matrixB = new Lint*[dimension];
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Lint* line = new Lint[dimension];
|
||||
for (int j=0; j<dimension; j++) {
|
||||
line[j] = cofactors[j][i];
|
||||
}
|
||||
matrixB[i] = line;
|
||||
}
|
||||
|
||||
matrix matrixB_data; matrixB_data.matrix = matrixB;
|
||||
matrixB_data.dimension_x = dimension; matrixB_data.dimension_y = dimension;
|
||||
return matrixB_data;
|
||||
}
|
||||
|
||||
struct float_matrix{
|
||||
Ldouble** matrix;
|
||||
int dimension_x;
|
||||
int dimension_y;
|
||||
};
|
||||
struct float_matrix_system {
|
||||
matrix A;
|
||||
matrix B;
|
||||
matrix X;
|
||||
};
|
||||
|
||||
float_matrix multiplyConstant(matrix matrixA_data, const int dimension, const Lint det) {
|
||||
Ldouble** matrixB = new Ldouble*[dimension];
|
||||
Lint** matrixA = matrixA_data.matrix;
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Ldouble* line = new Ldouble[dimension];
|
||||
for (int j=0; j<dimension; j++) {
|
||||
line[j] = (1.0/det) * matrixA[i][j];
|
||||
}
|
||||
matrixB[i] = line;
|
||||
}
|
||||
float_matrix matrixB_data; matrixB_data.matrix = matrixB;
|
||||
matrixB_data.dimension_x = matrixA_data.dimension_x; matrixB_data.dimension_y = matrixA_data.dimension_y;
|
||||
|
||||
return matrixB_data;
|
||||
}
|
||||
|
||||
float_matrix multiplyMatricies(float_matrix inverseA_data, matrix matrixB_data) {
|
||||
int dimension_x = inverseA_data.dimension_x;
|
||||
int dimension_y = inverseA_data.dimension_y;
|
||||
|
||||
Ldouble** matrixC = new Ldouble*[matrixB_data.dimension_y];
|
||||
Ldouble** inverseA = inverseA_data.matrix;
|
||||
Lint** matrixB = matrixB_data.matrix;
|
||||
|
||||
for (int i=0; i<dimension_y; i++) {
|
||||
        Ldouble* line = new Ldouble[1];  // the result column vector has one entry per row
|
||||
Ldouble result = 0;
|
||||
for (int j=0; j<dimension_x; j++) {
|
||||
result = result + inverseA[i][j] * matrixB[j][0];
|
||||
}
|
||||
line[0] = result;
|
||||
matrixC[i] = line;
|
||||
}
|
||||
float_matrix matrixC_data; matrixC_data.matrix = matrixC;
|
||||
matrixC_data.dimension_x = matrixB_data.dimension_x; matrixC_data.dimension_y = matrixB_data.dimension_y;
|
||||
|
||||
return matrixC_data;
|
||||
}
|
||||
|
||||
Lint** StructToArray(shareStruct* struct_array, int len_array) {
|
||||
Lint** array = new Lint*[len_array];
|
||||
for (int i=0; i<len_array; i++) {
|
||||
array[i] = new Lint[2];
|
||||
array[i][0] = struct_array[i].x;
|
||||
array[i][1] = struct_array[i].y;
|
||||
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
shareStruct* ArrayToStruct(Lint** array, int len_array) {
|
||||
shareStruct* share_array = new shareStruct[len_array];
|
||||
for (int i=0; i<len_array; i++) {
|
||||
shareStruct share;
|
||||
share.x = array[i][0];
|
||||
share.y = array[i][1];
|
||||
share_array[i] = share;
|
||||
}
|
||||
return share_array;
|
||||
}
|
||||
|
||||
void writeShares(shareStruct* shares, const int num_shares, const int num_required, string root_path) {
|
||||
cout << root_path << endl;
|
||||
for (int i=0; i<num_shares; i++) {
|
||||
shareStruct share = shares[i];
|
||||
string share_path = root_path + "share-" + to_string(share.x) + ".txt";
|
||||
ofstream share_file(share_path);
|
||||
share_file << "Share number: " << share.x << endl;
|
||||
share_file << "Share secret: " << share.y << endl;
|
||||
share_file << "Minimum share required: " << to_string(num_required) << endl << endl;
|
||||
share_file << "IMPORTANT: Please remind your admin that its there job to distribute and delete shares from the server";
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" Lint solveInternal(shareStruct* shares, int required) {
|
||||
inputStruct inputs;
|
||||
inputs.shares = shares;
|
||||
inputs.required = required;
|
||||
|
||||
    linearEquation* equations = constructEquations(inputs.required, inputs.shares);
|
||||
|
||||
matrix_system matricies = formMatrix(equations, inputs.required);
|
||||
delete[] equations;
|
||||
Lint det = findDet(matricies.A.matrix, matricies.A.dimension_x);
|
||||
|
||||
matrix cofactors = formMatrixCofactors(matricies.A.matrix, matricies.A.dimension_x);
|
||||
matrix transposition = transposeMatrix(cofactors.matrix, cofactors.dimension_x);
|
||||
|
||||
float_matrix inverseA = multiplyConstant(transposition, transposition.dimension_x, det);
|
||||
float_matrix matrixC = multiplyMatricies(inverseA, matricies.B);
|
||||
|
||||
Lint secret = matrixC.matrix[matrixC.dimension_y-1][0];
|
||||
return secret;
|
||||
}
|
||||
|
||||
extern "C" void newSecretInternal(const Lint secret, const int num_shares, const int num_required, char* root_path) {
|
||||
string str(root_path);
|
||||
const Lint prime = genPrime();
|
||||
int* poly = genPoly(num_required-1, prime, secret);
|
||||
|
||||
poly = encodeSecret(poly, secret, num_required);
|
||||
shareStruct* shares = genShares(num_shares, num_required, poly, prime);
|
||||
|
||||
writeShares(shares, num_shares, num_required, root_path);
|
||||
}
|
||||
|
||||
|
||||
int main() {
|
||||
}
|
||||
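
The Python code that loads this library is not included in the portion of the diff shown here, so the shared-object name and the wrapper below are assumptions; only the struct layout and the extern "C" signatures are taken from the file above.

    import ctypes

    class ShareStruct(ctypes.Structure):
        # mirrors shareStruct in sss.cpp: int x (share number); long y (share secret)
        _fields_ = [('x', ctypes.c_int), ('y', ctypes.c_long)]

    lib = ctypes.CDLL('modules/algorithms/libsss.so')   # assumed build artefact name
    lib.solveInternal.argtypes = [ctypes.POINTER(ShareStruct), ctypes.c_int]
    lib.solveInternal.restype = ctypes.c_long

    # reconstruct the secret from any three shares (values from data/shares/ in this diff)
    shares = (ShareStruct * 3)(ShareStruct(1, 13652), ShareStruct(2, 16091), ShareStruct(3, 19662))
    print(lib.solveInternal(shares, 3))
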
47
server/modules/algorithms/univ.py
Normal file
@@ -0,0 +1,47 @@
from string import ascii_letters, digits


# checks a string for illegal characters
# string = string to be checked
# allow_chars = allowed characters, which should be passed as a string
def char_check(string, allow_chars):

    # default allow_chars value
    if allow_chars == None:
        allow_chars = ascii_letters + digits

    #allowed_char = ascii_letters + digits + "_" + "-"
    if set(string).difference(allow_chars):
        return True
    else:
        return False


def dict_key_verify(dictionary, keys, mode="and", *args, **kwargs):
    # checks if the dictionary exists, if the key exists as a field and if that field's value is not None
    # can be used to check if multiple keys exist
    if mode != "and" and mode != "or":
        mode = "and"

    verified = []
    if type(keys) != list:
        keys = [keys]

    for key in keys:
        if type(dictionary) != dict or key not in dictionary or not dictionary[key]:
            verified.append(False)
        else:
            verified.append(True)

    if mode == "and":
        if all(verified) == True:
            return True
    if mode == "or":
        if True in verified:
            return True
    return False


if __name__ == "__main__":
    data = {'name': "joe", 'job': "cuck", 'age': "69"}
    answer = dict_key_verify(data, ['job', 'names'], "and")
    print(answer)
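
char_check has no demo of its own in the __main__ block above, so a couple of illustrative calls:

    from string import ascii_letters, digits
    from modules.algorithms.univ import char_check

    print(char_check("alice_01", ascii_letters + digits + "_-"))   # False: every character is allowed
    print(char_check("alice!", ascii_letters + digits))            # True: "!" is outside the allowed set
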
102
server/modules/algorithms/uuid.py
Normal file
@@ -0,0 +1,102 @@
import random
import ctypes
import pathlib
import hashlib

# RBP
import time
# RBP


def bin_to_hex(byte):
    byte_hex = ""
    total = 0
    for i, bit in enumerate(byte):
        total += int(bit) * 2 ** i
    first_place = total // 16
    second_place = total - first_place * 16

    places = [first_place, second_place]
    for i, place in enumerate(places):
        if place < 10:
            byte_hex += str(place)
        else:
            byte_hex += chr(65 + place - 10)

    return byte_hex.lower()


def den_to_bin(number):
    byte_string = ""
    result = 2
    power = 0

    # finds the greatest power of 2 that can fit in the number
    # this defines the length of the binary number
    while result > 0:
        result = number // 2**power
        if result == 0:
            break
        power += 1

    for i in range(power-1, -1, -1):
        bit = number // 2**i
        number -= bit * 2**i
        byte_string += str(bit)

    return byte_string


def set_bits(binary, num_bits):
    for i in range(num_bits - len(binary)):
        binary += "0"
    return binary


#uuid START
def generate():
    byte_list = []

    # generates 16 8 bit numbers as strings
    for i in range(16):
        number = random.randint(0, 255)
        bits = den_to_bin(number)
        byte = set_bits(bits, 8)
        byte_list.append(byte)

    # setting certain places as pre-defined, as stated by the UUID4 spec (see appendix)
    byte_list[6] = byte_list[6][:4] + "0010"
    byte_list[8] = byte_list[8][:6] + "01"

    # UUIDs are always shown in terms of hex
    hex_string = ""
    for byte_index, byte in enumerate(byte_list):
        byte_hex = bin_to_hex(byte)
        # adds the dashes in the indexes as required by the UUID4 spec
        if byte_index in [4, 6, 8, 10]:
            hex_string += "-"
        hex_string += byte_hex

    return hex_string
#uuid END

#string hash START
def hash_string(string):
    string = string.replace("-", "0")
    string = string.replace("_", "0")
    libname = pathlib.Path().absolute() / "modules/algorithms/libcpphash.so"
    c_lib = ctypes.CDLL(libname)

    charptr = ctypes.POINTER(ctypes.c_char)
    c_lib.printc.argtypes = [charptr]
    c_lib.printc.restype = ctypes.c_int

    result = c_lib.hash(ctypes.c_char_p(string.encode('utf-8')))
    return result


def long_hash(string):
    result = hashlib.sha256(string.encode('utf-8'))
    result = result.hexdigest()
    return result

# string hash END

if __name__ == "__main__":
    result = hash_string("hello")
    print(result)
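
A quick sanity check of generate() is to feed its output to the standard library's uuid module; the version and variant bits set above should come back as UUID version 4 (an illustrative check, not part of the module):

    import uuid as std_uuid
    from modules.algorithms.uuid import generate

    value = generate()
    print(value, std_uuid.UUID(value).version)   # e.g. "xxxxxxxx-xxxx-4xxx-....", 4
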
1
server/modules/auth/__init__.py
Normal file
@@ -0,0 +1 @@
__all__ = ['auth']
BIN
server/modules/auth/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/auth/__pycache__/auth.cpython-311.pyc
Normal file
294
server/modules/auth/auth.py
Normal file
@@ -0,0 +1,294 @@
|
||||
# BEFORE PRODUCTION PUSH
|
||||
### Need to uncomment the try and except blocks built into these functions:
|
||||
#### login, register, admin_register
|
||||
|
||||
import sqlite3
|
||||
import time
|
||||
from string import ascii_letters, ascii_lowercase, digits
|
||||
|
||||
### MODULES
|
||||
from modules.track import *
|
||||
|
||||
from modules.user.generate import main as user_generate
|
||||
from modules.user import info as user_info
|
||||
|
||||
from modules.data.database import connect as db_connect
|
||||
from modules.data.config import read as config_read
|
||||
from modules.data.datetime import timestamp
|
||||
|
||||
from modules.algorithms.uuid import long_hash as hash_string
|
||||
from modules.algorithms.uuid import generate as uuid_generate
|
||||
from modules.algorithms.univ import char_check
|
||||
### MODULES
|
||||
|
||||
# need to change this to path
|
||||
database_name = config_read("database", "Path")
|
||||
|
||||
class reg_cred():
|
||||
def __init__(self, cred):
|
||||
self.level = config_read("user", "DefaultLevel")
|
||||
|
||||
self.key = cred['key']
|
||||
self.username = cred['username']
|
||||
        self.password = cred['password']
|
||||
        self.repassword = cred['repassword']
|
||||
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
logging.status("INFO", "registration initialised").status_update(self)
|
||||
|
||||
def exec(self):
|
||||
# CHECKS
|
||||
check_processes = [self.username_verify, self.username_bans, self.username_clash_check, self.password_verify]
|
||||
for check in check_processes:
|
||||
check()
|
||||
if self.status['level'] == "FAIL":
|
||||
return
|
||||
if not self.key_verify():
|
||||
return
|
||||
logging.status("INFO", "credential verification successful").status_update(self)
|
||||
# CHECKS
|
||||
|
||||
self.id = user_generate(self.username, self.password, self.level)
|
||||
#self.db.close()
|
||||
|
||||
logging.status("INFO", "registration successful").status_update(self)
|
||||
|
||||
def username_verify(self):
|
||||
# This will be configurable
|
||||
min_len = 3
|
||||
max_len = 25
|
||||
|
||||
if self.username == None:
|
||||
logging.status("FAIL", "username cannot be null").status_update(self)
|
||||
|
||||
elif len(self.username) < min_len or len(self.username) > max_len:
|
||||
logging.status("FAIL", f"username cant be shorter than {min_len} characters or longer than {max_len} characters").status_update(self)
|
||||
|
||||
elif char_check(self.username, ascii_letters + digits + "_" + "-") == True:
|
||||
logging.status("FAIL", f"username contains invalid characters").status_update(self)
|
||||
|
||||
def username_bans(self):
|
||||
servercode = config_read('miscellaneous', 'servercode')
|
||||
if servercode in self.username:
|
||||
logging.status("FAIL", "usernames contains servercode").status_update(self)
|
||||
|
||||
def username_clash_check(self):
|
||||
self.cur.execute("SELECT username FROM auth_credentials WHERE username = ?", (self.username,))
|
||||
|
||||
if self.cur.fetchall():
|
||||
logging.status("FAIL", "username is already in use").status_update(self)
|
||||
|
||||
def password_verify(self):
|
||||
# This will be configurable
|
||||
min_len = 4
|
||||
max_len = 100
|
||||
|
||||
if self.password is None:
|
||||
logging.status("FAIL", "password cannot be null").status_update(self)
|
||||
|
||||
elif len(self.password) < min_len or len(self.password) > max_len:
|
||||
logging.status("FAIL", f"password cant be shorter than {min_len} characters or longer than {max_len}").status_update(self)
|
||||
|
||||
elif self.password != self.repassword:
|
||||
logging.status("FAIL", f"passwords do not match").status_update(self)
|
||||
|
||||
def key_verify(self):
|
||||
if self.key == config_read('authorisation', 'RegistrationKey'):
|
||||
return True
|
||||
else:
logging.status("FAIL", "registration code is incorrect").status_update(self)
return False
|
||||
|
||||
class reg_admin(reg_cred):
|
||||
def __init__(self, cred):
|
||||
super().__init__(cred)
|
||||
self.level = "admin"
|
||||
|
||||
logging.status("INFO", "admin registration initialised").status_update(self)
|
||||
|
||||
def key_verify(self):
|
||||
if self.key == config_read('authorisation', 'AdminKey'):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def first_time(self):
|
||||
self.cur.execute("SELECT user_id FROM auth_credentials WHERE level = ?", (self.level,))
|
||||
value = self.cur.fetchone()
|
||||
|
||||
if value:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
class login_cred():
|
||||
def __init__(self, sio, sid, cred):
|
||||
self.username = cred['username']
|
||||
self.password = cred['password']
|
||||
|
||||
self.sio = sio
|
||||
self.sid = sid
|
||||
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
logging.status("INFO", "credential login initialised").status_update(self)
|
||||
|
||||
def exec(self):
|
||||
self.process_password()
|
||||
|
||||
self.cur.execute("SELECT user_id FROM auth_credentials WHERE username = ? AND password = ?", (self.username, self.password_hash))
|
||||
self.id = self.cur.fetchone()
|
||||
|
||||
if self.id:
|
||||
self.id = self.id[0]
|
||||
|
||||
logging.status("INFO", "valid login credentials").status_update(self)
|
||||
login_token.create_token(self)
|
||||
login_token.send_token(self)
|
||||
logging.status("INFO", "login successful").status_update(self)
|
||||
else:
|
||||
logging.status("FAIL", "invalid login credentials").status_update(self)
|
||||
|
||||
self.db.close()
|
||||
|
||||
def process_password(self):
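# the stored password hash is salted with the user_id, so the matching user
# row has to be looked up before the supplied password can be hashed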
|
||||
self.cur.execute("SELECT user_id FROM auth_credentials WHERE username = ?", (self.username,))
|
||||
user_id = self.cur.fetchone()
|
||||
if user_id:
|
||||
self.password_hash = hash_string(self.password + user_id[0])
|
||||
else:
|
||||
self.password_hash = None
|
||||
|
||||
class login_token():
|
||||
def __init__(self, cred):
|
||||
self.token = cred['token']
|
||||
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
logging.status("INFO", "token login initialised").status_update(self)
|
||||
|
||||
def exec(self):
|
||||
self.token_hash = hash_string(self.token)
|
||||
self.cur.execute("SELECT user_id, token_expire FROM auth_tokens WHERE token = ?", (self.token_hash,))
|
||||
fetch_data = self.cur.fetchall()
|
||||
self.id = None
|
||||
|
||||
if fetch_data:
|
||||
self.id, self.token_expire = fetch_data[0][0], fetch_data[0][1]
|
||||
|
||||
if self.token_expire > timestamp().now:
|
||||
logging.status("INFO", "valid token").status_update(self)
|
||||
else:
|
||||
logging.status("FAIL", "invalid token").status_update(self)
|
||||
|
||||
else:
|
||||
logging.status("FAIL", "invalid token").status_update(self)
|
||||
|
||||
self.db.close()
|
||||
|
||||
@staticmethod
|
||||
def create_token(self):
|
||||
expire_time = float(config_read("authorisation", "tokenexpirytime"))
|
||||
|
||||
self.token = uuid_generate()
|
||||
self.token_hash = hash_string(self.token)
|
||||
self.token_expire = timestamp().now + expire_time
|
||||
|
||||
### ALL NEEDS CHANGING
|
||||
self.cur.execute("INSERT INTO auth_tokens (user_id, token, token_expire) VALUES (?, ?, ?)", (self.id, self.token_hash, self.token_expire))
|
||||
self.db.commit()
|
||||
|
||||
logging.status("INFO", "authentication token created").status_update(self)
|
||||
|
||||
@staticmethod
|
||||
def send_token(self):
|
||||
self.sio.emit('recv_token', {'token':self.token, 'expire': self.token_expire}, room=self.sid)
|
||||
logging.status("INFO", "token sent").status_update(self)
|
||||
|
||||
|
||||
class error_process():
|
||||
def __init__(self):
|
||||
logging.status("WARNING", "something went wrong").status_update(self)
|
||||
self.id = None
|
||||
|
||||
def login(sio, sid, cred):
|
||||
if "token" in cred:
|
||||
|
||||
try:
|
||||
client = login_token(cred)
|
||||
client.exec()
|
||||
except:
|
||||
logging.status("FAIL", "token not authorised").status_update(client)
|
||||
|
||||
|
||||
elif all(param in cred for param in ['username', 'password']):
|
||||
|
||||
try:
|
||||
client = login_cred(sio, sid, cred)
|
||||
client.exec()
|
||||
except:
|
||||
logging.status("FAIL", "login failed").status_update(client)
|
||||
|
||||
else:
|
||||
client = error_process()
|
||||
logging.status("FAIL", "no credentials provided").status_update(client)
|
||||
|
||||
client.level = user_info.level(user_id=client.id).get()
|
||||
if client.level:
|
||||
client.level = client.level['level']
|
||||
return client.status, client.id, client.level
|
||||
|
||||
def register(cred):
|
||||
if all(param in cred for param in ['username', 'password', 'repassword', 'key']):
|
||||
|
||||
try:
|
||||
client = reg_cred(cred)
|
||||
client.exec()
|
||||
except:
|
||||
logging.status("FAIL", "registration failed").status_update(client)
|
||||
|
||||
else:
|
||||
client = error_process()
|
||||
logging.status("FAIL", "no credentials provided").status_update(client)
|
||||
|
||||
return client.status
|
||||
|
||||
def admin_register(cred):
|
||||
if all(param in cred for param in ['username', 'password', 'repassword', 'key']):
|
||||
try:
|
||||
client = reg_admin(cred)
|
||||
if client.key_verify() == True and client.first_time() == True:
|
||||
client.exec()
|
||||
else:
|
||||
logging.status("FAIL", "admin key does not match/admin already exists").status_update(client)
|
||||
except:
|
||||
logging.status("FAIL", "registration failed").status_update(client)
|
||||
|
||||
else:
|
||||
client = error_process()
|
||||
logging.status("FAIL", "no credentials provided").status_update(client)
|
||||
|
||||
return client.status
|
||||
|
||||
def authorised(sio, sid, min_level='admin'):
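# levels are ordered member < management < admin; the session passes if its
# level is at or above min_level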
|
||||
level_list = ['member', 'management', 'admin']
|
||||
|
||||
allow_levels = level_list[level_list.index(min_level):]
|
||||
level = sio.get_session(sid)['level']
|
||||
|
||||
if level in allow_levels:
|
||||
user_authorised = True
|
||||
else:
|
||||
user_authorised = False
|
||||
|
||||
return user_authorised
|
||||
|
||||
def main():
|
||||
error = error_process()
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
1
server/modules/data/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = ['config','database', 'datetime']
|
||||
BIN
server/modules/data/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
server/modules/data/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/data/__pycache__/config.cpython-310.pyc
Normal file
BIN
server/modules/data/__pycache__/config.cpython-311.pyc
Normal file
BIN
server/modules/data/__pycache__/database.cpython-310.pyc
Normal file
BIN
server/modules/data/__pycache__/database.cpython-311.pyc
Normal file
BIN
server/modules/data/__pycache__/datetime.cpython-310.pyc
Normal file
BIN
server/modules/data/__pycache__/datetime.cpython-311.pyc
Normal file
85
server/modules/data/config.py
Normal file
@@ -0,0 +1,85 @@
|
||||
import configparser
|
||||
from modules.track.logging import log
|
||||
|
||||
path = "data/config.ini"
|
||||
|
||||
def create():
|
||||
try:
|
||||
file = open(path, 'r')
|
||||
log("INFO", "Config already exists")
|
||||
return
|
||||
except FileNotFoundError as e:
|
||||
log("INFO", "Creating config file")
|
||||
pass
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
|
||||
config.add_section('authorisation')
|
||||
# change this to a randomly generated string
|
||||
config.set('authorisation', 'AdminKey', 'secret')
|
||||
config.set('authorisation', 'RegistrationKey', 'secret')
|
||||
config.set('authorisation', 'UsernameMaxLength', '20')
|
||||
config.set('authorisation', 'UsernameMinLength', '5')
|
||||
config.set('authorisation', 'PasswordMaxLength', '30')
|
||||
config.set('authorisation', 'PasswordMinLength', '5')
|
||||
config.set('authorisation', 'TokenExpiryTime', '2592000')
|
||||
|
||||
config.add_section('database')
|
||||
config.set('database', 'Path', 'data/database.db')
|
||||
config.set('database', 'Encrypt', 'false')
|
||||
config.set('database', 'ShamirSecretSharing', 'false')
|
||||
config.set('database', 'NumberOfShares', '5')
|
||||
config.set('database', 'MinimumShares', '3')
|
||||
config.set('database', 'KeyPath', 'data/key.txt')
|
||||
config.set('database', 'EncryptedPath', 'data/.cryptdatabase.db')
|
||||
config.set('database', 'EncryptionConfigPath', 'data/encryptconfig.txt')
|
||||
config.set('database', 'SaltPath', 'data/.salt.txt')
|
||||
config.set('database', 'SharesPath', 'data/shares/')
|
||||
|
||||
config.add_section('user')
|
||||
config.set('user', 'DefaultLevel', 'member')
|
||||
config.set('user', 'DefaultOccupationID', 'Null')
|
||||
|
||||
config.add_section('posts')
|
||||
config.set('posts', 'PostTimeLimit', '5') # minutes
|
||||
config.set('posts', 'DayStart', '9') #24 hour time
|
||||
config.set('posts', 'DayEnd', '17') #24 hour time
|
||||
|
||||
config.add_section('notifications')
|
||||
config.set('notifications', 'DefaultExpireTime', '604800')
|
||||
config.set('notifications', 'ntfyUrl', 'https://ntfy.example.com')
|
||||
|
||||
config.add_section('networking')
|
||||
config.set('networking', 'Port', '9999')
|
||||
|
||||
config.add_section('miscellaneous')
|
||||
config.set('miscellaneous', 'ServerCode', '12345')
|
||||
|
||||
with open(path, 'w') as configfile:
|
||||
config.write(configfile)
|
||||
log("INFO", "Created config file")
|
||||
|
||||
def read(section, key, *args, **kwargs):
|
||||
config = configparser.ConfigParser()
|
||||
config.read(path)
|
||||
|
||||
if section not in config:
|
||||
return None
|
||||
if key not in config[section]:
|
||||
return None
|
||||
|
||||
info = config[section][key]
|
||||
if info == "false":
|
||||
info = False
|
||||
if info == "true":
|
||||
info = True
|
||||
|
||||
return info
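# Illustrative usage (assuming the config file created above):
#   read("networking", "Port")   -> "9999"  (values come back as strings)
#   read("database", "Encrypt")  -> False   (only "true"/"false" are converted to bools)
#   read("missing", "Key")       -> None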
|
||||
|
||||
def main():
|
||||
create()
|
||||
info = read("users", "DefaultOccupation")
|
||||
print(info)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
693
server/modules/data/database.py
Normal file
@@ -0,0 +1,693 @@
|
||||
import sqlite3
|
||||
import os
|
||||
import ctypes
|
||||
import pathlib
|
||||
|
||||
import base64
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
|
||||
# db encrypt
|
||||
|
||||
#from pysqlcipher3 import dbapi2 as sqlite3
|
||||
# db encrypt
|
||||
|
||||
from modules.track.logging import log
|
||||
from modules.data.config import read as config_read
|
||||
from modules.algorithms.uuid import generate as uuid_generate
|
||||
|
||||
class connect():
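# Thin wrapper around sqlite3: create(obj) attaches the shared connection and
# cursor to the calling object so it can use obj.con / obj.cur directly.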
|
||||
def __init__(self):
|
||||
self.path = config_read("database", "Path")
|
||||
|
||||
def create(self, obj):
|
||||
self.con = sqlite3.connect(self.path)
|
||||
self.cur = self.con.cursor()
|
||||
|
||||
if obj != None:
|
||||
obj.con = self.con
|
||||
obj.cur = self.cur
|
||||
|
||||
def commit(self):
|
||||
self.con.commit()
|
||||
|
||||
def close(self):
|
||||
self.con.commit()
|
||||
self.con.close()
|
||||
|
||||
def execute(self, command, values=None):
cur = self.con.cursor()
if values is None:
cur.execute(command)
else:
cur.execute(command, values)
self.close()
|
||||
|
||||
# Table creation
|
||||
class create():
|
||||
def __init__(self):
|
||||
self.path = config_read("database", "Path")
|
||||
self.en_path = config_read("database", "EncryptedPath")
|
||||
|
||||
def tables(self):
|
||||
decrypted_database = os.path.exists(self.path)
|
||||
encrypted_database = os.path.exists(self.en_path)
|
||||
if decrypted_database or encrypted_database:
|
||||
return
|
||||
|
||||
con = sqlite3.connect(self.path)
|
||||
self.cur = con.cursor()
|
||||
|
||||
tables = [self.auth_credentials, self.auth_tokens, self.profile , self.friends, self.occupations, self.occupation_requests, self.teams, self.team_leaders, self.posts, self.comments, self.post_impressions, self.comment_impressions, self.time_slots, self.notifications, self.notifications_sent]
|
||||
for table in tables:
|
||||
table()
|
||||
|
||||
def auth_credentials(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS auth_credentials (
|
||||
user_id TEXT NOT NULL PRIMARY KEY,
|
||||
username TEXT NOT NULL,
|
||||
password TEXT NOT NULL,
|
||||
level TEXT NOT NULL,
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def auth_tokens(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS auth_tokens(
|
||||
user_id TEXT NOT NULL,
|
||||
token TEXT NOT NULL PRIMARY KEY,
|
||||
token_expire REAL NOT NULL,
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES auth_credentials (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def profile(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS profile (
|
||||
user_id TEXT NOT NULL PRIMARY KEY,
|
||||
occupation_id TEXT,
|
||||
name TEXT,
|
||||
picture TEXT,
|
||||
biography TEXT,
|
||||
role TEXT,
|
||||
num_friends INTEGER DEFAULT 0,
|
||||
FOREIGN KEY (occupation_id)
|
||||
REFERENCES occupations (occupation_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE SET NULL
|
||||
)
|
||||
""")
|
||||
|
||||
def friends(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS friends (
|
||||
user_id TEXT NOT NULL,
|
||||
friend_id TEXT NOT NULL,
|
||||
approved BOOLEAN,
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (friend_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
PRIMARY KEY (user_id, friend_id)
|
||||
)
|
||||
""")
|
||||
|
||||
def occupations(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS occupations (
|
||||
occupation_id TEXT NOT NULL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT
|
||||
)
|
||||
""")
|
||||
|
||||
def occupation_requests(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS occupation_requests (
|
||||
user_id TEXT NOT NULL PRIMARY KEY,
|
||||
occupation_id TEXT NOT NULL,
|
||||
approved BOOLEAN DEFAULT False NOT NULL,
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (occupation_id)
|
||||
REFERENCES occupations (occupation_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def teams(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS teams (
|
||||
team_id TEXT NOT NULL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
occupation_id TEXT,
|
||||
user_id TEXT,
|
||||
FOREIGN KEY (occupation_id)
|
||||
REFERENCES occupations (occupation_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def team_leaders(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS team_leaders (
|
||||
user_id TEXT NOT NULL,
|
||||
team_id TEXT NOT NULL,
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (team_id)
|
||||
REFERENCES teams (team_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
PRIMARY KEY (user_id, team_id)
|
||||
)
|
||||
""")
|
||||
|
||||
def posts(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS posts (
|
||||
post_id TEXT NOT NULL PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
caption TEXT,
|
||||
date TEXT NOT NULL,
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def comments(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS comments (
|
||||
comment_id TEXT NOT NULL PRIMARY KEY,
|
||||
post_id TEXT NOT NULL,
|
||||
user_id TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
FOREIGN KEY (post_id)
|
||||
REFERENCES posts (post_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def post_impressions(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS post_impressions (
|
||||
impression_id TEXT NOT NULL PRIMARY KEY,
|
||||
post_id NOT NULL,
|
||||
user_id NOT NULL,
|
||||
type NOT NULL,
|
||||
FOREIGN KEY (post_id)
|
||||
REFERENCES posts (post_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def comment_impressions(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS comment_impressions (
|
||||
impression_id TEXT NOT NULL PRIMARY KEY,
|
||||
comment_id NOT NULL,
|
||||
user_id NOT NULL,
|
||||
type NOT NULL,
|
||||
FOREIGN KEY (comment_id)
|
||||
REFERENCES comments (comment_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
def time_slots(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS time_slots (
|
||||
date TEXT NOT NULL PRIMARY KEY,
|
||||
start FLOAT NOT NULL,
|
||||
end FLOAT NOT NULL
|
||||
)
|
||||
""")
|
||||
|
||||
def notifications(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS notifications (
|
||||
notification_id TEXT NOT NULL PRIMARY KEY,
|
||||
target_id TEXT NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
content TEXT,
|
||||
time_created FLOAT NOT NULL,
|
||||
expire_after FLOAT NOT NULL
|
||||
)
|
||||
""")
|
||||
|
||||
def notifications_sent(self):
|
||||
self.cur.execute("""
|
||||
CREATE TABLE IF NOT EXISTS notifications_sent (
|
||||
notification_id TEXT NOT NULL,
|
||||
user_id TEXT NOT NULL,
|
||||
time_sent FLOAT,
|
||||
sent BOOLEAN DEFAULT False NOT NULL,
|
||||
PRIMARY KEY (notification_id, user_id)
|
||||
FOREIGN KEY (notification_id)
|
||||
REFERENCES notifications (notification_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
FOREIGN KEY (user_id)
|
||||
REFERENCES profile (user_id)
|
||||
ON UPDATE CASCADE
|
||||
ON DELETE CASCADE
|
||||
)
|
||||
""")
|
||||
|
||||
class encryption():
|
||||
def __init__(self, session):
|
||||
self.key = key()
|
||||
# needs to pass num_shares and min_shares
|
||||
self.session = session
|
||||
|
||||
self.sss_enabled = config_read("database", "ShamirSecretSharing")
|
||||
self.en_config_path = config_read("database", "EncryptionConfigPath")
|
||||
self.db_path = config_read("database", "Path")
|
||||
self.en_db_path = config_read("database", "EncryptedPath")
|
||||
|
||||
def mode(self):
|
||||
# uses a large amount of logic statements to figure out what mode the server should enter on launch
|
||||
# additionally what flags it should launch with
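# Encrypt=true,  encrypted db present -> "decrypt" (wait for a decrypt request)
# Encrypt=true,  no encrypted db      -> encrypt now, then "decrypt"
# Encrypt=false, encrypted db present -> "decrypt" with the "forever" flag
# Encrypt=false, no encrypted db      -> "normal"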
|
||||
encryption_enabled = config_read("database", "Encrypt")
|
||||
db_encrypted = self.key.is_db_encrypted()
|
||||
|
||||
mode = None
|
||||
flags = []
|
||||
if encryption_enabled:
|
||||
if db_encrypted:
|
||||
mode = "decrypt"
|
||||
else:
|
||||
success = self.encrypt()
|
||||
if success:
|
||||
mode = "decrypt"
|
||||
else:
|
||||
exit()
|
||||
else:
|
||||
if db_encrypted:
|
||||
mode = "decrypt"
|
||||
flags = ["forever"]
|
||||
else:
|
||||
mode = "normal"
|
||||
self.session.db_encrypted = False
|
||||
|
||||
self.session.mode = mode
|
||||
self.session.flags = flags
|
||||
|
||||
def encrypt(self, flags=[]):
|
||||
if self.session.password:
|
||||
password = self.session.password
|
||||
else:
|
||||
password = self._generate()
|
||||
|
||||
if not password:
|
||||
log("FAIL", "Could not encrypt database, something went wrong, see logs for details")
|
||||
return False
|
||||
|
||||
scheme = self.key.read_db_scheme(password)
|
||||
with open(self.db_path, "rb") as db:
|
||||
db_data = db.read()
|
||||
|
||||
# create new encrypted database
|
||||
log("INFO", "Encrypting database")
|
||||
en_db_data = scheme.encrypt(db_data)
|
||||
with open(self.en_db_path, "wb") as en_db:
|
||||
en_db.write(en_db_data)
|
||||
|
||||
# delete the unencrypted database
os.remove(self.db_path)
|
||||
log("INFO", "Deleted unencrypted database")
|
||||
return True
|
||||
|
||||
def decrypt(self, data, flags=[]):
|
||||
min_shares = config_read('database', 'MinimumShares')
|
||||
if "sss" in flags:
|
||||
password = int(shares(min_shares).get_key(data['shares']))
|
||||
else:
|
||||
password = int(data['password'])
|
||||
|
||||
scheme = self.key.read_db_scheme(password)
|
||||
if not scheme:
|
||||
return False
|
||||
|
||||
# decrypting the database raw bytes
|
||||
with open(self.en_db_path, "rb") as en_db:
|
||||
en_db_data = en_db.read()
|
||||
|
||||
db_data = scheme.decrypt(en_db_data)
|
||||
with open(self.db_path, "wb") as db:
|
||||
db.write(db_data)
|
||||
|
||||
if not self._database_read():
|
||||
log("FAIL", "Decryption of database failed, see logs for details")
|
||||
return False
|
||||
log("INFO", "Decryption of database successful")
|
||||
|
||||
self.session.password = password
|
||||
for flag in flags:
|
||||
if flag == "forever":
|
||||
log("WARN", "Permanent decryption of the database")
|
||||
self.session.encrypt_on_shutdown = False
|
||||
self.key.delete()
|
||||
elif flag == "sss":
|
||||
with open(self.en_config_path, "w") as en_config:
|
||||
en_config.write(str(password))
|
||||
log("WARN", f"You decrypted the database using Shamir secret shares, your master password has been reconstructed and can be found on the server at the location: {self.en_config_path}. Please remember to delete this file after reading")
|
||||
|
||||
self.session.db_encrypted = False
|
||||
self.session.mode = "normal"
|
||||
return True
|
||||
|
||||
def _generate(self):
|
||||
options = self._read_config()
|
||||
if not self._config_check(options):
|
||||
log("FAIL", "Could not generate encryption scheme, something wrong in config file or with maseter password")
|
||||
return None
|
||||
else:
|
||||
options['password'] = int(options['password'])
|
||||
if self.sss_enabled:
|
||||
options['num_shares'] = int(options['num_shares'])
|
||||
options['min_shares'] = int(options['min_shares'])
|
||||
|
||||
self.key.generate_key_file(options['password'])
|
||||
|
||||
if self.sss_enabled:
|
||||
log("INFO", "Shamir Secret Sharing enabled, generating shares")
|
||||
sss = shares(options['min_shares'], options['num_shares'])
|
||||
sss_success = sss.generate_shares(options['password'])
|
||||
if not sss_success:
|
||||
log("FAIL", "Something went wrong generating shamir secret shares, see log for details")
|
||||
return None
|
||||
|
||||
log("INFO", "Deleting encryption configuration file containing master password")
|
||||
os.remove(self.en_config_path)
|
||||
return options['password']
|
||||
|
||||
def _read_config(self):
|
||||
num_shares = config_read("database", "NumberOfShares")
|
||||
min_shares = config_read("database", "MinimumShares")
|
||||
options = {}
|
||||
try:
|
||||
with open(self.en_config_path, "r") as config:
|
||||
log("INFO","Reading encryption configuration file")
|
||||
options['password'] = config.read()
|
||||
|
||||
if self.sss_enabled:
|
||||
options['num_shares'] = num_shares
|
||||
options['min_shares'] = min_shares
|
||||
except:
|
||||
return None
|
||||
|
||||
return options
|
||||
|
||||
def _config_check(self, options):
|
||||
# checking if the file exists
|
||||
try:
|
||||
en_config = open(self.en_config_path, "r")
|
||||
en_config.close()
|
||||
except:
|
||||
log("FAIL", f"Encryption config could not be found at {self.en_config_path}")
|
||||
return False
|
||||
|
||||
# check config contents
|
||||
try:
|
||||
log("INFO", "Testing master password type (must be int)")
|
||||
master_pass = int(options['password'])
|
||||
if len(options) == 1:
|
||||
return True
|
||||
elif self.sss_enabled and len(options) == 3:
|
||||
log("INFO", "Testing number of shares type (must be integer)")
|
||||
num_shares = int(options['num_shares'])
|
||||
log("INFO", "Testing minimum shares type (must be integer)")
|
||||
min_shares = int(options['min_shares'])
|
||||
|
||||
if num_shares < 20 and min_shares < 7:
|
||||
return True
|
||||
else:
|
||||
log("WARN", "SSS number of shares is to large or minimum shares is to large")
|
||||
return False
|
||||
else:
|
||||
log("WARN", "Something went wrong reading config file, check the docs for a guide")
|
||||
return False
|
||||
except:
|
||||
log("WARN", "The master password, number of shares and minimum shares all must be integers")
|
||||
return False
|
||||
|
||||
def _database_read(self):
|
||||
try:
|
||||
db = connect()
|
||||
db.create(self)
|
||||
db.cur.execute("SELECT * FROM time_slots")
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
class key():
|
||||
def __init__(self):
|
||||
self.key_path = config_read("database", "KeyPath")
|
||||
self.db_path = config_read("database", "Path")
|
||||
self.en_db_path = config_read("database", "EncryptedPath")
|
||||
self.salt_path = config_read("database", "SaltPath")
|
||||
|
||||
def _save_salt(self, salt):
|
||||
with open(self.salt_path, "wb") as salt_file:
|
||||
salt_file.write(salt)
|
||||
|
||||
def _read_salt(self):
|
||||
try:
|
||||
with open(self.salt_path, "rb") as salt_file:
|
||||
salt = salt_file.read()
|
||||
return salt
|
||||
except:
|
||||
return None
|
||||
|
||||
def _pass_to_scheme(self, password):
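# derives a Fernet key from the password using PBKDF2-HMAC-SHA256 and a stored
# random salt, so the same password always reproduces the same scheme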
|
||||
password = str(password).encode()
|
||||
salt = self._read_salt()
|
||||
if not salt:
|
||||
salt = os.urandom(16)
|
||||
self._save_salt(salt)
|
||||
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=hashes.SHA256(),
|
||||
length=32,
|
||||
salt=salt,
|
||||
iterations=480000,
|
||||
)
|
||||
key = base64.urlsafe_b64encode(kdf.derive(password))
|
||||
scheme = Fernet(key)
|
||||
|
||||
return scheme
|
||||
|
||||
def read_db_scheme(self, password):
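# the key file holds the real database key encrypted under the user's
# password; a wrong password fails to decrypt it and None is returned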
|
||||
file_scheme = self._pass_to_scheme(password)
|
||||
|
||||
with open(self.key_path, "r") as key_file:
|
||||
en_password = key_file.read()
|
||||
|
||||
db_scheme = None
|
||||
try:
|
||||
password = file_scheme.decrypt(en_password)
|
||||
db_scheme = self._pass_to_scheme(password)
|
||||
except:
|
||||
log("WARN", "Provided password is wrong or something is wrong with the database key")
|
||||
return db_scheme
|
||||
|
||||
def generate_key_file(self, password):
|
||||
#db_password = bytes(uuid_generate().replace("-", "").encode())
|
||||
db_password = uuid_generate().replace("-", "").encode()
|
||||
file_scheme = self._pass_to_scheme(password)
|
||||
en_db_password = str(file_scheme.encrypt(db_password).decode())
|
||||
with open(self.key_path, "w") as key_file:
|
||||
key_file.write(en_db_password)
|
||||
|
||||
def delete(self):
|
||||
os.remove(self.salt_path)
|
||||
os.remove(self.key_path)
|
||||
os.remove(self.en_db_path)
|
||||
|
||||
def is_db_encrypted(self):
|
||||
try:
|
||||
db = open(self.en_db_path, "rb")
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
class ShareStruct(ctypes.Structure):
|
||||
__fields__ = [("y", ctypes.c_longlong), ("x", ctypes.c_int)]
|
||||
|
||||
# this class is mainly geared towards acting as an interface for the C++ code
|
||||
class shares():
|
||||
def __init__(self, min_shares, num_shares=None):
|
||||
if num_shares:
|
||||
self.num_shares = int(num_shares)
|
||||
self.min_shares = int(min_shares)
|
||||
self.shares_path = config_read("database", "SharesPath")
|
||||
|
||||
def _dict_to_c_array(self, share_list):
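# converts [{'num': x, 'secret': y}, ...] into a C array of long long pairs in
# the shape solveInternal() in libcppsss.so expects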
|
||||
c_share_array = ((ctypes.c_longlong*2)*self.min_shares)
|
||||
share_array = []
|
||||
|
||||
for i in range(len(share_list)):
|
||||
c_share = (ctypes.c_longlong*2)(*[share_list[i]['num'], share_list[i]['secret']])
|
||||
share_array.append(c_share)
|
||||
|
||||
c_share_array = ((ctypes.c_longlong*2)*len(share_list))(*share_array)
|
||||
return c_share_array
|
||||
|
||||
def generate_shares(self, password):
|
||||
libname = pathlib.Path().absolute() / "modules/data/libcppsss.so"
|
||||
c_lib = ctypes.CDLL(libname)
|
||||
|
||||
c_lib.newSecretInternal.argtypes = [ctypes.c_longlong, ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_char)]
|
||||
c_lib.newSecretInternal.restype = None
|
||||
|
||||
path_ptr = ctypes.c_char_p(self.shares_path.encode('utf-8'))
|
||||
c_lib.newSecretInternal(password, self.num_shares, self.min_shares, path_ptr)
|
||||
|
||||
success = self.verify(password)
|
||||
return success
|
||||
|
||||
def get_key(self, share_list):
|
||||
libname = pathlib.Path().absolute() / "modules/data/libcppsss.so"
|
||||
c_lib = ctypes.CDLL(libname)
|
||||
|
||||
c_share_array = ((ctypes.c_longlong*2)*self.min_shares)
|
||||
c_share_array_pointer = ctypes.POINTER(c_share_array)
|
||||
|
||||
c_lib.solveInternal.argtypes = [c_share_array_pointer, ctypes.c_int]
|
||||
c_lib.solveInternal.restype = ctypes.c_longlong
|
||||
|
||||
new_share_array = ctypes.pointer(self._dict_to_c_array(share_list))
|
||||
result = c_lib.solveInternal(new_share_array, self.min_shares)
|
||||
return result
|
||||
|
||||
def verify(self, password):
|
||||
# used to verify that the shamir secret shares generated can be used to reconstruct the original key
|
||||
log("INFO", "Verifying share integrity")
|
||||
# we essentially take a sample of the shares
|
||||
# if all these samples work we assume any combination of said samples will
|
||||
# this works well since we test the combination of all the smallest numbers and all the largest
# the only reason a set of shares wouldn't work is because the values have become too large and the C++ side starts to lose accuracy
# if this doesn't happen then it's safe to assume all shares work
|
||||
shifts = self.num_shares - self.min_shares
|
||||
|
||||
for i in range(shifts):
|
||||
top = i + self.min_shares
|
||||
|
||||
shares_used = ""
|
||||
for num_share in range(i, top):
|
||||
shares_used += str(num_share) + ", "
|
||||
shares_used = shares_used[:-2]
|
||||
|
||||
log("INFO", f"Attempting to generate original password with shares: {shares_used}")
|
||||
share_list = []
|
||||
|
||||
for j in range(i, top):
|
||||
# reads the shares from their files
|
||||
path = self.shares_path + f"share-{j+1}.txt"
|
||||
with open(path, "r") as share:
|
||||
|
||||
try:
|
||||
x = int((share.readline().split(": "))[1])
|
||||
y = int((share.readline().split(": "))[1])
|
||||
share_list.append({'num': x, 'secret': y})
|
||||
except:
|
||||
log("WARN", "Something went wrong reading one of the shares, have they been altered?")
|
||||
break
|
||||
|
||||
result = self.get_key(share_list)
|
||||
|
||||
if result != password:
|
||||
log("WARN", "A set of shares could not be used to generate the original password, try again or use a diffrent password")
|
||||
return False
|
||||
else:
|
||||
log("INFO", f"{i+1}/{shifts} sets of shares successfully used to generate the original password")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def main():
|
||||
db = create()
|
||||
db.path = "database.db"
|
||||
db.tables()
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
class retrieve():
|
||||
def __init__(self):
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
def level(self, identifier):
|
||||
|
||||
self.cur.execute("SELECT level FROM auth_credentials WHERE username = ? OR user_id = ?", (identifier, identifier))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
return rez[0]
|
||||
|
||||
def user_id(self, username):
|
||||
|
||||
self.cur.execute("SELECT user_id FROM auth_credentials WHERE username = ?", (username,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
rez = rez[0]
|
||||
|
||||
return rez
|
||||
|
||||
|
||||
def username(self, user_id):
|
||||
|
||||
self.cur.execute("SELECT username FROM auth_credentials WHERE user_id = ?", (user_id,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
rez = rez[0]
|
||||
|
||||
return rez
|
||||
|
||||
def occupation_id(self, user_id):
|
||||
|
||||
self.cur.execute("SELECT occupation_id FROM profile WHERE user_id = ?", (user_id,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
rez = rez[0]
|
||||
|
||||
return rez
|
||||
125
server/modules/data/datetime.py
Normal file
@@ -0,0 +1,125 @@
|
||||
from datetime import date, timedelta, datetime
|
||||
import random
|
||||
import eventlet
|
||||
# MODULES
|
||||
from modules.data.config import read as config_read
|
||||
from modules.data.database import connect as db_connect
|
||||
from modules.handler import outgoing
|
||||
from modules.track.logging import log
|
||||
|
||||
# MODULES
|
||||
|
||||
class timestamp():
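# Helper around dates and the daily posting slot; properties such as .now,
# .start and .end recompute their value on every access.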
|
||||
def __init__(self):
|
||||
self.time_limit = float(config_read("posts", "PostTimeLimit")) * 60
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
@property
|
||||
def start(self):
|
||||
value = self.get_date_timestamp()
|
||||
self._start = value
|
||||
return self._start
|
||||
@start.setter
|
||||
def start(self, value):
|
||||
value = self.get_date_timestamp()
|
||||
self._start = value
|
||||
|
||||
@property
|
||||
def end(self):
|
||||
value = self.get_date_timestamp(day_mod=1) - 1
|
||||
self._end = value
|
||||
return self._end
|
||||
@end.setter
|
||||
def end(self, value):
|
||||
value = self.get_date_timestamp(day_mod=1) - 1
|
||||
self._end = value
|
||||
|
||||
@property
|
||||
def now(self):
|
||||
value = self.get_timestamp()
|
||||
self._now = value
|
||||
return value
|
||||
@now.setter
|
||||
def now(self, value):
|
||||
value = self.get_timestamp()
|
||||
self._now = value
|
||||
|
||||
@property
|
||||
def post_slot_start(self):
|
||||
value = self.get_slot()['start']
|
||||
self._post_slot_start = value
|
||||
return self._post_slot_start
|
||||
@post_slot_start.setter
|
||||
def post_slot_start(self, value):
|
||||
self._post_slot_start = self._post_slot_start
|
||||
|
||||
@property
|
||||
def post_slot_end(self):
|
||||
value = self.get_slot()['end']
|
||||
self._post_slot_end = value
|
||||
return self._post_slot_end
|
||||
@post_slot_end.setter
|
||||
def post_slot_end(self, value):
|
||||
self._post_slot_end = self._post_slot_end
|
||||
|
||||
@property
|
||||
def date(self):
|
||||
date = str(datetime.now().date())
|
||||
self._date = date
|
||||
return self._date
|
||||
@date.setter
|
||||
def date(self, value):
|
||||
self._date = value
|
||||
|
||||
def get_date_timestamp(self, year_mod=0, month_mod=0, day_mod=0, *args, **kwargs):
|
||||
modifier = [year_mod, month_mod, day_mod]
|
||||
|
||||
now_mod = (datetime.now()+timedelta(days=day_mod))
|
||||
date = (str(now_mod.date()).replace("-0", "-")).split("-")
|
||||
date = [int(string) for string in date]
|
||||
|
||||
timestamp = datetime(date[0], date[1], date[2]).timestamp()
|
||||
|
||||
return timestamp
|
||||
|
||||
def get_timestamp(self):
|
||||
now = (float(datetime.now().timestamp()))
|
||||
return now
|
||||
|
||||
def generate_slot(self, data=None):
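# ensures a posting slot exists for today and tomorrow: a window of
# PostTimeLimit minutes whose start falls between DayStart and DayEnd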
|
||||
for i in range(2):
|
||||
if i == 0:
|
||||
date = str(datetime.now().date())
|
||||
start = self.get_date_timestamp()
|
||||
else:
|
||||
now_mod = (datetime.now()+timedelta(days=1))
|
||||
date = (str(now_mod.date()))
|
||||
start = self.get_date_timestamp(0, 0, 1)
|
||||
|
||||
self.cur.execute("SELECT date FROM time_slots WHERE date=?", (date,))
|
||||
if not self.cur.fetchone():
|
||||
log("INFO", f"Generating time slot for {date}")
|
||||
|
||||
day_start = start + int(config_read("posts", "DayStart")) * 60 * 60
day_end = start + int(config_read("posts", "DayEnd")) * 60 * 60
slot_start = random.randint(int(day_start), int(day_end))
|
||||
slot_end = slot_start + self.time_limit
|
||||
self.cur.execute("INSERT INTO time_slots (date, start, end) VALUES (?, ?, ?)", (date, slot_start, slot_end))
|
||||
self.db.commit()
|
||||
|
||||
def get_slot(self):
|
||||
info = None
|
||||
|
||||
date = str(datetime.now().date())
|
||||
self.cur.execute("SELECT start, end FROM time_slots WHERE date=?", (date,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
info = {'start':rez[0], 'end':rez[1]}
|
||||
return info
|
||||
|
||||
def is_valid_time(self):
|
||||
if (self.now < self.post_slot_end) and (self.now >= self.post_slot_start):
|
||||
return True
|
||||
return False
|
||||
|
||||
BIN
server/modules/data/libcppsss.so
Normal file
390
server/modules/data/sss.cpp
Normal file
@@ -0,0 +1,390 @@
|
||||
#include <cstdlib>
|
||||
#include <iostream>
#include <string>
#include <random>
#include <cmath>
#include <array>
|
||||
#include <fstream>
|
||||
using namespace std;
|
||||
|
||||
typedef long long int Lint; // 64 bits
|
||||
typedef double Ldouble;
|
||||
struct security {
|
||||
int num_shares;
|
||||
int num_required;
|
||||
};
|
||||
|
||||
struct shareStruct {
|
||||
int x;
|
||||
Lint y;
|
||||
};
|
||||
|
||||
bool isPrime(Lint n) {
|
||||
int flag = 0;
|
||||
for (int i = 2; i <= n / i; ++i) {
|
||||
if (n % i == 0) {
|
||||
flag = 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (flag == 0) return true;
|
||||
else return false;
|
||||
}
|
||||
|
||||
Lint genRandInt(int n) {
|
||||
// Returns a random number
|
||||
// between 2**(n-1)+1 and 2**n-1
|
||||
//long max = (long)powl(2, n) - 1;
|
||||
//long min = (long)powl(2, n - 1) + 1;
|
||||
long max = (long)pow(2, n) - 1;
|
||||
long min = (long)pow(2, n - 1) + 1;
|
||||
Lint result = min + (rand() % ( max - min + 1 ) );
|
||||
return result;
|
||||
}
|
||||
|
||||
Lint genPrime() {
|
||||
Lint prime = 10;
|
||||
|
||||
while (isPrime(prime) == false) {
|
||||
int complexity = 50;
|
||||
prime = genRandInt(complexity);
|
||||
}
|
||||
return prime;
|
||||
}
|
||||
|
||||
int* encodeSecret(int* poly, const int secret, const int num_required) {
|
||||
poly[num_required-1] = secret;
|
||||
return poly;
|
||||
}
|
||||
|
||||
Lint getPolyY(const int* poly, int poly_len, int poly_x, const Lint prime) {
|
||||
Lint total = 0;
|
||||
Lint poly_y = 0;
|
||||
|
||||
for (int i=0; i<poly_len+1; i++) {
|
||||
int power = poly_len - i;
|
||||
int coefficient = poly[i];
|
||||
poly_y = coefficient * pow(poly_x, power);
|
||||
total = total + poly_y;
|
||||
}
|
||||
|
||||
return total;
|
||||
}
|
||||
|
||||
shareStruct* genShares(int num_shares, int num_required, const int* poly, const Lint prime){
|
||||
shareStruct* shares = new shareStruct[num_shares];
|
||||
for (int i=1; i<=num_shares; i++) {
|
||||
shareStruct share;
|
||||
share.x = i;
|
||||
share.y = getPolyY(poly, num_required-1, share.x, prime);
|
||||
shares[i-1] = share;
|
||||
}
|
||||
return shares;
|
||||
}
|
||||
|
||||
int* genPoly(int degree, const Lint prime, const Lint secret) {
|
||||
int* poly = new int[degree];
|
||||
|
||||
for (int i = 0; i < degree; i++) {
|
||||
int random_num = genRandInt(10);
|
||||
poly[i] = prime % random_num;
|
||||
}
|
||||
return poly;
|
||||
}
|
||||
|
||||
// solving polynomials
|
||||
struct inputStruct {
|
||||
int required;
|
||||
shareStruct* shares;
|
||||
};
|
||||
|
||||
struct polyTerm {
|
||||
Lint coefficient;
|
||||
int power;
|
||||
};
|
||||
|
||||
struct linearEquation {
|
||||
shareStruct point;
|
||||
polyTerm* terms;
|
||||
};
|
||||
|
||||
linearEquation* constructEquations(const int required, shareStruct shares[]) {
|
||||
linearEquation* equations = new linearEquation[required];
|
||||
shareStruct share;
|
||||
polyTerm term;
|
||||
|
||||
for (int i = 0; i < required; i++) {
|
||||
share = shares[i];
|
||||
linearEquation equation;
|
||||
polyTerm* terms = new polyTerm[required];
|
||||
|
||||
for (int j = 0; j < required; j++) {
|
||||
term.power = required - 1 - j;
|
||||
terms[j] = term;
|
||||
}
|
||||
|
||||
equation.terms = terms;
|
||||
equation.point.x = share.x;
|
||||
equation.point.y = share.y;
|
||||
|
||||
equations[i] = equation;
|
||||
// don't delete terms from memory as it's referenced in equations
|
||||
}
|
||||
return equations;
|
||||
}
|
||||
|
||||
struct matrix{
|
||||
Lint** matrix;
|
||||
int dimension_x;
|
||||
int dimension_y;
|
||||
};
|
||||
struct matrix_system {
|
||||
matrix A;
|
||||
matrix B;
|
||||
matrix X;
|
||||
};
|
||||
|
||||
matrix_system formMatrix(const linearEquation* equations, int required) {
|
||||
Lint** matrixA = new Lint*[required];
|
||||
Lint** matrixB = new Lint*[required];
|
||||
|
||||
for (int i=0; i < required; i++) {
|
||||
linearEquation equation = equations[i];
|
||||
Lint* lineA = new Lint[required];
|
||||
for (int j=0; j < required; j++) {
|
||||
lineA[j] = pow(equation.point.x, equation.terms[j].power);
|
||||
}
|
||||
matrixA[i] = lineA;
|
||||
|
||||
Lint* lineB = new Lint[1];
|
||||
lineB[0] = equation.point.y;
|
||||
matrixB[i] = lineB;
|
||||
}
|
||||
|
||||
matrix matrixA_data; matrix matrixB_data;
|
||||
matrixA_data.matrix = matrixA; matrixB_data.matrix = matrixB;
|
||||
|
||||
matrixA_data.dimension_x = required; matrixB_data.dimension_x = 1;
|
||||
matrixA_data.dimension_y = required; matrixB_data.dimension_y = required;
|
||||
|
||||
matrix_system matricies;
|
||||
matricies.A = matrixA_data; matricies.B = matrixB_data;
|
||||
|
||||
return matricies;
|
||||
}
|
||||
|
||||
Lint** findMinor(Lint** matrixA, const int dimension, const int pos_x, const int pos_y) {
|
||||
Lint** matrixB = new Lint*[dimension-1];
|
||||
int matrixB_pos_x = 0; int matrixB_pos_y = 0;
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Lint* line = new Lint[dimension-1];
|
||||
for (int j=0; j<dimension; j++) {
|
||||
if (i != pos_y and j != pos_x) {
|
||||
line[matrixB_pos_x] = matrixA[i][j];
|
||||
matrixB_pos_x++;
|
||||
}
|
||||
}
|
||||
if (matrixB_pos_x != 0) {
|
||||
matrixB[matrixB_pos_y] = line;
|
||||
matrixB_pos_y++;
|
||||
}
|
||||
else {
|
||||
delete[] line;
|
||||
}
|
||||
matrixB_pos_x = 0;
|
||||
}
|
||||
|
||||
return matrixB;
|
||||
}
|
||||
|
||||
Lint findDet(Lint** matrixA, const int dimension) {
|
||||
Lint det = 0;
|
||||
if (dimension == 0) {
|
||||
det = 1;
|
||||
}
|
||||
else if (dimension == 1) {
|
||||
det = matrixA[0][0];
|
||||
}
|
||||
else if (dimension == 2) {
|
||||
det = matrixA[0][0] * matrixA[1][1] - matrixA[0][1] * matrixA[1][0];
|
||||
}
|
||||
else {
|
||||
for (int i=0; i<dimension; i++) {
|
||||
// reuse formMatrix? potentially split it up into formMatrixA and formMatrixB?
|
||||
Lint** matrixB = findMinor(matrixA, dimension, i, 0);
|
||||
Lint matrixB_det = findDet(matrixB, dimension-1);
|
||||
Lint term = matrixA[0][i] * matrixB_det;
|
||||
|
||||
if ((i+1)%2 == 0) {
|
||||
term = 0-term;
|
||||
}
|
||||
det = det + term;
|
||||
}
|
||||
}
|
||||
|
||||
return det;
|
||||
}
|
||||
|
||||
matrix formMatrixCofactors(Lint** matrixA, const int dimension) {
|
||||
Lint** matrixB = new Lint*[dimension];
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Lint* line = new Lint[dimension];
|
||||
|
||||
int sign = 1;
|
||||
if ((i+1)%2 == 0) {
|
||||
sign = -1;
|
||||
}
|
||||
for (int j=0; j<dimension; j++) {
|
||||
Lint** minor = findMinor(matrixA, dimension, j, i);
|
||||
Lint cofactor = findDet(minor, dimension-1) * sign;
|
||||
sign = -sign;
|
||||
line[j] = cofactor;
|
||||
}
|
||||
matrixB[i] = line;
|
||||
}
|
||||
|
||||
matrix matrix_data; matrix_data.matrix = matrixB;
|
||||
matrix_data.dimension_x = dimension; matrix_data.dimension_y = dimension;
|
||||
return matrix_data;
|
||||
}
|
||||
|
||||
matrix transposeMatrix(Lint** cofactors, const int dimension) {
|
||||
Lint** matrixB = new Lint*[dimension];
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Lint* line = new Lint[dimension];
|
||||
for (int j=0; j<dimension; j++) {
|
||||
line[j] = cofactors[j][i];
|
||||
}
|
||||
matrixB[i] = line;
|
||||
}
|
||||
|
||||
matrix matrixB_data; matrixB_data.matrix = matrixB;
|
||||
matrixB_data.dimension_x = dimension; matrixB_data.dimension_y = dimension;
|
||||
return matrixB_data;
|
||||
}
|
||||
|
||||
struct float_matrix{
|
||||
Ldouble** matrix;
|
||||
int dimension_x;
|
||||
int dimension_y;
|
||||
};
|
||||
struct float_matrix_system {
|
||||
matrix A;
|
||||
matrix B;
|
||||
matrix X;
|
||||
};
|
||||
|
||||
float_matrix multiplyConstant(matrix matrixA_data, const int dimension, const Lint det) {
|
||||
Ldouble** matrixB = new Ldouble*[dimension];
|
||||
Lint** matrixA = matrixA_data.matrix;
|
||||
|
||||
for (int i=0; i<dimension; i++) {
|
||||
Ldouble* line = new Ldouble[dimension];
|
||||
for (int j=0; j<dimension; j++) {
|
||||
line[j] = (1.0/det) * matrixA[i][j];
|
||||
}
|
||||
matrixB[i] = line;
|
||||
}
|
||||
float_matrix matrixB_data; matrixB_data.matrix = matrixB;
|
||||
matrixB_data.dimension_x = matrixA_data.dimension_x; matrixB_data.dimension_y = matrixA_data.dimension_y;
|
||||
|
||||
return matrixB_data;
|
||||
}
|
||||
|
||||
float_matrix multiplyMatricies(float_matrix inverseA_data, matrix matrixB_data) {
|
||||
int dimension_x = inverseA_data.dimension_x;
|
||||
int dimension_y = inverseA_data.dimension_y;
|
||||
|
||||
Ldouble** matrixC = new Ldouble*[matrixB_data.dimension_y];
|
||||
Ldouble** inverseA = inverseA_data.matrix;
|
||||
Lint** matrixB = matrixB_data.matrix;
|
||||
|
||||
for (int i=0; i<dimension_y; i++) {
|
||||
Ldouble* line = new Ldouble[1];
|
||||
Ldouble result = 0;
|
||||
for (int j=0; j<dimension_x; j++) {
|
||||
result = result + inverseA[i][j] * matrixB[j][0];
|
||||
}
|
||||
line[0] = result;
|
||||
matrixC[i] = line;
|
||||
}
|
||||
float_matrix matrixC_data; matrixC_data.matrix = matrixC;
|
||||
matrixC_data.dimension_x = matrixB_data.dimension_x; matrixC_data.dimension_y = matrixB_data.dimension_y;
|
||||
|
||||
return matrixC_data;
|
||||
}
|
||||
|
||||
Lint** StructToArray(shareStruct* struct_array, int len_array) {
|
||||
Lint** array = new Lint*[len_array];
|
||||
for (int i=0; i<len_array; i++) {
|
||||
array[i] = new Lint[2];
|
||||
array[i][0] = struct_array[i].x;
|
||||
array[i][1] = struct_array[i].y;
|
||||
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
shareStruct* ArrayToStruct(Lint** array, int len_array) {
|
||||
shareStruct* share_array = new shareStruct[len_array];
|
||||
for (int i=0; i<len_array; i++) {
|
||||
shareStruct share;
|
||||
share.x = array[i][0];
|
||||
share.y = array[i][1];
|
||||
share_array[i] = share;
|
||||
}
|
||||
return share_array;
|
||||
}
|
||||
|
||||
void writeShares(shareStruct* shares, const int num_shares, const int num_required, string root_path) {
|
||||
cout << root_path << endl;
|
||||
for (int i=0; i<num_shares; i++) {
|
||||
shareStruct share = shares[i];
|
||||
string share_path = root_path + "share-" + to_string(share.x) + ".txt";
|
||||
ofstream share_file(share_path);
|
||||
share_file << "Share number: " << share.x << endl;
|
||||
share_file << "Share secret: " << share.y << endl;
|
||||
share_file << "Minimum share required: " << to_string(num_required) << endl << endl;
|
||||
share_file << "IMPORTANT: Please remind your admin that its there job to distribute and delete shares from the server";
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" Lint solveInternal(shareStruct* shares, int required) {
|
||||
inputStruct inputs;
|
||||
inputs.shares = shares;
|
||||
inputs.required = required;
|
||||
|
||||
linearEquation* equations = constructEquations(inputs.required, inputs.shares);
|
||||
|
||||
matrix_system matricies = formMatrix(equations, inputs.required);
|
||||
delete[] equations;
|
||||
Lint det = findDet(matricies.A.matrix, matricies.A.dimension_x);
|
||||
|
||||
matrix cofactors = formMatrixCofactors(matricies.A.matrix, matricies.A.dimension_x);
|
||||
matrix transposition = transposeMatrix(cofactors.matrix, cofactors.dimension_x);
|
||||
|
||||
float_matrix inverseA = multiplyConstant(transposition, transposition.dimension_x, det);
|
||||
float_matrix matrixC = multiplyMatricies(inverseA, matricies.B);
|
||||
|
||||
Lint secret = matrixC.matrix[matrixC.dimension_y-1][0];
|
||||
return secret;
|
||||
}
|
||||
|
||||
extern "C" void newSecretInternal(const Lint secret, const int num_shares, const int num_required, char* root_path) {
|
||||
string str(root_path);
|
||||
const Lint prime = genPrime();
|
||||
int* poly = genPoly(num_required-1, prime, secret);
|
||||
|
||||
poly = encodeSecret(poly, secret, num_required);
|
||||
shareStruct* shares = genShares(num_shares, num_required, poly, prime);
|
||||
|
||||
writeShares(shares, num_shares, num_required, root_path);
|
||||
}
|
||||
|
||||
|
||||
int main() {
|
||||
}
|
||||
1
server/modules/handler/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = ['handler','outgoing', 'tasks']
|
||||
BIN
server/modules/handler/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/handler/__pycache__/handler.cpython-311.pyc
Normal file
BIN
server/modules/handler/__pycache__/outgoing.cpython-311.pyc
Normal file
BIN
server/modules/handler/__pycache__/tasks.cpython-311.pyc
Normal file
BIN
server/modules/handler/__pycache__/threading.cpython-311.pyc
Normal file
1007
server/modules/handler/handler.py
Normal file
36
server/modules/handler/outgoing.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from modules.data.config import read as config_read
from modules.track.logging import log

import requests
|
||||
|
||||
def post_slot(sio, sid=None):
|
||||
if sid:
|
||||
sio.emit("post_slot", room=sid)
|
||||
else:
|
||||
sio.emit("post_slot")
|
||||
|
||||
def send_ntfy(sio, info, sid, username):
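# pushes the notification to a per-user ntfy topic ("<username>-<first 8 chars
# of the user_id>"); skipped while ntfyUrl is still the placeholder value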
|
||||
url = config_read("notifications", "ntfyUrl")
|
||||
if url == "https://ntfy.example.com":
|
||||
return
|
||||
|
||||
user_id = sio.get_session(sid)['id']
|
||||
nfty_topic = f"{username}-{user_id[:8]}"
|
||||
if url[-1] != "/":
|
||||
url = url + "/"
|
||||
print(url + ntfy_topic)
|
||||
|
||||
message = info['message'].encode(encoding='utf-8')
|
||||
title = info['title']
|
||||
|
||||
print(f"ntfy: {nfty_topic}")
|
||||
try:
|
||||
requests.post(f"{url}{ntfy_topic}", data=message, headers={"Title": title})
|
||||
except:
|
||||
log("WARN", "Notification server cannot be reached, ensure ntfy is up and that the provided url is correct")
|
||||
|
||||
def send_notification(sio, info, sid=None, username=None):
|
||||
if sid:
|
||||
send_ntfy(sio, info, sid, username)
|
||||
sio.emit("notification", info, room=sid)
|
||||
else:
|
||||
sio.emit("notification", info)
|
||||
100
server/modules/handler/tasks.py
Normal file
@@ -0,0 +1,100 @@
|
||||
import eventlet
|
||||
from modules.track.logging import log
|
||||
|
||||
from modules.data.database import connect as db_connect
|
||||
from modules.data.config import read as config_read
|
||||
from modules.data.datetime import timestamp
|
||||
from modules.handler import outgoing
|
||||
from modules.user.content import notification
|
||||
from modules.user.info import auth as auth_info
|
||||
|
||||
from modules.start.start import final_startup
|
||||
|
||||
def user_service(sio, sid):
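# per-connection background task: every 30 seconds any unsent notifications
# for this user are pushed over socket.io (and ntfy) and marked as sent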
|
||||
user_id = sio.get_session(sid)['id']
|
||||
|
||||
db = db_connect()
|
||||
db.create(None)
|
||||
log("INFO", f"Starting user service for {user_id}")
|
||||
|
||||
while True:
|
||||
eventlet.sleep(30)
|
||||
user_notification_service(db, sio, sid, user_id)
|
||||
|
||||
def user_notification_service(db_con, sio, sid, user_id):
|
||||
notifications = notification(user_id=user_id)
|
||||
notifications.columns = ['notification_id','title', 'content']
|
||||
username = auth_info(user_id=user_id).get()['username']
|
||||
|
||||
notif_queue = notifications.get_unsent()['notifications']
|
||||
if notif_queue:
|
||||
for notif in notif_queue:
|
||||
outgoing.send_notification(sio, notif, sid, username)
|
||||
db_con.cur.execute("UPDATE notifications_sent SET time_sent = ?, sent = ? WHERE user_id = ? AND notification_id = ?", (timestamp().now, True, user_id, notif['notification_id']))
|
||||
db_con.commit()
|
||||
|
||||
def post_time_notification():
|
||||
if timestamp().is_valid_time():
|
||||
log("INFO", "Sending post time notifications")
|
||||
post_time_limit = int(config_read('posts', 'posttimelimit'))
|
||||
title = "post-" + config_read('miscellaneous', 'ServerCode')
|
||||
content = f"you have {post_time_limit} minutes to post"
|
||||
target = "all-" + config_read('miscellaneous', 'ServerCode')
|
||||
# notifications has a special code for sending notifications across the server
|
||||
# if the target is set to "all-<unique server code>" the entire server is notified
|
||||
|
||||
notification_data = {'title': title, 'content': content, 'target_id': target, "expire_after": post_time_limit*60}
|
||||
notification().create(notification_data)
|
||||
notification_created = True
|
||||
log("INFO", "Sent post time notifications")
|
||||
else:
|
||||
notification_created = False
|
||||
|
||||
return notification_created
|
||||
|
||||
def notification_remove(db_con):
|
||||
db_con.cur.execute("SELECT notification_id, time_created, expire_after FROM notifications")
|
||||
rez = db_con.cur.fetchall()
|
||||
if rez:
|
||||
for notif in rez:
|
||||
if notif[1] + notif[2] < timestamp().now:
|
||||
notification(notification_id=notif[0]).delete()
|
||||
|
||||
def startup_notif():
|
||||
server_code = config_read('miscellaneous', 'servercode')
|
||||
notif_data = {'target_id': "all-"+server_code, 'title': "Server is up", 'content': "The server is now on and functioning", 'expire_after': 180}
|
||||
notification().create(notif_data)
|
||||
|
||||
def server_service(session):
|
||||
db = db_connect()
|
||||
db.create(None)
|
||||
log("INFO", f"Starting server background service")
|
||||
|
||||
while session.mode != "normal":
|
||||
eventlet.sleep(1)
|
||||
log("INFO", "Server mode normal, continuing startup")
|
||||
|
||||
final_startup(session)
|
||||
startup_notif()
|
||||
while True:
|
||||
# keeps the service running forever
|
||||
post_notification = False
|
||||
post_notif_title = "post-" + config_read('miscellaneous','ServerCode')
|
||||
db.cur.execute("SELECT time_created FROM notifications WHERE title=?", (post_notif_title,))
|
||||
rez = db.cur.fetchall()
|
||||
if rez:
|
||||
for notif in rez:
|
||||
if timestamp().start < notif[0] and timestamp().end > notif[0]:
|
||||
post_notification = True
|
||||
|
||||
today_end = timestamp().end
|
||||
|
||||
while timestamp().now < today_end:
|
||||
# keeps running this loop until the end of the day then it returns to the while above on the start of the new day
|
||||
#print(f"now: {timestamp().now}")
|
||||
eventlet.sleep(10)
|
||||
# removes expired notifications
|
||||
notification_remove(db)
|
||||
|
||||
if not post_notification:
|
||||
post_notification = post_time_notification()
|
||||
1
server/modules/start/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = ['start']
|
||||
BIN
server/modules/start/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/start/__pycache__/start.cpython-311.pyc
Normal file
36
server/modules/start/start.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from modules.data.database import create as db_create
|
||||
from modules.data.database import encryption
|
||||
from modules.data.config import read as config_read
|
||||
from modules.track.logging import log
|
||||
import os
|
||||
|
||||
def main(session):
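# first-run setup: ensure the data directories, config file and database
# tables exist, then run the encryption mode check if encryption applies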
|
||||
create_directories()
|
||||
log("INFO", "Ensuring server directories")
|
||||
|
||||
from modules.data.config import create as config_create
|
||||
log("INFO", "Ensuring config file")
|
||||
config_create()
|
||||
|
||||
log("INFO", "Ensuring database")
|
||||
db_create().tables()
|
||||
|
||||
if session.db_encrypted:
|
||||
log("INFO", "Checking encryption")
|
||||
encryption(session).mode()
|
||||
|
||||
def final_startup(session):
|
||||
from modules.data.datetime import timestamp as datetime_timestamp
|
||||
datetime_timestamp().generate_slot()
|
||||
|
||||
def create_directories():
|
||||
paths = ["data", "data/images"]
|
||||
if config_read("database", "ShamirSecretSharing"):
|
||||
paths = ["data", "data/images", "data/shares/"]
|
||||
for path in paths:
|
||||
if not os.path.exists(path):
|
||||
os.mkdir(path)
|
||||
log("INFO", f"Created new directory: {path}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
1
server/modules/track/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = ['logging']
|
||||
BIN
server/modules/track/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/track/__pycache__/logging.cpython-311.pyc
Normal file
109
server/modules/track/logging.py
Normal file
@@ -0,0 +1,109 @@
|
||||
from datetime import datetime
|
||||
from os.path import exists
|
||||
|
||||
class log():
|
||||
def __init__(self, level, message):
|
||||
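# subclasses (e.g. status) set message_type before calling super().__init__ so the base class does not auto-write a plain log entry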
if not hasattr(self, "message_type"):
|
||||
self.message_type = "log"
|
||||
self.time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
self.level = level
|
||||
self.message = message
|
||||
self.path = "data/log.txt"
|
||||
|
||||
if self.message_type == "log":
|
||||
self._create()
|
||||
|
||||
def log_file_exist(self):
|
||||
file_exists = exists(self.path)
|
||||
return file_exists
|
||||
|
||||
def create(self):
|
||||
# here for legacy support
|
||||
# old version required a specific call to log(*info).create
|
||||
# this has since been revamped
|
||||
pass
|
||||
|
||||
def _create(self, log_string=None):
|
||||
if not log_string:
|
||||
log_string = f"{self.time} | {self.level} | {self.message}"
|
||||
|
||||
if not self.log_file_exist():
|
||||
with open(self.path, 'w') as log_file:
|
||||
log_file.write(f"{self.time} | INFO | Log file created at '{self.path}'")
|
||||
else:
|
||||
with open(self.path, 'a') as log_file:
|
||||
log_file.write(log_string + "\n")
|
||||
self.output(log_string)
|
||||
|
||||
def read(self, amount):
|
||||
with open(self.path, 'r') as log_file:
|
||||
entries = log_file.readlines()
|
||||
if amount is None:
|
||||
return entries
|
||||
entries = entries[len(entries)-amount:]
|
||||
return entries
|
||||
|
||||
def output(self, log_string):
|
||||
if self.message_type == "log":
|
||||
print(log_string)
|
||||
|
||||
class status(log):
|
||||
def __init__(self, level, message, interface=None):
|
||||
self.message_type = "status"
|
||||
super().__init__(level, message)
|
||||
self.status = {"time":self.time, "level":self.level, "message":self.message}
|
||||
if interface:
|
||||
self.interface = interface
|
||||
self.process()
|
||||
|
||||
# LEGACY METHODS
|
||||
def status_update(self, obj):
|
||||
status = {"time":self.time, "level":self.level, "message":self.message}
|
||||
if obj is not None:
|
||||
obj.status = status
|
||||
obj.status_string = f"{self.time} | {self.level} | {self.message}"
|
||||
|
||||
return status
|
||||
|
||||
@staticmethod
|
||||
def send_status(sio, sid, status):
|
||||
sio.emit('recv_status', status, room=sid)
|
||||
# LEGACY METHODS
|
||||
|
||||
def process(self):
|
||||
self.__format()
|
||||
self.__object_update()
|
||||
|
||||
self._create(self.log_string)
|
||||
self.interface.send_status(self.status)
|
||||
|
||||
def __object_update(self):
|
||||
if self.interface.obj is not None:
|
||||
self.interface.obj.status = self.status
|
||||
self.interface.obj.status_string = self.status_string
|
||||
|
||||
def __format(self):
|
||||
self.status = {"time":self.time, "level":self.level, "message":self.message}
|
||||
self.status_string = f"{self.time} | {self.level} | {self.message}"
|
||||
|
||||
user_id = self.interface.user_id
|
||||
sid = self.interface.sid
|
||||
self.log_string = f"{self.time} | {self.level} | {self.interface.user_id} | {self.message}"
|
||||
|
||||
class status_interface(log):
|
||||
def __init__(self, sio, sid, user_id="Unknown", obj=None):
|
||||
self.sio = sio
|
||||
self.sid = sid
|
||||
self.user_id = user_id
|
||||
self.obj = obj
|
||||
self.path = "data/actions_log.txt"
|
||||
|
||||
def send_status(self, status):
|
||||
self.sio.emit('recv_status', status, room=self.sid)
|
||||
|
||||
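# a minimal usage sketch (assumes a running python-socketio server `sio` and a connected client `sid`,
# as wired up by the handler modules):
#   statface = status_interface(sio, sid, user_id="some-user-id")
#   status("INFO", "Profile updated", statface)  # writes a log entry and emits it to the client via 'recv_status'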
def main():
|
||||
entry = logging("INFO", "test log creation")
|
||||
entry.path = "log.txt"
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
1
server/modules/user/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__all__ = ['info', 'generate', 'content', 'start']
|
||||
BIN
server/modules/user/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
server/modules/user/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
server/modules/user/__pycache__/content.cpython-311.pyc
Normal file
BIN
server/modules/user/__pycache__/generate.cpython-311.pyc
Normal file
BIN
server/modules/user/__pycache__/info.cpython-310.pyc
Normal file
BIN
server/modules/user/__pycache__/info.cpython-311.pyc
Normal file
BIN
server/modules/user/__pycache__/start.cpython-311.pyc
Normal file
1077
server/modules/user/content.py
Normal file
58
server/modules/user/generate.py
Normal file
@@ -0,0 +1,58 @@
|
||||
# This will essentially run once a successful registration happens
|
||||
# This could become a class or function in the user_info file
|
||||
## Since this file is essentially just going to be using that one
|
||||
## However it will also create some of its own database entries
|
||||
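# a hypothetical caller sketch (assumption: the registration handler invokes main() only after the
# credentials have passed the length/format checks driven by config.ini):
#   from modules.user import generate
#   level = config_read('user', 'defaultlevel')
#   new_user_id = generate.main(username, password, level)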
|
||||
import sqlite3
|
||||
from modules.data.config import read as config_read
|
||||
from modules.data.database import connect as db_connect
|
||||
|
||||
from modules.algorithms.uuid import generate as uuid_generate
|
||||
from modules.algorithms.uuid import long_hash as hash_string
|
||||
|
||||
from modules.user import info
|
||||
|
||||
db = db_connect()
|
||||
db.create(None)
|
||||
cur = db.cur
|
||||
|
||||
|
||||
def auth_credentials(user_id, username, password, level):
|
||||
cur.execute("INSERT INTO auth_credentials (user_id, username, password, level) VALUES (?, ?, ?, ?)", (user_id, username, password, level))
|
||||
|
||||
return user_id
|
||||
|
||||
def profile(user_id):
|
||||
cur.execute("INSERT INTO profile (user_id) VALUES (?)", (user_id,))
|
||||
|
||||
occupation_id = config_read("user", "DeafultOccupation")
|
||||
|
||||
# checks whether the occupation_id exists
|
||||
cur.execute("SELECT name FROM occupations WHERE occupation_id = ?", (occupation_id,))
|
||||
rez = cur.fetchone()
|
||||
if rez:
|
||||
info.occupation(user_id).set({"occupation_id":occupation_id})
|
||||
|
||||
def team(user_id, name="friends"):
|
||||
team_id = uuid_generate()
|
||||
|
||||
cur.execute("INSERT INTO teams (team_id, name, user_id) VALUES (?, ?, ?)", (team_id, name, user_id))
|
||||
cur.execute("INSERT INTO team_leaders (team_id, user_id) VALUES (?, ?)", (team_id, user_id))
|
||||
|
||||
|
||||
def main(username, password, level):
|
||||
user_id = uuid_generate()
|
||||
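# the freshly generated user_id doubles as a per-user salt before hashing (long_hash from modules.algorithms.uuid)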
password_hash = hash_string(password + user_id)
|
||||
|
||||
auth_credentials(user_id, username, password_hash, level)
|
||||
profile(user_id)
|
||||
team(user_id)
|
||||
|
||||
db.commit()
|
||||
return user_id
|
||||
|
||||
if __name__ == "__main__":
|
||||
main("test_user", "test_password")
|
||||
|
||||
|
||||
|
||||
713
server/modules/user/info.py
Normal file
@@ -0,0 +1,713 @@
|
||||
from modules.track.logging import log, status
|
||||
from modules.data.config import read as config_read
|
||||
from modules.data.database import retrieve
|
||||
from modules.data.database import connect as db_connect
|
||||
from modules.data.datetime import timestamp
|
||||
from modules.algorithms.uuid import generate as uuid_generate
|
||||
from modules.algorithms.univ import dict_key_verify
|
||||
from modules.algorithms.recomend import recomend_friend
|
||||
|
||||
class table():
|
||||
def __init__(self, user_id=None, username=None, occupation_id=None, allowed_columns=None, *args, **kwargs):
|
||||
self.statface = None
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
self.id = user_id
|
||||
self.username = username
|
||||
self.occupation_id = occupation_id
|
||||
if not getattr(self, 'allowed_columns', None):
|
||||
self.allowed_columns = allowed_columns
|
||||
self.columns = self.allowed_columns
|
||||
self.server_code = config_read('miscellaneous', 'servercode')
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return self._id
|
||||
@id.setter
|
||||
def id(self, value):
|
||||
if type(value) == str:
|
||||
self.cur.execute("SELECT username FROM auth_credentials WHERE user_id = ?", (value,))
|
||||
if not self.cur.fetchone():
|
||||
value = None
|
||||
else:
|
||||
value = None
|
||||
self._id = value
|
||||
|
||||
@property
|
||||
def username(self):
|
||||
return self._username
|
||||
@username.setter
|
||||
def username(self, value):
|
||||
self.cur.execute("SELECT user_id FROM auth_credentials WHERE username = ?", (value,))
|
||||
if not self.cur.fetchone():
|
||||
value = None
|
||||
|
||||
self._username = value
|
||||
|
||||
if value:
|
||||
u_id = user_id(username=value).get()['user_id']
|
||||
if self.id != u_id:
|
||||
self.id = u_id
|
||||
|
||||
@property
|
||||
def occupation_id(self):
|
||||
return self._occupation_id
|
||||
@occupation_id.setter
|
||||
def occupation_id(self, value):
|
||||
self.cur.execute("SELECT name FROM occupations WHERE occupation_id = ?", (value,))
|
||||
if not self.cur.fetchone():
|
||||
value = None
|
||||
self._occupation_id = value
|
||||
|
||||
@property
|
||||
def team_id(self):
|
||||
return self._team_id
|
||||
@team_id.setter
|
||||
def team_id(self, value):
|
||||
self.cur.execute("SELECT name FROM teams WHERE team_id = ?", (value,))
|
||||
if not self.cur.fetchone():
|
||||
value = None
|
||||
self._team_id = value
|
||||
|
||||
@property
|
||||
def columns(self):
|
||||
return self._columns
|
||||
@columns.setter
|
||||
def columns(self, value):
|
||||
valid = []
|
||||
|
||||
if type(value) == list:
|
||||
for column in value:
|
||||
if column in self.allowed_columns:
|
||||
valid.append(column)
|
||||
|
||||
self._columns = valid
|
||||
|
||||
@property
|
||||
def date(self):
|
||||
self._date = timestamp().date
|
||||
return self._date
|
||||
@date.setter
|
||||
def date(self, value):
|
||||
self._date = value
|
||||
|
||||
class user_id():
|
||||
def __init__(self, username=None, *args, **kwargs):
|
||||
self.username = username
|
||||
self.db = db_connect()
|
||||
self.db.create(self)
|
||||
|
||||
def get(self):
|
||||
info = {'user_id':None}
|
||||
|
||||
self.cur.execute(f"SELECT user_id FROM auth_credentials WHERE username = ?", (self.username,))
|
||||
rez = self.cur.fetchone()
|
||||
|
||||
if rez:
|
||||
info = {"user_id":rez[0]}
|
||||
|
||||
else:
|
||||
info = None
|
||||
return info
|
||||
|
||||
class auth(table):
|
||||
def __init__(self, user_id=None, username=None, *args, **kwargs):
|
||||
self.allowed_columns = ["username", "level"]
|
||||
super().__init__(user_id=user_id, username=username)
|
||||
|
||||
def get(self):
|
||||
info = {}
|
||||
|
||||
for column in self.columns:
|
||||
info[column] = None
|
||||
|
||||
self.cur.execute(f"SELECT {column} FROM auth_credentials WHERE user_id = ?", (self.id,))
|
||||
rez = self.cur.fetchone()
|
||||
|
||||
if rez:
|
||||
info[column] = rez[0]
|
||||
|
||||
if not self.id:
|
||||
status("FAIL", "Invalid username provided", self.statface)
|
||||
info = None
|
||||
else:
|
||||
status("INFO", "Authorisation info successfully fetched", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def set(self, data):
|
||||
for column in self.columns:
|
||||
value = data[column]
|
||||
rez = None
|
||||
|
||||
if column == 'username':
|
||||
# a SELECT statement was here instead of an UPDATE, for no clear reason
|
||||
# it has been replaced with an UPDATE, since changing a username is completely fine
|
||||
#self.cur.execute("SELECT username FROM auth_credentials WHERE username = ?", (value,))
|
||||
self.cur.execute("UPDATE auth_credentials SET username = ? WHERE user_id = ?", (value, self.id))
|
||||
status("INFO", "Successfully changed username", self.statface)
|
||||
if column == 'level':
|
||||
self.cur.execute("UPDATE auth_credentials SET level = ? WHERE user_id = ?", (value ,self.id))
|
||||
status("INFO", "Successfully changed level", self.statface)
|
||||
|
||||
self.db.commit()
|
||||
|
||||
# V RBP: I think this is deprecated and no longer in use
|
||||
class level(auth):
|
||||
def __init__(self, user_id):
|
||||
super().__init__(user_id=user_id)
|
||||
self.columns = ["level"]
|
||||
# ^ RBP: I think this is deprecated and no longer in use
|
||||
|
||||
class team(table):
|
||||
def __init__(self, user_id=None, username=None, occupation_id=None, team_id=None, *args, **kwargs):
|
||||
self.allowed_columns = ['team_id', 'name', 'occupation_id', 'user_id']
|
||||
super().__init__()
|
||||
if user_id:
|
||||
self.id = user_id
|
||||
if username:
|
||||
self.username = username
|
||||
if occupation_id:
|
||||
self.occupation_id = occupation_id
|
||||
if team_id:
|
||||
self.team_id = team_id
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return self._id
|
||||
@id.setter
|
||||
def id(self, value):
|
||||
occupation_value = None
|
||||
self.cur.execute("SELECT username FROM auth_credentials WHERE user_id = ?", (value,))
|
||||
if not self.cur.fetchone():
|
||||
value = None
|
||||
else:
|
||||
self.cur.execute("SELECT occupation_id FROM profile WHERE user_id = ?", (value,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
occupation_value = rez[0]
|
||||
|
||||
self.occupation_id = occupation_value
|
||||
self._id = value
|
||||
|
||||
@property
|
||||
def occupation_id(self):
|
||||
return self._occupation_id
|
||||
@occupation_id.setter
|
||||
def occupation_id(self, value):
|
||||
team_value = None
|
||||
self.cur.execute("SELECT name FROM occupations WHERE occupation_id = ?", (value,))
|
||||
if not self.cur.fetchone():
|
||||
value = None
|
||||
else:
|
||||
self.cur.execute("SELECT team_id FROM teams WHERE occupation_id = ?", (value,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
team_value = rez[0]
|
||||
|
||||
self.team_id = team_value
|
||||
self._occupation_id = value
|
||||
|
||||
def get(self):
|
||||
info = {column: None for column in self.columns}
|
||||
for column in self.columns:
|
||||
self.cur.execute(f"SELECT {column} FROM teams WHERE team_id = ?", (self.team_id,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
info[column] = rez[0]
|
||||
|
||||
if not all(info.values()) and not self.team_id:
|
||||
info = None
|
||||
status("FAIL", "Team data could not be fetched, invalid data provided", self.statface)
|
||||
else:
|
||||
status("INFO", "Team data successfully fetched", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def get_all(self):
|
||||
info = {'teams': None}
|
||||
|
||||
for column in self.columns:
|
||||
self.cur.execute(f"SELECT {column} FROM teams WHERE user_id IS NULL")
|
||||
rez = self.cur.fetchall()
|
||||
if rez:
|
||||
if not info['teams']:
|
||||
info['teams'] = [{} for i in range(len(rez))]
|
||||
for i, items in enumerate(rez):
|
||||
info['teams'][i][column] = items[0]
|
||||
status("INFO", "Team(s) successfully fetched", self.statface)
|
||||
else:
|
||||
status("FAIL", "Team(s) could not be fetched, something went wrong", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def get_members(self):
|
||||
info = {'members': None}
|
||||
|
||||
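# membership is derived from occupations: any user whose profile.occupation_id matches this team's occupation_id counts as a member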
self.cur.execute("""SELECT auth_credentials.username FROM auth_credentials
|
||||
INNER JOIN profile USING(user_id)
|
||||
CROSS JOIN teams ON profile.occupation_id = teams.occupation_id
|
||||
WHERE teams.team_id=?""", (self.team_id,))
|
||||
|
||||
rez = self.cur.fetchall()
|
||||
if rez:
|
||||
info['members'] = [{'username': member[0]} for member in rez]
|
||||
status("INFO", "Team members successfully fetched", self.statface)
|
||||
|
||||
if not self.team_id:
|
||||
status("FAIL", "Team members could not be fetched, invalid data provided")
|
||||
info = None
|
||||
|
||||
return info
|
||||
|
||||
def get_leaders(self):
|
||||
info = {'leaders': None}
|
||||
|
||||
self.cur.execute("SELECT user_id FROM team_leaders WHERE team_id = ?", (self.team_id,))
|
||||
rez = self.cur.fetchall()
|
||||
if rez:
|
||||
info['leaders'] = [{'username': auth(user_id=leader[0]).get()['username']} for leader in rez]
|
||||
status("INFO", "Team leaders successfully fetched", self.statface)
|
||||
else:
|
||||
status("FAIL", "Team leaders could not be fetched, invalid data provided", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def set(self, data):
|
||||
for column in self.columns:
|
||||
if column == "name" and dict_key_verify(data, 'name'):
|
||||
self.cur.execute("UPDATE teams SET name=? where team_id=?", (data['name'] ,self.team_id))
|
||||
self.db.commit()
|
||||
status("INFO", "Team data successfully changed", self.statface)
|
||||
|
||||
if dict_key_verify(data, 'leaders'):
|
||||
current_leaders = (self.get_leaders())['leaders']
|
||||
for leader in data['leaders']:
|
||||
|
||||
exists = False
|
||||
if current_leaders:
|
||||
for current_leader in current_leaders:
|
||||
if current_leader['username'] == leader['username']:
|
||||
exists = True
|
||||
|
||||
if not exists:
|
||||
self.cur.execute("SELECT user_id FROM auth_credentials WHERE username = ?", (leader['username'],))
|
||||
info = user_id(username=leader['username']).get()
|
||||
if info:
|
||||
self.cur.execute("INSERT INTO team_leaders (user_id, team_id) VALUES (?, ?)", (info['user_id'], self.team_id))
|
||||
self.db.commit()
|
||||
status("INFO", "New leader successfully added to team", self.statface)
|
||||
else:
|
||||
status("FAIL", "Leader not set, user does not exist", self.statface)
|
||||
else:
|
||||
status("WARN", "This user already exists as a leader of the team", self.statface)
|
||||
|
||||
def delete_leaders(self, data):
|
||||
leaders = data['leaders']
|
||||
current_leaders = self.get_leaders()['leaders']
|
||||
if type(leaders) == str:
|
||||
leaders = [leaders]
|
||||
for leader in leaders:
|
||||
|
||||
exists = False
|
||||
if current_leaders:
|
||||
for current_leader in current_leaders:
|
||||
if current_leader['username'] == leader['username']:
|
||||
exists = True
|
||||
|
||||
if exists:
|
||||
self.cur.execute("DELETE FROM team_leaders WHERE user_id=? AND team_id=?", (user_id(username=leader['username']).get()['user_id'],self.team_id,))
|
||||
self.db.commit()
|
||||
status("INFO", "User {leader['username']} removed as a leader from this team", self.statface)
|
||||
else:
|
||||
status("WARN", "User {leader['username']} does not exist as a leader of this team", self.statface)
|
||||
|
||||
class friend(table):
|
||||
@property
|
||||
def friend_username(self):
|
||||
return self._friend_username
|
||||
@friend_username.setter
|
||||
def friend_username(self, value):
|
||||
obj = friend(username=value)
|
||||
if not obj.username:
|
||||
value = None
|
||||
else:
|
||||
self.friend_id = user_id(username=value).get()['user_id']
|
||||
self._friend_username = value
|
||||
|
||||
@property
|
||||
def friend_id(self):
|
||||
return self._friend_id
|
||||
@friend_id.setter
|
||||
def friend_id(self, value):
|
||||
obj = friend(user_id=value)
|
||||
if not obj.id:
|
||||
value = None
|
||||
self._friend_id = value
|
||||
|
||||
@property
|
||||
def mode(self):
|
||||
return self._mode
|
||||
@mode.setter
|
||||
def mode(self, value):
|
||||
if value not in ['incoming', 'outgoing']:
|
||||
value = "incoming"
|
||||
self._mode = value
|
||||
|
||||
def __init__(self, user_id=None, username=None, *args, **kwargs):
|
||||
self.allowed_columns = ['username', 'friend_username']
|
||||
self.mode = "outgoing"
|
||||
super().__init__(user_id=user_id, username=username)
|
||||
|
||||
def get(self):
|
||||
info = {'friends':None}
|
||||
|
||||
self.cur.execute("SELECT friend_id FROM friends WHERE user_id = ? AND approved = ?", (self.id, True))
|
||||
rez = self.cur.fetchall()
|
||||
|
||||
info['friends'] = [auth(user_id=user[0]).get() for user in rez]
|
||||
|
||||
if not self.id:
|
||||
info = None
|
||||
status("FAIL", "Friends not fetched, invalid data provided", self.statface)
|
||||
else:
|
||||
status("INFO", "Friends successfully fetched", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def get_requests(self):
|
||||
info = {'requests': None}
|
||||
|
||||
if self.mode == 'incoming':
|
||||
self.cur.execute("SELECT user_id FROM friends WHERE friend_id = ? AND approved = ?", (self.id, False))
|
||||
else:
|
||||
self.cur.execute("SELECT friend_id FROM friends WHERE user_id = ? AND approved = ?", (self.id, False))
|
||||
rez = self.cur.fetchall()
|
||||
|
||||
if rez:
|
||||
users = [auth(user_id=user[0]).get()['username'] for user in rez]
|
||||
info['requests'] = users
|
||||
status("INFO", f"Successfully fetched {self.mode} friend request(s)", self.statface)
|
||||
elif not self.id:
|
||||
status("FAIL", f"Could not fetch {self.mode} friend request(s), invalid data provided", self.statface)
|
||||
info = None
|
||||
|
||||
return info
|
||||
|
||||
def get_recomendations(self, data):
|
||||
info = {'recomended': None}
|
||||
if dict_key_verify(data, 'amount') and isinstance(data['amount'], int):
|
||||
amount = data['amount']
|
||||
else:
|
||||
status("FAIL", "Could not fetch friend recomendation(s), invalid amount provided or data is in wrong format", self.statface)
|
||||
return None
|
||||
|
||||
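# depth presumably bounds how far the friend graph is traversed when generating recommendations (see modules/algorithms/recomend)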
depth = 3
|
||||
username = auth(user_id=self.id).get()['username']
|
||||
recomendations = recomend_friend(username, amount, depth)
|
||||
if recomendations:
|
||||
info['recomended'] = recomendations
|
||||
status("INFO", "Successfully fetched friend recomendations", self.statface)
|
||||
else:
|
||||
status("INFO", "No recomendations could be generated, add some friends first", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def add_request(self, data):
|
||||
approved = False
|
||||
|
||||
if dict_key_verify(data, 'friend_username'):
|
||||
self.friend_username = data['friend_username']
|
||||
friend_info = user_id(username=data['friend_username']).get()
friend_id = friend_info['user_id'] if friend_info else None
|
||||
|
||||
if friend_id:
|
||||
# checks if the other person has added them as a friend
|
||||
# if so it accepts the other person's request and creates their own approved request
|
||||
self.cur.execute("SELECT user_id FROM friends WHERE friend_id = ? AND user_id = ?", (self.id, friend_id))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
self.cur.execute("UPDATE friends SET approved = True WHERE friend_id = ? AND user_id = ?", (self.id, friend_id))
|
||||
approved = True
|
||||
|
||||
# checks to see if this friend request already exists (whether approved or not)
|
||||
# if not, it makes a new unapproved friend request
|
||||
self.cur.execute("SELECT approved FROM friends WHERE user_id = ? AND friend_id = ?", (self.id, friend_id))
|
||||
rez = self.cur.fetchone()
|
||||
if not rez:
|
||||
self.cur.execute("INSERT INTO friends (user_id, friend_id, approved) VALUES (?, ?, ?)", (self.id, friend_id, approved))
|
||||
status("INFO", "Friend request successfully created", self.statface)
|
||||
elif rez[0] == False:
|
||||
status("WARN", "User already has an active friend request to this user", self.statface)
|
||||
elif rez[0] == True:
|
||||
status("WARN", "User is already friends with other user", self.statface)
|
||||
else:
|
||||
status("FAIL", "Could not create friend request, invalid data provided")
|
||||
|
||||
self.db.commit()
|
||||
|
||||
def approve_request(self, data):
|
||||
if dict_key_verify(data, 'friend_username'):
|
||||
self.friend_username = data['friend_username']
|
||||
self.cur.execute("SELECT approved FROM friends WHERE friend_id = ? AND user_id = ?", (self.id, self.friend_id))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
self.add_request(data)
|
||||
else:
|
||||
status("FAIL", "Friend request does not exist", self.statface)
|
||||
|
||||
def reject_request(self, data):
|
||||
self.remove(data)
|
||||
|
||||
def delete_request(self, data):
|
||||
self.remove(data)
|
||||
|
||||
def remove(self, data):
|
||||
if dict_key_verify(data, 'friend_username'):
|
||||
self.friend_username = data['friend_username']
|
||||
|
||||
if self.friend_id:
|
||||
self.cur.execute("DELETE FROM friends WHERE user_id = ? AND friend_id = ?", (self.id, self.friend_id))
|
||||
self.cur.execute("DELETE FROM friends WHERE friend_id = ? AND user_id = ?", (self.id, self.friend_id))
|
||||
status("INFO", "Friend/friend request successfully removed/rejected", self.statface)
|
||||
else:
|
||||
status("FAIL", "Friend/friend request could not be removed/rejected, invalid data provided", self.statface)
|
||||
|
||||
self.db.commit()
|
||||
|
||||
class profile(table):
|
||||
@property
|
||||
def target_username(self):
|
||||
return self._target_username
|
||||
@target_username.setter
|
||||
def target_username(self, value):
|
||||
prof = profile(username=value)
|
||||
if not prof.username:
|
||||
value = None
|
||||
self._target_username = value
|
||||
|
||||
def __init__(self, user_id=None, username=None, *args, **kwargs):
|
||||
self.allowed_columns = ["biography", "role", "name", "occupation_id"]
|
||||
super().__init__(user_id=user_id, username=username)
|
||||
|
||||
def get(self):
|
||||
info = {}
|
||||
|
||||
for column in self.columns:
|
||||
info[column] = None
|
||||
|
||||
self.cur.execute(f"SELECT {column} FROM profile WHERE user_id = ?", (self.id,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
info[column] = rez[0]
|
||||
|
||||
if not self.id:
|
||||
status("FAIL", "Invalid username provided profile unable to be fetched")
|
||||
info = None
|
||||
else:
|
||||
status("INFO", "Profile infomation successfully fetched")
|
||||
|
||||
return info
|
||||
|
||||
def get_permissions(self):
|
||||
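# management/admin accounts may edit and delete any profile, users may edit/delete their own,
# and leaders of the target user's team may delete (but not edit) it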
info = {"delete": False, "edit": False}
|
||||
|
||||
subject = auth(user_id=self.id).get()
|
||||
if subject['level'] == "management" or subject['level'] == "admin":
|
||||
info['delete'] = True
|
||||
info['edit'] = True
|
||||
|
||||
if self.target_username:
|
||||
if subject['username'] == self.target_username:
|
||||
info['delete'] = True
|
||||
info['edit'] = True
|
||||
|
||||
target_team_info = team(username=self.target_username).get_leaders()
|
||||
if dict_key_verify(target_team_info, 'leaders'):
|
||||
target_leaders = target_team_info['leaders']
|
||||
if any(subject['username'] == leader['username'] for leader in target_leaders):
|
||||
info['delete'] = True
|
||||
|
||||
if not self.id or not self.target_username:
|
||||
status("FAIL", "Invalid username or data provided", self.statface)
|
||||
info = None
|
||||
else:
|
||||
status("INFO", "Permissions successfully fetched", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def set(self, data):
|
||||
for column in self.columns:
|
||||
item = data[column]
|
||||
|
||||
self.cur.execute(f"UPDATE profile SET {column} = ? WHERE user_id = ?", (item, self.id))
|
||||
status("INFO", "Successfully changed/deleted {column}", self.statface)
|
||||
|
||||
self.db.commit()
|
||||
|
||||
def delete(self):
|
||||
data = {}
|
||||
|
||||
for column in self.columns:
|
||||
data[column] = None
|
||||
self.set(data)
|
||||
|
||||
class occupation(table):
|
||||
def __init__(self, user_id=None, username=None, occupation_id=None, *args, **kwargs):
|
||||
self.allowed_columns = ["occupation_id", "name", "description"]
|
||||
super().__init__(user_id=user_id, username=username, occupation_id=occupation_id)
|
||||
|
||||
def get(self):
|
||||
info = {column: None for column in self.columns}
|
||||
|
||||
if not self.occupation_id:
|
||||
self.cur.execute("SELECT occupations.occupation_id, occupations.name, description FROM profile INNER JOIN occupations USING(occupation_id) WHERE user_id = ?", (self.id,))
|
||||
else:
|
||||
self.cur.execute("SELECT occupation_id, name, description FROM occupations WHERE occupation_id = ?", (self.occupation_id,))
|
||||
rez = self.cur.fetchone()
|
||||
|
||||
if rez:
|
||||
occupation = {'occupation_id':rez[0], 'name':rez[1], 'description':rez[2]}
|
||||
|
||||
for column in self.columns:
|
||||
info[column] = occupation[column]
|
||||
|
||||
if not rez and not self.id:
|
||||
status("FAIL", "Occupation could not be fetched: invalid data provided", self.statface)
|
||||
info = None
|
||||
else:
|
||||
status("INFO", "Occupation successfully fetched", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def get_request(self):
|
||||
info = {'occupation_id': None, 'approved': None}
|
||||
|
||||
self.cur.execute("SELECT occupation_id, approved FROM occupation_requests WHERE user_id = ?", (self.id,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
info['occupation_id'] = rez[0]
|
||||
info['approved'] = rez[1]
|
||||
status("INFO", "Occupation requests fetched successfully")
|
||||
else:
|
||||
status("FAIL", "Occupation requests could not be fetched invalid data provided", self.statface)
|
||||
info = None
|
||||
|
||||
return info
|
||||
|
||||
def get_all_requests(self):
|
||||
info = {'requests': None}
|
||||
|
||||
self.cur.execute("SELECT user_id, occupation_id FROM occupation_requests WHERE approved = ?", (False,))
|
||||
rez = self.cur.fetchall()
|
||||
|
||||
if rez:
|
||||
info['requests'] = [{'username': auth(user_id=request[0]).get()['username'], 'occupation_id': request[1]} for request in rez]
|
||||
status("INFO", "Occupation requests successfully fetched", self.statface)
|
||||
else:
|
||||
status("FAIL", "Occupation requests could not be fetched something went wrong", self.statface)
|
||||
return info
|
||||
|
||||
def set(self, data):
|
||||
occupation_id = data["occupation_id"]
|
||||
|
||||
self.cur.execute("SELECT name FROM occupations WHERE occupation_id = ?", (occupation_id,))
|
||||
if self.cur.fetchone():
|
||||
self.cur.execute("UPDATE profile SET occupation_id = ? WHERE user_id = ?", (occupation_id, self.id))
|
||||
status("INFO", "Occupation successfully updated", self.statface)
|
||||
else:
|
||||
status("FAIL", "Occupation could not be updated invalid data provided", self.statface)
|
||||
|
||||
self.db.commit()
|
||||
|
||||
def set_request(self, data):
|
||||
occupation_id = data['occupation_id']
|
||||
|
||||
self.cur.execute("SELECT approved FROM occupation_requests WHERE user_id = ?", (self.id,))
|
||||
if self.cur.fetchone():
|
||||
self.delete_request()
|
||||
status("INFO", "Removing previous occupation change request", self.statface)
|
||||
|
||||
self.cur.execute("SELECT name FROM occupations WHERE occupation_id = ?", (occupation_id,))
|
||||
if self.cur.fetchone():
|
||||
self.cur.execute("INSERT INTO occupation_requests (user_id, occupation_id, approved) VALUES (?, ?, ?)", (self.id, occupation_id, False))
|
||||
else:
|
||||
status("FAIL", "Occupation change request could not be made invalid occupation_id provided", self.statface)
|
||||
|
||||
self.db.commit()
|
||||
|
||||
def approve_request(self):
|
||||
self.cur.execute("SELECT occupation_id FROM occupation_requests WHERE approved = ? AND user_id = ?", (False, self.id,))
|
||||
if self.cur.fetchone():
|
||||
self.cur.execute("UPDATE occupation_requests SET approved = ? WHERE user_id = ?", (True, self.id))
|
||||
self.cur.execute("SELECT occupation_id FROM occupation_requests WHERE user_id = ?", (self.id,))
|
||||
rez = self.cur.fetchone()
|
||||
if rez:
|
||||
self.set({'occupation_id': rez[0]})
|
||||
status("INFO", "Occupation change request successfully approved", self.statface)
|
||||
else:
|
||||
status("CRIT", "Occupation change request approved but not changed in the user entry, contact admin", self.statface)
|
||||
else:
|
||||
status("FAIL", "Occupation change request from that user does not exist or has already been approved", self.statface)
|
||||
|
||||
self.db.commit()
|
||||
|
||||
def reject_request(self):
|
||||
self.delete_request()
|
||||
|
||||
def delete(self):
|
||||
self.cur.execute("UPDATE profile SET occupation_id = ? WHERE user_id = ?", (None, self.id))
|
||||
self.db.commit()
|
||||
status("INFO", "Occupation no longer associated with user", self.statface)
|
||||
|
||||
def delete_request(self):
|
||||
self.cur.execute("DELETE FROM occupation_requests WHERE user_id = ?", (self.id,))
|
||||
self.db.commit()
|
||||
status("INFO", "Occupation change request successfully deleted", self.statface)
|
||||
|
||||
def get_all(self):
|
||||
info = {'occupations':None}
|
||||
|
||||
self.cur.execute("SELECT occupation_id, name, description FROM occupations")
|
||||
rez = self.cur.fetchall()
|
||||
|
||||
if rez:
|
||||
occupations = [{'occupation_id':occupation[0], 'name':occupation[1], 'description': occupation[2]} for occupation in rez]
|
||||
info['occupations'] = occupations
|
||||
status("INFO", "Occupation(s) successfully fetched", self.statface)
|
||||
else:
|
||||
status("FAIL", "Occupation(s) could not be fetched", self.statface)
|
||||
|
||||
return info
|
||||
|
||||
def create(self, data={'name': None, 'description': None}):
|
||||
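# creating an occupation also creates a team with the same name; users are tied to that team through profile.occupation_id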
occupation_uuid = uuid_generate()
|
||||
team_uuid = uuid_generate()
|
||||
name = data['name']
|
||||
description = data['description']
|
||||
|
||||
self.cur.execute("INSERT INTO occupations(occupation_id, name, description) VALUES (?, ?, ?)", (occupation_uuid, name, description))
|
||||
self.cur.execute("INSERT INTO teams (team_id, name, occupation_id) VALUES (?, ?, ?)", (team_uuid, name, occupation_uuid))
|
||||
self.db.commit()
|
||||
|
||||
def edit(self, data):
|
||||
if 'occupation_id' in data and not self.occupation_id:
|
||||
self.occupation_id = data['occupation_id']
|
||||
for column in self.columns:
|
||||
if column == "occupation_id":
|
||||
continue
|
||||
value = data[column]
|
||||
self.cur.execute(f"UPDATE occupations SET {column} = ? WHERE occupation_id = ?", (value, self.occupation_id))
|
||||
self.db.commit()
|
||||
|
||||
def delete_occupation(self, data=None):
|
||||
if dict_key_verify(data, "occupation_id") and not self.occupation_id:
|
||||
self.occupation_id = data['occupation_id']
|
||||
self.cur.execute("DELETE FROM occupations WHERE occupation_id = ?", (self.occupation_id,))
|
||||
self.db.commit()
|
||||
|
||||
def main():
|
||||
log("WARN", "modules/user/info.py has been called as main. This file is not intended to run solo. Please use main.py or modules/handler/handler.py")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
17
server/modules/user/start.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# this file is deprecated and set to be removed
|
||||
## move all dependencies to ./modules/start/start.py
|
||||
## removal date set for 30-11-2023
|
||||
|
||||
from modules.handler import outgoing
|
||||
from modules.data.datetime import timestamp
|
||||
|
||||
def post_slot(sio, sid):
|
||||
if timestamp().is_valid_time():
|
||||
outgoing.post_slot(sio, sid)
|
||||
|
||||
def main(sio, sid):
|
||||
post_slot(sio, sid)
|
||||
notifications(sio, sid)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
7
server/requirements.txt
Normal file
@@ -0,0 +1,7 @@
|
||||
python-socketio==5.8.0
|
||||
eventlet==0.33.3
|
||||
pathlib==1.0.1
|
||||
configparser
|
||||
datetime
|
||||
pillow==10.0.1
|
||||
python-dotenv==1.0.0
|
||||
40
server/work-data/config.ini
Normal file
@@ -0,0 +1,40 @@
|
||||
[authorisation]
|
||||
adminkey = secret
|
||||
registrationkey = secret
|
||||
usernamemaxlength = 20
|
||||
usernameminlength = 5
|
||||
passwordmaxlength = 30
|
||||
passwordminlength = 5
|
||||
tokenexpirytime = 2592000
|
||||
|
||||
[database]
|
||||
path = data/database.db
|
||||
encrypt = false
|
||||
shamirsecretsharing = false
|
||||
numberofshares = 5
|
||||
minimumshares = 3
|
||||
keypath = data/key.txt
|
||||
encryptedpath = data/.cryptdatabase.db
|
||||
encryptionconfigpath = data/encryptconfig.txt
|
||||
saltpath = data/.salt.txt
|
||||
sharespath = data/shares/
|
||||
|
||||
[user]
|
||||
defaultlevel = member
|
||||
defaultoccupationid = Null
|
||||
|
||||
[posts]
|
||||
posttimelimit = 5
|
||||
daystart = 9
|
||||
dayend = 17
|
||||
|
||||
[notifications]
|
||||
defaultexpiretime = 604800
|
||||
ntfyurl = https://ntfy.example.com
|
||||
|
||||
[networking]
|
||||
port = 9999
|
||||
|
||||
[miscellaneous]
|
||||
servercode = 12345
|
||||
|
||||
BIN
server/work-data/database.db
Normal file
|
After Width: | Height: | Size: 283 KiB |
|
After Width: | Height: | Size: 263 KiB |
|
After Width: | Height: | Size: 286 KiB |
|
After Width: | Height: | Size: 324 KiB |