Did not move to Gunicorn, but now working in Docker

Lucas Mathews
2024-06-15 20:49:18 +02:00
parent 1c614f23b6
commit 15c0d62523
5 changed files with 13 additions and 20 deletions

View File

@@ -7,4 +7,3 @@ faker
 customtkinter
 schedule
 psycopg2-binary
-gunicorn

View File

@@ -5,12 +5,12 @@
 ### Modules ###
 ###############
-import threading
+import subprocess
 import connexion # Imports connexion module
 from config import CONFIG # Imports the configuration file
 from manager import * # Imports the Manager file that contains the functions for the API
 from flask_session import Session # Imports the session module
-from scheduler import run_schedule # Imports the scheduler module
 #################
 ### Connexion ###
@@ -20,12 +20,11 @@ def create_app():
"""Creates the API using Connexion."""
app = connexion.FlaskApp(__name__)
app.add_api(CONFIG["server"]["api_file"])
flask_app = app.app
flask_app.config['SECRET_KEY'] = CONFIG["sessions"]["secret_key"]
flask_app.config['SESSION_TYPE'] = 'filesystem'
Session(flask_app)
event_logger("Session initialised.")
return app
def API():
@@ -33,6 +32,7 @@ def API():
     app = create_app()
     debug_value = CONFIG["server"]["debug"]
     debug = False if debug_value.lower() == 'false' else True
     event_logger("API started.")
     app.run(host=CONFIG["server"]["host"], port=CONFIG["server"]["port"], debug=debug)
 ################
@@ -40,14 +40,12 @@ def API():
 ################
 if __name__ == "__main__":
-    event_logger("Starting API...") # Create a thread that will run the run_schedule function in the background
+    event_logger("Starting API...")
     scheduler = CONFIG["server"]["scheduler"]
     scheduler = False if scheduler.lower() == 'false' else True
     if scheduler:
-        thread = threading.Thread(target=run_schedule)
-        thread.daemon = True # Set the thread as a daemon thread
-        thread.start()
+        subprocess.Popen(["python", "scheduler.py"])
         event_logger("Scheduler started.")
     API()
-    event_logger("API stopped.") # This line will only be reached if the API is stopped
+    event_logger("API stopped.")

View File

@@ -44,4 +44,4 @@ finally:
     if 'session' in locals():
         session.close() # Close the session when done
-    event_logger("Database operations completed.")
+    event_logger("Database initialised.")

View File

@@ -69,7 +69,7 @@ def get_current_client():
 def verify_otp(client_id:str, otp:int):
     """Verifies a one time password for a client. Returns True if the OTP is correct and False otherwise."""
-    if CONFIG["smtp"]["true"] == "False":
+    if CONFIG["smtp"]["enabled"] == "False":
         return True
     if client_id in otps:
         stored_otp, creation_time = otps[client_id]
@@ -147,7 +147,7 @@ def admin_required(f):
 @login_required
 def generate_otp(client_id: str):
     """Generates a one-time password for a client and sends it to their email address. Returns a success message if the OTP is generated and an error message otherwise."""
-    if CONFIG["smtp"]["true"] == "False":
+    if CONFIG["smtp"]["enabled"] == "False":
         return format_response(True, "OTP generation disabled as SMTP is not enabled."), 200
     current_client_id, is_admin = get_current_client()
     if not is_admin and client_id != current_client_id:
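Both hunks in this file rename the SMTP flag from CONFIG["smtp"]["true"] to CONFIG["smtp"]["enabled"]. A sketch of the config shape this implies follows; the surrounding keys are assumptions, since the config file itself is not part of this commit, and the values are strings, which is why the code compares against the literal "False".

# Hypothetical excerpt of the configuration after the rename.
CONFIG = {
    "smtp": {
        "enabled": "False",  # Previously stored under the misleading key name "true".
        # host, port and credentials omitted here.
    },
}

def smtp_enabled() -> bool:
    """Illustrative helper: turn the string flag into a real boolean."""
    return CONFIG["smtp"]["enabled"].lower() != "false"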

View File

@@ -15,19 +15,15 @@ def run_schedule():
 def clean_otp():
     """Cleans the OTP table."""
-    event_logger("Starting to clean OTPs...")
+    print("Cleaning OTPs...")
     from manager import clean_expired_otps
     removed_otps = clean_expired_otps()
-    event_logger(f"Removed {removed_otps} expired OTPs.")
-    event_logger("Finished cleaning OTPs.")
 schedule.every(300).seconds.do(clean_otp)
 thread = threading.Thread(target=run_schedule)
 thread.daemon = True # Set the thread as a daemon thread
 thread.start()
 try:
     while True: # Keep the main program running
         time.sleep(1)
 except KeyboardInterrupt:
     stop_event.set() # Signal the thread to stop
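Since scheduler.py is now started as its own process (subprocess.Popen in the API's main block above), it has to drive the schedule loop itself. A minimal standalone sketch of that shape follows; only clean_otp, the 300-second interval and the deferred import of clean_expired_otps come from the diff, the rest is an assumption about how the file is wired together.

import time

import schedule


def clean_otp():
    """Cleans the OTP table."""
    print("Cleaning OTPs...")
    from manager import clean_expired_otps  # Deferred import, as in the diff.
    removed_otps = clean_expired_otps()
    print(f"Removed {removed_otps} expired OTPs.")


if __name__ == "__main__":
    schedule.every(300).seconds.do(clean_otp)  # Register the cleanup job.
    while True:
        schedule.run_pending()  # Execute any jobs whose interval has elapsed.
        time.sleep(1)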