From 088999e5d172ee8494af9ca21017b1e58da3c59a Mon Sep 17 00:00:00 2001
From: Cappy Ishihara
Date: Nov 23 2021 16:43:08 +0000
Subject: added various modules

---

diff --git a/lapis/auth.py b/lapis/auth.py
new file mode 100644
index 0000000..1d8df70
--- /dev/null
+++ b/lapis/auth.py
@@ -0,0 +1,44 @@
+# Authentication for Lapis
+import hashlib
+import hmac
+import lapis.config as config
+import json
+import lapis.db as database
+import lapis.logger as log
+
+def passHash(password: str) -> str:
+    return hashlib.sha256(password.encode()).hexdigest()
+
+
+def isValid(username: str, password: str) -> dict:
+    """
+    Checks if the username and password are valid
+    """
+    if not username or not password:
+        return {'success': False, 'error': 'No username or password provided'}
+    user = database.getUser(username)
+    if not user:
+        return {'success': False, 'error': 'User not found'}
+    if user['password'] == passHash(password):
+        return {'success': True, 'user': user}
+    else:
+        return {'success': False, 'error': 'Invalid password'}
+
+
+def checkWorkerToken(token: str) -> dict:
+    # Check token validity from database
+    if not token:
+        return {'success': False, 'error': 'No token provided'}
+    user = database.workers.get_by_token(token)
+    if not user:
+        return {'success': False, 'error': 'Invalid token'}
+    return {'success': True, 'user': user}
+
+def checkUserToken(token: str) -> dict:
+    # Check token validity from database
+    if not token:
+        return {'success': False, 'error': 'No token provided'}
+    user = database.users.get_by_token(token)
+    if not user:
+        return {'success': False, 'error': 'Invalid token'}
+    return {'success': True, 'user': user}
\ No newline at end of file
diff --git a/lapis/builds.py b/lapis/builds.py
deleted file mode 100644
index 40dd9a0..0000000
--- a/lapis/builds.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# export module as lapis.builds
-
-# set up the database
-import postgresql
-import json
-import os
-import time
-
-
-#JSON schema for builds
-schema = {
-    'id': int,
-    'name': str,
-    'description': str,
-    'source': str,
-    'status': str,
-    'started_at': str, #ISO 8601
-    'finished_at': str, # ISO 8601
-    'duration': int,
-    'output': [
-        {
-            'name': str,
-            'path': str,
-            'size': int,
-            'mtime': str, # ISO 8601
-        }
-    ]
-}
-# Postgresql schema setup
-schema_sql = """
-CREATE TABLE IF NOT EXISTS builds (
-    id serial PRIMARY KEY,
-    name varchar(255) NOT NULL,
-    description varchar(255) NOT NULL,
-    source varchar(255) NOT NULL,
-    status varchar(255) NOT NULL,
-    started_at timestamp NOT NULL,
-    finished_at timestamp,
-    duration int,
-    output jsonb NOT NULL
-);
-"""
-
-#export module as lapis.builds
diff --git a/lapis/config.py b/lapis/config.py
index fbef62c..70e4d09 100644
--- a/lapis/config.py
+++ b/lapis/config.py
@@ -7,7 +7,7 @@ import sys
 default = {
     'port': 8080,
     'debug': True,
-    'host': 'localhost',
+    'host': '0.0.0.0',
     'tempdir': '/tmp',
     'datadir': '/srv/lapis',
     'database': 'lapis',
@@ -15,27 +15,34 @@ default = {
     'database_password': 'lapis',
     'database_host': 'localhost',
     'database_port': 5432,
-    'database_type': 'postgresql',
     'database_schema': 'public',
+    'database_mode': 'local',
     'database_ssl': False,
     'database_ssl_key': '',
     'database_ssl_cert': '',
     'database_ssl_ca': '',
     'baseurl': '/api',
-    'secret': '',
+    'secret': 'obamas-last-name',
     'logfile': '/var/log/lapis/lapis.log',
     'logfile_level': 'DEBUG',
     'logfile_max_size': 10485760,
     'logfile_max_backups': 5,
     'logfile_max_age': 7,
     'datadir': '/srv/lapis',
+    'standalone': False,
+    'threaded': True,
 }
 
 # if --config or -c is not specified, use default config
 if len(sys.argv) > 1 and (sys.argv[1] == '-c' or sys.argv[1] == '--config'):
     config_file = sys.argv[2]
+# else check for environment variable "LAPIS_CONFIG"
+elif 'LAPIS_CONFIG' in os.environ:
+    config_file = os.environ['LAPIS_CONFIG']
 else:
-    config_file = '/etc/lapis/backend.conf'
+    config_file = '/etc/lapis/lapis.conf'
+
+
 
 # load config from file with configparser
 config = configparser.ConfigParser()
diff --git a/lapis/db.py b/lapis/db.py
new file mode 100644
index 0000000..51fc5d4
--- /dev/null
+++ b/lapis/db.py
@@ -0,0 +1,458 @@
+# Database code for Lapis
+import lapis.config as config
+import lapis.logger
+import lapis.util as util
+import psycopg2
+import time
+import datetime
+import json
+import os
+import secrets
+
+# load schema from the SQL file in asset/
+def lapis_schema():
+    with open(os.path.join(os.path.dirname(__file__), "asset/schema.sql"), "r") as f:
+        return f.read()
+# copilot commenting intensifies
+# ================================
+# set up the database
+# if the database doesn't exist, create it
+# if the database mode is set to local, use the socket
+# if the database mode is set to remote, use the host and port
+# use config.get('') to get the value of a key
+# use config.set('') to set the value of a key
+# get the database connection
+
+def connection():
+    try:
+        if config.get('database_mode') == 'local':
+            # local mode: connect over the Unix socket
+            conn = psycopg2.connect(database=config.get('database'),
+                                    user=config.get('database_user'),
+                                    password=config.get('database_password'))
+        else:
+            # remote mode: connect over TCP, optionally with SSL
+            conn = psycopg2.connect(database=config.get('database'),
+                                    user=config.get('database_user'),
+                                    password=config.get('database_password'),
+                                    host=config.get('database_host'),
+                                    port=config.get('database_port'),
+                                    sslmode=config.get('database_ssl'),
+                                    sslkey=config.get('database_ssl_key'),
+                                    sslcert=config.get('database_ssl_cert'))
+    except Exception as e:
+        # log and return None if the connection fails
+        lapis.logger.error(e)
+        return None
+    # now check if the schema exists
+    # if it doesn't, create it from the schema file
+    # if it does, check if the schema is up to date
+    # if it isn't, update it
+    try:
+        cursor = conn.cursor()
+        cursor.execute("SELECT EXISTS(SELECT 1 FROM information_schema.tables WHERE table_name=%s)", ["task"])
+        if not cursor.fetchone()[0]:
+            # schema doesn't exist, create it
+            cursor.execute(lapis_schema())
+        else:
+            # schema exists, check the recorded version
+            cursor.execute("SELECT version FROM version")
+            if cursor.fetchone()[0] == "1.0.0":
+                # schema is up to date
+                pass
+            else:
+                # schema is out of date
+                cursor.execute(lapis_schema())
+    except Exception as e:
+        # log and return None if the schema check fails
+        lapis.logger.error(e)
+        return None
+    return conn
+
+# now get the schema in the database
+# if there's nothing there, create the schema
+# if there's something there, check to see if it's the same as the schema in the code
+# if it's different, update the schema
+# if it's the same, do nothing
+# this is to prevent schema changes from breaking the database
+
+def initialize():
+    conn = connection()
+    if conn is not None:
+        print("Initializing database..")
+        cursor = conn.cursor()
+        cursor.execute(lapis_schema())
+        conn.commit()
+        conn.close()
+# ================================
+# database functions
+# ================================
+
+# list the current builds, with an optional amount of builds to list
+# if no amount is specified, list all the builds
+# if an amount is specified, list the most recent builds
+class build:
+    # Insert build into the database
+    def insert(build: dict):
+        if config.get('debug') == True:
+            lapis.logger.debug("Inserting build into database")
+            lapis.logger.debug(build)
+        try:
+            conn = connection()
+            cur = conn.cursor()
+            build["output"] = json.dumps(build["output"])
+            cur.execute("INSERT INTO builds (id,name,description,source,status,started_at,finished_at,duration,output) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)",
+                        (build["id"],
+                         build["name"],
+                         build["description"],
+                         build["source"],
+                         build["status"],
+                         build["started_at"],
+                         build["finished_at"],
+                         build["duration"],
+                         build["output"]))
+            conn.commit()
+            cur.close()
+            conn.close()
+        except Exception as e:
+            lapis.logger.error("Error inserting build into database: " + str(e))
+
+    def remove(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("DELETE FROM builds WHERE id=%s", (id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def list(amount=None):
+        conn = connection()
+        cur = conn.cursor()
+        if amount is None:
+            cur.execute("SELECT * FROM builds")
+        else:
+            cur.execute("SELECT * FROM builds ORDER BY id DESC LIMIT %s", (amount,))
+        builds = cur.fetchall()
+        conn.close()
+        return builds
+
+    def get(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM builds WHERE id=%s", (id,))
+        build = cur.fetchone()
+        conn.close()
+        return build
+
+    def update(id, build):
+        conn = connection()
+        cur = conn.cursor()
+        build["output"] = json.dumps(build["output"])
+        # only update the fields that are specified
+        cur.execute("UPDATE builds SET name=%s, description=%s, source=%s, status=%s, started_at=%s, finished_at=%s, duration=%s, output=%s WHERE id=%s",
+                    (build["name"],
+                     build["description"],
+                     build["source"],
+                     build["status"],
+                     build["started_at"],
+                     build["finished_at"],
+                     build["duration"],
+                     build["output"],
+                     id))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def status(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT status FROM builds WHERE id=%s", (id,))
+        status = cur.fetchone()
+        conn.close()
+        return status[0]
+
+
+class tasks:
+    def insert(task):
+        try:
+            conn = connection()
+            cur = conn.cursor()
+            cur.execute("INSERT INTO tasks (id,type,build_id,status,payload) VALUES (%s,%s,%s,%s,%s)",
+                        (task["id"],
+                         task["type"],
+                         task["build_id"],
+                         task["status"],
+                         json.dumps(task["payload"])))
+            conn.commit()
+            cur.close()
+            conn.close()
+        except Exception as e:
+            lapis.logger.error("Error inserting task into database: " + str(e))
+
+    def remove(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("DELETE FROM tasks WHERE id=%s", (id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def list(type="pending"):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM tasks WHERE status=%s", (type,))
+        tasks = cur.fetchall()
+        conn.close()
+        # return the matching task rows
+        return tasks
+
+    def get(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM tasks WHERE id=%s", (id,))
+        task = cur.fetchone()
+        conn.close()
+        return task
+
+    def take(worker_id, id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("UPDATE tasks SET status='running' WHERE id=%s", (id,))
+        # assign the task to the worker that took it
+        cur.execute("UPDATE tasks SET worker_id=%s WHERE id=%s", (worker_id, id))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def update(id, task):
+        conn = connection()
+        cur = conn.cursor()
+        # only update the fields that are specified
+        cur.execute("UPDATE tasks SET status=%s, payload=%s WHERE id=%s",
+                    (task["status"],
+                     json.dumps(task["payload"]),
+                     id))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+class workers:
+    # Workers should update their last seen time (ping the server) every now and then
+    def ping(token):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("UPDATE workers SET last_seen=NOW() WHERE token=%s", (token,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def insert(worker):
+        lapis.logger.debug("Inserting worker into database")
+        try:
+            conn = connection()
+            cur = conn.cursor()
+            cur.execute("INSERT INTO workers (id,name,type,status,token) VALUES (%s,%s,%s,%s,%s)",
+                        (worker["id"],
+                         worker["name"],
+                         worker["type"],
+                         worker["status"],
+                         worker["token"]))
+            conn.commit()
+            cur.close()
+            conn.close()
+        except Exception as e:
+            lapis.logger.error("Error inserting worker into database: " + str(e))
+
+    def remove(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("DELETE FROM workers WHERE id=%s", (id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def list():
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM workers")
+        workers = cur.fetchall()
+        conn.close()
+        return workers
+
+    def get(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM workers WHERE id=%s", (id,))
+        worker = cur.fetchone()
+        conn.close()
+        return worker
+
+    # Chekhov's gun, it's a surprise tool that'll help us later.
+    def get_by_token(token):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM workers WHERE token=%s", (token,))
+        worker = cur.fetchone()
+        conn.close()
+        return worker
+
+    def update(id, worker):
+        conn = connection()
+        cur = conn.cursor()
+        # only update the fields that are specified
+        cur.execute("UPDATE workers SET name=%s, type=%s, status=%s, token=%s WHERE id=%s",
+                    (worker["name"],
+                     worker["type"],
+                     worker["status"],
+                     worker["token"],
+                     id))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+
+class user:
+    def insert(user):
+        try:
+            conn = connection()
+            cur = conn.cursor()
+            cur.execute("INSERT INTO users (id,name,email,password) VALUES (%s,%s,%s,%s)",
+                        (user["id"],
+                         user["name"],
+                         user["email"],
+                         user["password"]))
+            conn.commit()
+            cur.close()
+            conn.close()
+        except Exception as e:
+            lapis.logger.error("Error inserting user into database: " + str(e))
+
+    def remove(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("DELETE FROM users WHERE id=%s", (id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def list():
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM users")
+        users = cur.fetchall()
+        conn.close()
+        return users
+
+    def get(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM users WHERE id=%s", (id,))
+        user = cur.fetchone()
+        conn.close()
+        return user
+
+    def update(id, user):
+        conn = connection()
+        cur = conn.cursor()
+        # only update the fields that are specified
+        cur.execute("UPDATE users SET name=%s, email=%s, password=%s WHERE id=%s",
+                    (user["name"],
+                     user["email"],
+                     user["password"],
+                     id))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+class buildroot:
+    def insert(buildroot):
+        try:
+            conn = connection()
+            cur = conn.cursor()
+            cur.execute("INSERT INTO buildroots (id,name,type,status) VALUES (%s,%s,%s,%s)",
+                        (buildroot["id"],
+                         buildroot["name"],
+                         buildroot["type"],
+                         buildroot["status"]))
+            conn.commit()
+            cur.close()
+            conn.close()
+        except Exception as e:
+            lapis.logger.error("Error inserting buildroot into database: " + str(e))
+
+    def remove(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("DELETE FROM buildroots WHERE id=%s", (id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    def list():
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM buildroots")
+        buildroots = cur.fetchall()
+        conn.close()
+        return buildroots
+
+    def get(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM buildroots WHERE id=%s", (id,))
+        buildroot = cur.fetchone()
+        conn.close()
+        return buildroot
+
+    def update(id, buildroot):
+        conn = connection()
+        cur = conn.cursor()
+        # only update the fields that are specified
+        cur.execute("UPDATE buildroots SET name=%s, type=%s, status=%s WHERE id=%s",
+                    (buildroot["name"],
+                     buildroot["type"],
+                     buildroot["status"],
+                     id))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+class sessions:
+    def add(session):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("INSERT INTO sessions (id,user_id,token,created) VALUES (%s,%s,%s,%s)",
+                    (session["id"],
+                     session["user_id"],
+                     session["token"],
+                     session["created"]))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    # Kick the user out of the session by UID
+    def kick(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("DELETE FROM sessions WHERE user_id=%s", (id,))
+        conn.commit()
+        cur.close()
+        conn.close()
+
+    # list sessions by UID
+    def list(id):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM sessions WHERE user_id=%s", (id,))
+        sessions = cur.fetchall()
+        conn.close()
+        return sessions
+
+    # list all sessions
+    def list_all():
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM sessions")
+        sessions = cur.fetchall()
+        conn.close()
+        return sessions
+
+    # get session by token
+    def get(token):
+        conn = connection()
+        cur = conn.cursor()
+        cur.execute("SELECT * FROM sessions WHERE token=%s", (token,))
+        session = cur.fetchone()
+        conn.close()
+        return session
\ No newline at end of file
diff --git a/lapis/logger.py b/lapis/logger.py
new file mode 100644
index 0000000..a5f4790
--- /dev/null
+++ b/lapis/logger.py
@@ -0,0 +1,52 @@
+# logging component for lapis
+import logging
+import os
+from re import match
+import sys
+import lapis.config
+import logging.handlers
+# module-level logger
+log = logging.getLogger(__name__)
+
+# if --verbose or -V is passed, also log to the console
+
+if '--verbose' in sys.argv or '-V' in sys.argv:
+    log.setLevel(logging.DEBUG)
+    # log line format for console output
+    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+    # stream handler for console output
+    console = logging.StreamHandler()
+    console.setLevel(logging.DEBUG)
+    console.setFormatter(formatter)
+    log.addHandler(console)
+
+
+
+
+# set up the log file
+
+try:
+    log_file = os.path.join(os.path.dirname(sys.argv[0]), lapis.config.get('logfile'))
+    log_file_handler = logging.FileHandler(log_file)
+    log_file_handler.setLevel(lapis.config.get('logfile_level').upper())
+    log_file_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
+    log.addHandler(log_file_handler)
+except Exception as e:
+    logging.error(e)
+    log_file_handler = None
+
+# logger functions
+def debug(msg):
+    log.debug(msg)
+
+def info(msg):
+    log.info(msg)
+
+def error(msg):
+    log.error(msg)
+
+def critical(msg):
+    log.critical(msg)
+
+def warning(msg):
+    log.warning(msg)
diff --git a/lapis/util.py b/lapis/util.py
new file mode 100644
index 0000000..96c948c
--- /dev/null
+++ b/lapis/util.py
@@ -0,0 +1,6 @@
+# miscellaneous utility functions
+
+import datetime
+
+timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
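
For anyone smoke-testing this branch, a minimal sketch of how the new auth and db modules are intended to fit together might look like the following. This is illustrative only and not part of the patch: it assumes a reachable PostgreSQL instance configured through lapis.config, and the worker id, name, and token are made-up values.

    # Illustrative usage sketch only -- not part of this commit.
    import secrets
    import lapis.auth as auth
    import lapis.db as database

    # create the database schema from asset/schema.sql
    database.initialize()

    # register a worker with a random token, then validate that token
    token = secrets.token_hex(32)
    database.workers.insert({'id': 1, 'name': 'builder-01', 'type': 'mock',
                             'status': 'idle', 'token': token})

    check = auth.checkWorkerToken(token)
    if check['success']:
        print("worker authenticated:", check['user'])
    else:
        print("rejected:", check['error'])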