#!/usr/bin/env python3
DIR = "/var/local/log/lcmlog-data"
import os
import os.path
#from os import stat
#from pwd import getpwuid
import sys
import pwd
import logging
import logging.handlers
import hashlib
import contextlib
import toml
import subprocess
# We log what happens every time someone connects
# Preparing the logger
logger = logging.getLogger(__name__)
file_formatter = logging.Formatter("%(asctime)s | %(levelname)8s | %(message)s")
logger.setLevel(logging.INFO)
# Logger handle to log all info
# We use a TimedRotatingFileHandler to rotate logs once a week
file_handler = logging.handlers.TimedRotatingFileHandler(filename=DIR + "/logs/logfile", when="W6", backupCount=10)
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
# Update logfile acl
#if pwd.getpwuid(os.stat(DIR + "/logs/logfile").st_uid).pw_name == pwd.getpwuid(os.geteuid()).pw_name:
#    subprocess.call(["touch", DIR + "/logs/logfile"])
#    #subprocess.call(["chmod", "444", DIR + "/logs/*"])
#    subprocess.call(["chmod", "666", DIR + "/logs/logfile"])
#------------------------------------------------------------------------------
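# Illustrative request format (an assumption added for clarity, not part of the original
# script): main() reads one field per line from stdin, so a hypothetical client wrapper
# invoking this script over ssh for a POST could pipe something like
#
#   printf 'POST\n150\n2019-07-15\nboot,kernel\n' | cat - log.txt | ssh loghost lcmlog
#
# where "loghost", "lcmlog" and the field values are made-up names used only as an example.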
def main():
    # The user calls us through ssh, so we can identify him simply from his effective uid
    user_id = os.geteuid()
    user_name = pwd.getpwuid(user_id).pw_name
    logger.info("Started by user " + user_name + " (id " + str(user_id) + ")")
    try:
        method = input()  # Can be GET, POST or UPDATE
        logger.info("Method: " + method)
        if method == "UPDATE":  # We don't need more input lines for the UPDATE method
            auth(user_id, "UPDATE", "")  # Check if the user can update the database
            method_update()
        else:
            kind = input()  # The kind of the log; it can be 150 or Admin
            logger.info("Kind: " + kind)
            # Date checking is currently implemented in the client-side script
            # Maybe, in the future, date validation can also be implemented here
            date = input()  # The date of the log
            logger.info("Date: " + date)
            tags = input()  # Tags are comma separated
            logger.info("Tags: " + tags)
            if kind != "150" and kind != "Admin":  # We only have these two log types
                raise KindError
            if method == "POST":
                auth(user_id, "POST", kind)  # Check if the user can post for the requested kind
                log = sys.stdin.read()  # Read the log content
                method_post(kind, user_name, date, tags, log)
            elif method == "GET":
                auth(user_id, "GET", kind)  # Check if the user can get logs for the requested kind
                user_to_find = input()  # Read the user name of the log writer to search for
                logger.info("User to find: " + user_to_find)
                method_get(kind, user_to_find, date, tags)
            else:
                raise MethodError
    except EOFError:
        logger.critical("1 Not enough input lines")
        sys.exit(1)
    except FileNotFoundError:
        logger.critical("2 File not found")
        sys.exit(2)
    except FileExistsError:
        logger.critical("3 File already exists")
        sys.exit(3)
    except OSError:
        logger.critical("4 File error")
        sys.exit(4)
    except MethodError:
        logger.critical("5 Undefined method")
        sys.exit(5)
    except KindError:
        logger.critical("6 Undefined log kind")
        sys.exit(6)
    except AuthError:
        logger.critical("7 Authentication error")
        sys.exit(7)
    except Exception as error:
        logger.critical("8 Generic error: " + str(error))
        sys.exit(8)
    finally:
        logger.info("End\n")
#------------------------------------------------------------------------------
# Creates a new log file and adds it to the database
# kind, user_name, date and tags are the log metadata
# log is the log content
# The log metadata and content are hashed; the hash is saved in the database and used as the filename for the log
# The return value of the function is the hash
def log_create(kind, user_name, date, tags, log):
    name = hashlib.sha512((kind + user_name + date + tags + log).encode("utf-8")).hexdigest()
    with open(DIR + "/data/" + name, "x") as f:
        f.write(name + "\n" + kind + "\n" + user_name + "\n" + date + "\n" + tags + "\n" + log)  # Write the file
    with open(DIR + "/data/.data", "a") as f:
        f.write(name + ":" + kind + ":" + user_name + ":" + date + ":" + tags + "\n")  # And add the entry to the .data file
    return name
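# For illustration (the field layout is taken from the write above, the values are made up):
# a .data entry is one colon-separated record per log file, keyed by the hash that also
# names the file on disk:
#   <sha512 hex digest>:150:alice:2019-07-15:boot,kernel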
# Searches for the requested entries
# The function returns a list containing the hash (as saved in the database) of every file that meets the specified criteria
# The kind parameter is mandatory (because different users have different privileges based on it).
# All the other arguments can be empty; only the non-empty arguments are taken into consideration for the search
def log_find(kind, user_name, date, tags):
    file_list = list()
    with open(DIR + "/data/.data", "r") as f:
        for line in f:
            found = True
            l = line.split(":")
            # The kind is different
            if l[1].find(kind) == -1:
                continue
            # We are searching by username and it doesn't match
            if user_name and l[2].find(user_name) == -1:
                continue
            # We are searching by date and it doesn't match
            if date and l[3].find(date) == -1:
                continue
            # Search tags
            for t in tags.split(","):
                if t and l[4].find(t) == -1:
                    found = False
                    break
            # Save
            if found:
                file_list.append(l[0])
    return file_list
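# Hedged usage sketch (hypothetical values, added for clarity): searching kind "150" for
# logs written by "alice" in July 2019 that carry both of two tags would be
#   log_find("150", "alice", "2019-07", "boot,kernel")
# and it returns the hashes of every .data record whose fields contain those substrings.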
# TODO: the following functions work with the hash of the log files. The problem is that the hash lives in three different places: the first line of the file,
# the database entry for the log, and the filename of the log. I have to decide which function operates on which hash, because, for example, if the hash is changed in the file,
# it also needs to be changed in the other two locations.
# Adds a log file to .data
# This function reads an existing log file and adds it to the database
# The hash that is saved in the database is not recalculated: the first line of the file is taken to be the hash. Use log_check to verify that they are the same
def log_add(name):
    with open(DIR + "/data/" + name, "r") as f, open(DIR + "/data/.data", "a") as data:
        data.write(f.readline().rstrip("\n") + ":" +   # Hash
                   f.readline().rstrip("\n") + ":" +   # Kind
                   f.readline().rstrip("\n") + ":" +   # User name
                   f.readline().rstrip("\n") + ":" +   # Date
                   f.readline().rstrip("\n") + "\n")   # Tags
# Checks whether the saved hash is correct and, if it is not, asks the user what to do
# This function calculates the hash of the file named name and returns True if it matches the first line of the file, False otherwise
# If it doesn't match, the user is asked whether to keep it as it is or change it. Currently this is pretty messed up: only the hash saved in the file is changed,
# not the one saved in the database or the file name. Also, the dialog asking whether to change the hash probably should not live in this function.
def log_check(name):
    with open(DIR + "/data/" + name, "r") as f:
        saved_hash = f.readline().rstrip("\n")
        kind = f.readline().rstrip("\n")
        user = f.readline().rstrip("\n")
        date = f.readline().rstrip("\n")
        tags = f.readline().rstrip("\n")
        log = f.read()
    calc_hash = log_hash(name)
    result = saved_hash == calc_hash
    if not result:
        logger.warning(calc_hash + " hash does not correspond to saved one")
        while True:
            print("Warning: " + calc_hash + " hash does not correspond to the saved one.\n" +
                  "Do you want to: print the log (p), change the saved hash (c), or leave it as it is (l)?")
            c = input()
            if c == "p":
                sys.stdout.write("Hash: " + saved_hash + "\n")
                sys.stdout.write("Kind: " + kind + "\n")
                sys.stdout.write("User: " + user + "\n")
                sys.stdout.write("Date: " + date + "\n")
                sys.stdout.write("Tags: " + tags + "\n")
                sys.stdout.write("\n" + log + "\n")
            elif c == "l":
                logger.info("Hash unchanged")
                break
            elif c == "c":
                with open(DIR + "/data/" + name, "w") as f:
                    f.write(calc_hash + "\n" + kind + "\n" + user + "\n" + date + "\n" + tags + "\n" + log)
                logger.info("Hash changed")
                break
    return result
# Calculates the hash of a file
# The first line of the file is the saved hash, therefore it is not considered in the calculation
def log_hash(name):
    with open(DIR + "/data/" + name, "r") as f:
        f.readline()  # The saved hash doesn't enter into the calculation
        kind = f.readline().rstrip("\n")
        user_name = f.readline().rstrip("\n")
        date = f.readline().rstrip("\n")
        tags = f.readline().rstrip("\n")
        log = f.read()
    return hashlib.sha512((kind + user_name + date + tags + log).encode("utf-8")).hexdigest()
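# Consistency note (an added observation, not original code): for a file written by
# log_create, log_hash(name) == name, because log_create uses the same sha512 digest of
# (kind + user_name + date + tags + log) both as the filename and as the saved first line.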
#------------------------------------------------------------------------------
# Print specified log on stdout
def method_get(kind, user_to_find, date, tags):
    file_list = log_find(kind, user_to_find, date, tags)
    for name in file_list:
        with open(DIR + "/data/" + name, "r") as f:
            sys.stdout.write("Hash: " + f.readline())
            sys.stdout.write("Kind: " + f.readline())
            sys.stdout.write("User: " + f.readline())
            sys.stdout.write("Date: " + f.readline())
            sys.stdout.write("Tags: " + f.readline())
            sys.stdout.write("\n" + f.read() + "------------------\n")
    logger.info("GET successful: got " + str(len(file_list)) + " files")
# Write log
def method_post(kind, user_name, date, tags, log):
    name = log_create(kind, user_name, date, tags, log)
    logger.info("POST successful: hash " + name)
# Generate .data file
def method_update():
    with contextlib.suppress(FileNotFoundError):
        os.remove(DIR + "/data/.data")
    file_list = os.listdir(DIR + "/data/")
    open(DIR + "/data/.data", "x").close()
    for name in file_list:
        newname = log_hash(name)
        if not log_check(name):
            os.rename(DIR + "/data/" + name, DIR + "/data/" + newname)
        log_add(newname)
    logger.info("UPDATE successful: added " + str(len(file_list)) + " files")
#------------------------------------------------------------------------------
# Checks if the user has the permissions to use the requested method
def auth(user_id, method, kind):
    # Check if the user id is in the auth files
    # We assume that every authorized user appears in ONLY ONE file!
    user_type = ""
    for file_name in ["150", "Admin", "Valhalla", "Nirvana"]:
        with open(DIR + "/auth/" + file_name) as f:
            for line in f:
                line = line.split()[0]
                if int(line) == user_id:
                    # If present, we only care about the user type
                    user_type = file_name
                    break
        if user_type != "":
            break
    else:
        # If not in the auth files, the user cannot do anything
        raise AuthError()
    # Now we check the user type permissions
    auth_list = toml.load(DIR + "/auth/auth.toml")[user_type]["auth"]
    if method + " " + kind not in auth_list:
        raise AuthError()
    return
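# A minimal sketch of what DIR/auth/auth.toml could contain (an assumed layout inferred
# from the lookup above, not shipped with this script). Each user type lists the
# "<METHOD> <kind>" strings it is allowed to use; note the trailing space for UPDATE,
# whose kind is empty:
#
#   [150]
#   auth = ["GET 150", "POST 150"]
#
#   [Admin]
#   auth = ["GET 150", "GET Admin", "POST Admin", "UPDATE "]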
#------------------------------------------------------------------------------
# Error definitions
class AuthError(Exception):
    pass
class KindError(Exception):
    pass
class MethodError(Exception):
    pass
#------------------------------------------------------------------------------
# Starting point
if __name__ == "__main__":
    main()
    # Change permissions on the logfile just before leaving. Dirty fix to a not-well-understood problem
    subprocess.call(["chmod", "666", DIR + "/logs/logfile"])