#!/usr/bin/env python3

DIR = "/var/local/log/lcmlog-data"
FROM_DOMAIN = "lcm.mi.infn.it"
TO_ADDRESS = "working@lcm.mi.infn.it"
REPLY_TO = True  # Does this make sense?

import os
import os.path
#from os import stat
#from pwd import getpwuid
import sys
import shlex
import pwd
import logging
import logging.handlers
import hashlib
import contextlib
import toml
import subprocess
# We log what happens every time someone connects
# Preparing the logger
logger = logging.getLogger(__name__)
file_formatter = logging.Formatter("%(asctime)s | %(levelname)8s | %(message)s")
logger.setLevel(logging.INFO)
# Logger handle to log all info
# We use a TimedRotatingFileHandler to rotate logs once a week
file_handler = logging.handlers.TimedRotatingFileHandler(filename = DIR + "/logs/logfile", when = "W6", backupCount = 10)
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
# Update logfile acl
#if pwd.getpwuid(os.stat(DIR + "/logs/logfile").st_uid).pw_name == pwd.getpwuid(os.geteuid()).pw_name:
#    subprocess.call(["touch", DIR + "/logs/logfile"])
#    #subprocess.call(["chmod", "444", DIR + "/logs/*"])
#    subprocess.call(["chmod", "666", DIR + "/logs/logfile"])

#------------------------------------------------------------------------------
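# The request protocol, as implemented in main() below, is line-based on stdin:
# the first line is the method. UPDATE takes no further input; GET and POST are
# followed by three lines with the kind, the date and the comma-separated tags.
# POST then reads the log body from the rest of stdin, while GET reads one more
# line with the user name to search for.
#
# A minimal client-side sketch of a POST request, assuming the server is reached
# through ssh as the (hypothetical) host alias "lcmlog-host"; the alias and the
# example values are illustrative only:
#
#   import subprocess
#   request = "\n".join(["POST", "150", "2021-01-31", "network,dhcp", "log text..."])
#   subprocess.run(["ssh", "lcmlog-host"], input=request, text=True)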
def main():
    # The user calls us through ssh, so to know who they are we can simply get their effective uid
    user_id = os.geteuid()
    user_name = pwd.getpwuid(user_id).pw_name
    logger.info("Started by user " + user_name + " (id " + str(user_id) + ")")
    try:
        method = input()  # Can be GET, POST or UPDATE
        logger.info("Method: " + method)
        if method == "UPDATE":  # We don't need more input lines for the UPDATE method
            auth(user_id, "UPDATE", "")  # Check if the user can update the database
            method_update()
        else:
            kind = input()  # This is the kind of the log, and it can be 150 or Admin
            logger.info("Kind: " + kind)
            # For now the date check is implemented in the client-side script
            # Maybe, in the future, date validation can be implemented here as well
            date = input()  # The date of the log
            logger.info("Date: " + date)
            tags = input()  # Tags are comma separated
            logger.info("Tags: " + tags)
            if kind != "150" and kind != "Admin":  # We only have these two log types
                raise KindError
            if method == "POST":
                auth(user_id, "POST", kind)  # Check if the user can post for the requested kind
                log = sys.stdin.read()  # Read the log content
                method_post(kind, user_name, date, tags, log)
            elif method == "GET":
                auth(user_id, "GET", kind)  # Check if the user can get logs for the requested kind
                user_to_find = input()  # Read the user name of the log writer to search for
                logger.info("User to find: " + user_to_find)
                method_get(kind, user_to_find, date, tags)
            else:
                raise MethodError
    except EOFError as error:
        logger.critical("1 Not enough input lines")
        sys.exit(1)
    except FileNotFoundError as error:
        logger.critical("2 File not found")
        sys.exit(2)
    except FileExistsError as error:
        logger.critical("3 File already exists")
        sys.exit(3)
    except OSError as error:
        logger.critical("4 File error")
        sys.exit(4)
    except MethodError as error:
        logger.critical("5 Undefined method")
        sys.exit(5)
    except KindError as error:
        logger.critical("6 Undefined log kind")
        sys.exit(6)
    except AuthError as error:
        logger.critical("7 Authentication error")
        sys.exit(7)
    except Exception as error:
        logger.critical("8 Generic error: " + str(error))
        sys.exit(8)
    finally:
        logger.info("End\n")

#------------------------------------------------------------------------------
# Creates a new log file and adds it to the database
# kind, user_name, date and tags are the log metadata
# log is the log content
# The log metadata and content are hashed; the hash is saved in the database and used as the filename for the log
# The return value of the function is the hash
def log_create(kind, user_name, date, tags, log):
    name = hashlib.sha512((kind + user_name + date + tags + log).encode("utf-8")).hexdigest()
    with open(DIR + "/data/" + name, "x") as f:
        f.write(name + "\n" + kind + "\n" + user_name + "\n" + date + "\n" + tags + "\n" + log)  # Write the file
    with open(DIR + "/data/.data", "a") as f:
        f.write(name + ":" + kind + ":" + user_name + ":" + date + ":" + tags + "\n")  # And add the entry to the .data file
    return name
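
# Example of the resulting .data record (one colon-separated line per log; the
# hash is shortened and the values are illustrative):
#
#   3fa2...91bc:150:alice:2021-01-31:network,dhcp
#
# Note that, since log_find below splits these records on ":", none of the
# metadata fields should themselves contain a colon.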

# Searches for the requested entries
# The function returns a list containing the hashes (saved in the database) of all the files that meet the specified criteria
# The kind parameter is mandatory (because different users have different privileges based on it).
# All the other arguments can be empty. Only the arguments that are not empty are taken into consideration for the search
def log_find(kind, user_name, date, tags):
    file_list = list()
    with open(DIR + "/data/.data", "r") as f:
        for line in f:
            found = True
            l = line.split(":")
            # Skip if the kind doesn't match
            if l[1].find(kind) == -1:
                continue
            # Skip if we are searching by username and it doesn't match
            if user_name and l[2].find(user_name) == -1:
                continue
            # Skip if we are searching by date and it doesn't match
            if date and l[3].find(date) == -1:
                continue
            # Search tags
            for t in tags.split(","):
                if t and l[4].find(t) == -1:
                    found = False
                    break
            # Save
            if found:
                file_list.append(l[0])
    return file_list
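
# Example: log_find("150", "", "2021-01", "dhcp") returns the hashes of every
# 150 log from January 2021 carrying a tag that contains "dhcp" (the values are
# illustrative; matching is substring-based via str.find).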

# TODO: the following functions work with the hash of the log files. The problem is that the hash lives in three different places: the first line of the file,
# the database entry for the log and the filename of the log. I have to decide which function operates on which hash, because, for example, if the hash is changed in the file,
# it also needs to be changed in the other two locations.

# Adds an existing log file to .data
# This function reads an existing log file and adds it to the database
# The hash that is saved in the database is not recalculated: the first line of the file is taken as the hash. Use log_check to verify that they match
def log_add(name):
    with open(DIR + "/data/" + name, "r") as f, open(DIR + "/data/.data", "a") as data:
        data.write(f.readline().rstrip("\n") + ":" +  # Hash
                   f.readline().rstrip("\n") + ":" +  # Kind
                   f.readline().rstrip("\n") + ":" +  # User name
                   f.readline().rstrip("\n") + ":" +  # Date
                   f.readline().rstrip("\n") + "\n")  # Tags

# Checks whether the saved hash is correct and, if it is not, asks the user what to do
# This function calculates the hash of the file with filename name and returns True if it matches the first line of the file, False otherwise
# If it doesn't match, it asks the user whether to keep it as it is or change it. Currently this is pretty messed up: only the hash saved in the file is changed,
# not the one saved in the database or the file name. Also, the dialog asking whether to change the hash probably should not live in this function.
def log_check(name):
    with open(DIR + "/data/" + name, "r") as f:
        saved_hash = f.readline().rstrip("\n")
        kind = f.readline().rstrip("\n")
        user = f.readline().rstrip("\n")
        date = f.readline().rstrip("\n")
        tags = f.readline().rstrip("\n")
        log = f.read()
    calc_hash = log_hash(name)
    result = saved_hash == calc_hash
    if not result:
        logger.warning(calc_hash + " hash does not correspond to the saved one")
        while True:
            print("Warning: " + calc_hash + " hash does not correspond to the saved one.\n" +
                  "Do you want to: print the log (p), change the saved hash (c), or leave it as it is (l)?")
            c = input()
            if c == "p":
                sys.stdout.write("Hash: " + saved_hash + "\n")
                sys.stdout.write("Kind: " + kind + "\n")
                sys.stdout.write("User: " + user + "\n")
                sys.stdout.write("Date: " + date + "\n")
                sys.stdout.write("Tags: " + tags + "\n")
                sys.stdout.write("\n" + log + "\n")
            elif c == "l":
                logger.info("Hash unchanged")
                break
            elif c == "c":
                with open(DIR + "/data/" + name, "w") as f:
                    f.write(calc_hash + "\n" + kind + "\n" + user + "\n" + date + "\n" + tags + "\n" + log)
                logger.info("Hash changed")
                break
    return result

# Calculates the hash of a file
# The first line of the file is the saved hash, therefore it is not considered in the calculation
def log_hash(name):
    with open(DIR + "/data/" + name, "r") as f:
        f.readline()  # The saved hash doesn't enter in the calculation
        kind = f.readline().rstrip("\n")
        user_name = f.readline().rstrip("\n")
        date = f.readline().rstrip("\n")
        tags = f.readline().rstrip("\n")
        log = f.read()
    return hashlib.sha512((kind + user_name + date + tags + log).encode("utf-8")).hexdigest()

# Emails the log contents from USER_NAME@FROM_DOMAIN to TO_ADDRESS using the
# 'mail(1)' program. If REPLY_TO is True, the 'Reply-to: ' header is added to the email
def log_mail(kind, user_name, date, tags, log):
    if REPLY_TO:
        subprocess.run("printf \"%s\\n\" " + shlex.quote(log) + " | mail -s \"Log" + kind + " " + date + " " + tags +
                       "\" -r " + user_name + "@" + FROM_DOMAIN + " -a \"Reply-to: " + TO_ADDRESS + "\" -- " +
                       TO_ADDRESS, shell=True)
    else:
        subprocess.run("printf \"%s\\n\" " + shlex.quote(log) + " | mail -s \"Log" + kind + " " + date + " " + tags +
                       "\" -r " + user_name + "@" + FROM_DOMAIN + " -- " + TO_ADDRESS, shell=True)
    return
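
# A shell-free sketch of the same call, feeding the log to mail(1) on stdin
# instead of going through a printf pipeline. It reuses the -s/-r/-a/-- options
# from the command above and is not wired into the script; treat it as an
# alternative under those assumptions, not the current behaviour:
#
#   def log_mail_noshell(kind, user_name, date, tags, log):
#       cmd = ["mail", "-s", "Log" + kind + " " + date + " " + tags,
#              "-r", user_name + "@" + FROM_DOMAIN]
#       if REPLY_TO:
#           cmd += ["-a", "Reply-to: " + TO_ADDRESS]
#       cmd += ["--", TO_ADDRESS]
#       subprocess.run(cmd, input=log, text=True)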

#------------------------------------------------------------------------------
# Print the specified logs on stdout
def method_get(kind, user_to_find, date, tags):
    file_list = log_find(kind, user_to_find, date, tags)
    for name in file_list:
        with open(DIR + "/data/" + name, "r") as f:
            sys.stdout.write("Hash: " + f.readline())
            sys.stdout.write("Kind: " + f.readline())
            sys.stdout.write("User: " + f.readline())
            sys.stdout.write("Date: " + f.readline())
            sys.stdout.write("Tags: " + f.readline())
            sys.stdout.write("\n" + f.read() + "------------------\n")
    logger.info("GET successful: got " + str(len(file_list)) + " files")

# Write a new log
def method_post(kind, user_name, date, tags, log):
    name = log_create(kind, user_name, date, tags, log)
    logger.info("POST successful: hash " + name)

# Regenerate the .data file
def method_update():
    with contextlib.suppress(FileNotFoundError):
        os.remove(DIR + "/data/.data")
    file_list = os.listdir(DIR + "/data/")
    open(DIR + "/data/.data", "x").close()
    for name in file_list:
        newname = log_hash(name)
        if not log_check(name):
            os.rename(DIR + "/data/" + name, DIR + "/data/" + newname)
        log_add(newname)
    logger.info("UPDATE successful: added " + str(len(file_list)) + " files")

#------------------------------------------------------------------------------
# Checks if the user has the permissions to use the requested method
def auth(user_id, method, kind):
    # Check if the user id is in the auth files
    # We assume that every authorized user appears in ONLY ONE FILE!
    user_type = ""
    for file_name in ["150", "Admin", "Valhalla", "Nirvana"]:
        with open(DIR + "/auth/" + file_name) as f:
            for line in f:
                line = line.split()[0]
                if int(line) == user_id:
                    # If present, we consider only the user type
                    user_type = file_name
                    break
        if not user_type == "":
            break
    else:
        # If not in the auth files, the user cannot do anything
        raise AuthError()
    # Now we check the user type permissions
    auth_list = toml.load(DIR + "/auth/auth.toml")[user_type]["auth"]
    if not method + " " + kind in auth_list:
        raise AuthError()
    return
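
# auth.toml is expected to map each user type to the list of "METHOD KIND"
# strings it may use (the kind is empty for UPDATE, which leaves a trailing
# space). A hypothetical layout consistent with the lookup above:
#
#   [150]
#   auth = ["GET 150", "POST 150"]
#
#   [Admin]
#   auth = ["GET 150", "GET Admin", "POST Admin", "UPDATE "]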

#------------------------------------------------------------------------------
# Error definitions
class AuthError(Exception):
    pass

class KindError(Exception):
    pass

class MethodError(Exception):
    pass

#------------------------------------------------------------------------------
# Starting point
if __name__ == "__main__":
    main()
    # Change the logfile permissions just before leaving. Dirty fix for a problem that is not well understood
    subprocess.call(["chmod", "666", DIR + "/logs/logfile"])