Coverage for backend/app/utils.py: 69%

75 statements  

coverage.py v7.10.7, created at 2025-09-22 15:38 +0000

1"""Module containing utility functions.""" 

2 

3import logging 

4from datetime import datetime 

5from logging.handlers import RotatingFileHandler 

6from pathlib import Path 

7from typing import Optional 

8 

9import bcrypt 

10 

11 

def hash_password(password: str) -> str:
    """Hash a password for storing.

    :param password: password to hash
    :return: hashed password
    """
    return bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")


def verify_password(password: str, hashed: str) -> bool:
    """Verify a stored password against one provided by the user.

    :param password: raw password to check
    :param hashed: hashed password from the database
    :return: boolean indicating whether the passwords matched
    """
    return bcrypt.checkpw(password.encode("utf-8"), hashed.encode("utf-8"))
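# Usage sketch (a minimal illustration, not part of utils.py; assumes bcrypt is
# installed): a hash produced by hash_password() round-trips through
# verify_password():
#
#     stored = hash_password("s3cret")
#     assert verify_password("s3cret", stored)
#     assert not verify_password("wrong-guess", stored)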

class AppLogger:
    """Centralised logging utility"""

    _loggers = {}  # Cache for created loggers

    @classmethod
    def get_logger(
        cls,
        name: str,
        log_dir: str = "logs",
        log_file: Optional[str] = None,
        level: int = logging.INFO,
        max_file_size: int = 10 * 1024 * 1024,  # 10MB
        backup_count: int = 5,
        console_output: bool = True,
    ) -> logging.Logger:
        """Get or create a logger with the specified configuration.

        :param name: Logger name (usually the module name)
        :param log_dir: Directory for log files
        :param log_file: Specific log file name (defaults to {name}.log)
        :param level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        :param max_file_size: Maximum size of the log file before rotation
        :param backup_count: Number of backup files to keep
        :param console_output: Whether to output logs to the console
        :return: Configured logger instance
        """
        # Return cached logger if it exists
        cache_key = f"{name}_{log_dir}_{log_file}"
        if cache_key in cls._loggers:
            return cls._loggers[cache_key]

        # Create new logger
        logger = logging.getLogger(name)
        logger.setLevel(level)

        # Prevent duplicate handlers if the logger already exists
        if logger.handlers:
            cls._loggers[cache_key] = logger
            return logger

        # Create log directory
        log_path = Path(log_dir)
        log_path.mkdir(exist_ok=True)

        # Set log file name
        if not log_file:
            log_file = f"{name}.log"

        full_log_path = log_path / log_file

        # Create formatters
        detailed_formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )

        simple_formatter = logging.Formatter(
            "%(asctime)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
        )

        # File handler with rotation
        file_handler = RotatingFileHandler(
            full_log_path, maxBytes=max_file_size, backupCount=backup_count, encoding="utf-8"
        )
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(detailed_formatter)
        logger.addHandler(file_handler)

        # Console handler
        if console_output:
            console_handler = logging.StreamHandler()
            console_handler.setLevel(level)
            console_handler.setFormatter(simple_formatter)
            logger.addHandler(console_handler)

        # Cache the logger
        cls._loggers[cache_key] = logger

        return logger
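    # Usage sketch (a minimal illustration, not part of utils.py): repeated calls
    # with the same name/log_dir/log_file return the same cached instance, and
    # records land in logs/worker.log with rotation at 10 MB:
    #
    #     logger = AppLogger.get_logger("worker")
    #     same = AppLogger.get_logger("worker")
    #     assert logger is same
    #     logger.info("worker started")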

    @classmethod
    def create_service_logger(cls, service_name: str, log_level: str = "INFO") -> logging.Logger:
        """Create a standardised logger for a service.

        :param service_name: Name of the service (e.g., 'gmail_scraper', 'job_scraper')
        :param log_level: String representation of log level
        :return: Configured logger
        """
        level_map = {
            "DEBUG": logging.DEBUG,
            "INFO": logging.INFO,
            "WARNING": logging.WARNING,
            "ERROR": logging.ERROR,
            "CRITICAL": logging.CRITICAL,
        }

        level = level_map.get(log_level.upper(), logging.INFO)

        return cls.get_logger(
            name=service_name,
            log_dir="logs",
            log_file=f"{service_name}.log",
            level=level,
            max_file_size=10 * 1024 * 1024,  # 10MB
            backup_count=5,
            console_output=True,
        )
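    # Usage sketch (a minimal illustration, not part of utils.py): the level
    # string is case-insensitive and unknown values fall back to INFO:
    #
    #     svc_logger = AppLogger.create_service_logger("job_scraper", "debug")
    #     assert svc_logger.level == logging.DEBUG
    #     fallback = AppLogger.create_service_logger("misc", "VERBOSE")
    #     assert fallback.level == logging.INFO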

    @classmethod
    def log_execution_time(cls, logger: logging.Logger, start_time: datetime, operation: str):
        """Log execution time for an operation.

        :param logger: Logger instance
        :param start_time: Start time of the operation
        :param operation: Description of the operation
        """
        end_time = datetime.now()
        duration = end_time - start_time
        logger.info(f"{operation} completed in {duration.total_seconds():.2f} seconds")
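    # Usage sketch (a minimal illustration, not part of utils.py; scrape_inbox is
    # a hypothetical long-running operation):
    #
    #     started = datetime.now()
    #     scrape_inbox()
    #     AppLogger.log_execution_time(logger, started, "Inbox scrape")
    #     # logs e.g. "Inbox scrape completed in 12.34 seconds"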

    @classmethod
    def log_stats(cls, logger: logging.Logger, stats: dict, title: str = "Operation Statistics"):
        """Log statistics in a formatted way.

        :param logger: Logger instance
        :param stats: Dictionary of statistics
        :param title: Title for the statistics block
        """
        logger.info("=" * 50)
        logger.info(title)
        logger.info("=" * 50)

        for key, value in stats.items():
            if isinstance(value, list):
                logger.info(f"{key}: {len(value)} items")
                if value:  # Log first few items if the list is not empty
                    sample = value[:3]
                    logger.debug(f"  Sample {key}: {sample}")
            else:
                logger.info(f"{key}: {value}")

        logger.info("=" * 50)
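    # Usage sketch (a minimal illustration, not part of utils.py): list values
    # are summarised by length at INFO, with a three-item sample at DEBUG:
    #
    #     AppLogger.log_stats(logger, {"processed": 42, "errors": ["timeout", "404"]})
    #     # logs "processed: 42" and "errors: 2 items"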


def get_gmail_logger() -> logging.Logger:
    """Get logger for Gmail scraping service"""
    return AppLogger.create_service_logger("gmail_scraper", "INFO")


def get_job_scraper_logger() -> logging.Logger:
    """Get logger for job scraping service"""
    return AppLogger.create_service_logger("job_scraper", "INFO")


def get_scheduler_logger() -> logging.Logger:
    """Get logger for scheduler service"""
    return AppLogger.create_service_logger("scheduler", "INFO")


def get_api_logger() -> logging.Logger:
    """Get logger for API operations"""
    return AppLogger.create_service_logger("api", "INFO")


def get_database_logger() -> logging.Logger:
    """Get logger for database operations"""
    return AppLogger.create_service_logger("database", "WARNING")


def get_auth_logger() -> logging.Logger:
    """Get logger for authentication operations"""
    return AppLogger.create_service_logger("auth", "INFO")
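
A closing usage sketch of how these helpers combine (a minimal illustration, assuming the module is importable as backend.app.utils, matching the path in the report header, and that a logs/ directory can be created in the working directory):

    from datetime import datetime

    from backend.app.utils import AppLogger, get_api_logger, hash_password, verify_password

    api_logger = get_api_logger()  # writes to logs/api.log and the console at INFO
    stored = hash_password("correct horse battery")

    started = datetime.now()
    ok = verify_password("correct horse battery", stored)
    AppLogger.log_execution_time(api_logger, started, "Password verification")
    AppLogger.log_stats(api_logger, {"verified": ok, "failures": []})

Because get_logger caches by name, log_dir, and log_file, calling get_api_logger() elsewhere in the application returns the same configured instance instead of attaching duplicate handlers.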