|
| 1 | +############################################################################### |
| 2 | +# |
| 3 | +# MIT License |
| 4 | +# |
| 5 | +# Copyright (c) 2026 Advanced Micro Devices, Inc. |
| 6 | +# |
| 7 | +# Permission is hereby granted, free of charge, to any person obtaining a copy |
| 8 | +# of this software and associated documentation files (the "Software"), to deal |
| 9 | +# in the Software without restriction, including without limitation the rights |
| 10 | +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
| 11 | +# copies of the Software, and to permit persons to whom the Software is |
| 12 | +# furnished to do so, subject to the following conditions: |
| 13 | +# |
| 14 | +# The above copyright notice and this permission notice shall be included in all |
| 15 | +# copies or substantial portions of the Software. |
| 16 | +# |
| 17 | +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 18 | +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 19 | +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
| 20 | +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 21 | +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 22 | +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
| 23 | +# SOFTWARE. |
| 24 | +# |
| 25 | +############################################################################### |
| 26 | +from datetime import datetime |
| 27 | +from typing import Optional, TypedDict |
| 28 | + |
| 29 | +from nodescraper.enums import EventCategory, EventPriority, ExecutionStatus |
| 30 | +from nodescraper.interfaces import DataAnalyzer |
| 31 | +from nodescraper.models import TaskResult |
| 32 | + |
| 33 | +from .analyzer_args import JournalAnalyzerArgs |
| 34 | +from .journaldata import JournalData, JournalJsonEntry |
| 35 | + |
| 36 | + |
class JournalEvent(TypedDict):
    """Aggregated occurrence data for one grouped journal message."""

    # number of times the (message, priority) pair was seen
    count: int
    # timestamp of the earliest matching entry
    first_occurrence: datetime
    # timestamp of the most recent matching entry
    last_occurrence: datetime
| 41 | + |
| 42 | + |
class JournalPriority:
    """Numeric syslog/journald priority levels.

    Numerically lower values are more severe: emergency(0) .. debug(7).
    """

    EMERGENCY = 0
    ALERT = 1
    CRITICAL = 2
    ERROR = 3
    WARNING = 4
    NOTICE = 5
    INFO = 6
    DEBUG = 7
| 52 | + |
| 53 | + |
class JournalAnalyzer(DataAnalyzer[JournalData, JournalAnalyzerArgs]):
    """Check journalctl output for high-priority (error-level) entries."""

    DATA_MODEL = JournalData

    @classmethod
    def filter_journal(
        cls,
        journal_content_json: list[JournalJsonEntry],
        analysis_range_start: Optional[datetime] = None,
        analysis_range_end: Optional[datetime] = None,
    ) -> list[JournalJsonEntry]:
        """Filter a journal log by date.

        Entries without a valid timestamp are always dropped. The journal is
        assumed to be in chronological order (journalctl emits it that way),
        so iteration stops at the first entry at or past
        ``analysis_range_end``.

        Parameters
        ----------
        journal_content_json : list[JournalJsonEntry]
            Unfiltered journal log.
        analysis_range_start : Optional[datetime], optional
            Inclusive start of the analysis range. Defaults to None
            (no lower bound).
        analysis_range_end : Optional[datetime], optional
            Exclusive end of the analysis range. Defaults to None
            (no upper bound).

        Returns
        -------
        list[JournalJsonEntry]
            Filtered journal log.
        """
        filtered_journal = []

        # no lower bound means every entry is already "past the start"
        found_start = analysis_range_start is None

        for entry in journal_content_json:
            date = entry.REALTIME_TIMESTAMP

            # skip entries without a valid timestamp
            if date is None:
                continue

            # BUGFIX: test the upper bound before the lower bound. This was
            # previously an elif after the start check, so the first entry
            # at/after the start was appended without ever being compared to
            # the end bound and could leak into the result when it was also
            # at/after the end.
            if analysis_range_end and date >= analysis_range_end:
                break

            if not found_start and date >= analysis_range_start:
                found_start = True

            # only keep entries once the start of the range has been reached
            if found_start:
                filtered_journal.append(entry)

        return filtered_journal

    def _priority_to_entry_priority(self, priority: int) -> EventPriority:
        """Map a journald priority number to an EventPriority.

        emergency(0)..error(3) -> ERROR, warning(4) -> WARNING,
        notice(5)..debug(7) -> INFO.
        """
        if priority <= JournalPriority.ERROR:
            return EventPriority.ERROR
        if priority == JournalPriority.WARNING:
            return EventPriority.WARNING
        if priority >= JournalPriority.NOTICE:
            return EventPriority.INFO
        # Unreachable for integer input (0-3, 4 and >=5 are exhaustive);
        # kept as a defensive fallback.
        return EventPriority.ERROR

    def _analyze_journal_entries_by_priority(
        self, journal_content_json: list[JournalJsonEntry], priority: int, group: bool
    ) -> None:
        """Log an event for every journal entry at or above a severity.

        Sets ``self.result.status`` to ERROR when any matching entry is
        found. When ``group`` is True, identical (message, priority) pairs
        are collapsed into one event carrying a count and first/last
        occurrence timestamps; otherwise one event is logged per entry.

        Parameters
        ----------
        journal_content_json : list[JournalJsonEntry]
            List of JournalJsonEntry to analyze.
        priority : int
            Priority threshold; entries with PRIORITY <= this value match
            (journald priorities are numerically lower for higher severity).
        group : bool
            Whether to group identical entries.
        """
        # Keyed by (message, priority) rather than the JournalJsonEntry
        # object so that repeated messages collapse into a single event.
        journal_event_map: dict[tuple[str, int], JournalEvent] = {}

        # Naive epoch sentinel used when an entry has no valid timestamp;
        # NOTE(review): a grouped event whose first entry lacked a timestamp
        # keeps this sentinel as its first_occurrence.
        epoch = datetime.fromtimestamp(0)

        # journald priority levels: emergency(0), alert(1), critical(2),
        # error(3), warning(4), notice(5), info(6), debug(7)
        for entry in journal_content_json:
            if entry.PRIORITY <= priority:
                self.result.status = ExecutionStatus.ERROR
                if not group:
                    # log one event per raw entry, carrying the full record
                    entry_dict = entry.model_dump()
                    entry_dict["task_name"] = self.__class__.__name__
                    self._log_event(
                        category=EventCategory.OS,
                        description="Journal log entry with priority level %s" % entry.PRIORITY,
                        data=entry_dict,
                        priority=self._priority_to_entry_priority(entry.PRIORITY),
                        console_log=False,
                    )
                    continue

                # MESSAGE may be either a string or a list of strings
                message = entry.MESSAGE
                if isinstance(message, list):
                    message = " ".join(message)

                entry_key = (message, entry.PRIORITY)
                event = journal_event_map.get(entry_key)
                if event is None:
                    timestamp = entry.REALTIME_TIMESTAMP if entry.REALTIME_TIMESTAMP else epoch
                    journal_event_map[entry_key] = {
                        "count": 1,
                        "first_occurrence": timestamp,
                        "last_occurrence": timestamp,
                    }
                else:
                    event["count"] += 1
                    # entries are chronological, so the latest valid
                    # timestamp seen is the last occurrence
                    if entry.REALTIME_TIMESTAMP:
                        event["last_occurrence"] = entry.REALTIME_TIMESTAMP

        # log all events that were grouped
        if group:
            for (message, entry_priority), event_data in journal_event_map.items():
                self._log_event(
                    category=EventCategory.OS,
                    description="Journal entries found in OS journal log",
                    priority=self._priority_to_entry_priority(entry_priority),
                    data={
                        "message": message,
                        "priority": entry_priority,
                        "count": event_data["count"],
                        "first_occurrence": event_data["first_occurrence"],
                        "last_occurrence": event_data["last_occurrence"],
                    },
                    console_log=False,
                )

    def analyze_data(
        self, data: JournalData, args: Optional[JournalAnalyzerArgs] = None
    ) -> TaskResult:
        """Analyze the OS journal log for errors.

        Parameters
        ----------
        data : JournalData
            Journal data to analyze.
        args : Optional[JournalAnalyzerArgs], optional
            Analysis arguments, by default None (defaults are used).

        Returns
        -------
        TaskResult
            ExecutionStatus.OK if no matching journal entries are found;
            ExecutionStatus.ERROR if entries exist with priority level less
            than or equal to ``args.check_priority``.
        """
        if args is None:
            args = JournalAnalyzerArgs()

        journal_content_json = data.journal_content_json

        # Filter by time range if specified
        if args.analysis_range_start or args.analysis_range_end:
            self.logger.info(
                "Filtering journal log using range %s - %s",
                args.analysis_range_start,
                args.analysis_range_end,
            )
            journal_content_json = self.filter_journal(
                journal_content_json=journal_content_json,
                analysis_range_start=args.analysis_range_start,
                analysis_range_end=args.analysis_range_end,
            )

        # assume success; _analyze_journal_entries_by_priority downgrades
        # the status to ERROR when matching entries are found
        self.result.status = ExecutionStatus.OK

        if args.check_priority is not None:
            self._analyze_journal_entries_by_priority(
                journal_content_json, args.check_priority, args.group
            )

        if self.result.status == ExecutionStatus.OK:
            self.result.message = "No journal errors found"
        else:
            self.result.message = f"Found journal entries with priority <= {args.check_priority}"

        return self.result
0 commit comments