# Copyright 2024 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Snapshot handler."""

import dataclasses
import functools
import logging
from io import StringIO

from pw_log import log_decoder
from pw_snapshot import processor
from pw_snapshot_protos import snapshot_pb2
from pw_symbolizer import Symbolizer, LlvmSymbolizer
from pw_tokenizer import detokenize, elf_reader

_LOG = logging.getLogger(__package__)


def _parse_snapshot(serialized_snapshot: bytes) -> snapshot_pb2.Snapshot:
    """Parse the serialized snapshot as a Snapshot proto."""
    return snapshot_pb2.Snapshot.FromString(serialized_snapshot)


def _process_logs(
    detokenizer: detokenize.Detokenizer | None, snapshot: snapshot_pb2.Snapshot
) -> str:
    """Returns the logs from the snapshot."""
    if not snapshot.logs:
        return "No captured logs\n"

    output: list[str] = [
        "Device Logs: ",
    ]

    decoded_log_stream = StringIO()
    handler = logging.StreamHandler(stream=decoded_log_stream)

    string_logger = logging.getLogger('crash_snapshot_logs')
    string_logger.level = logging.DEBUG
    # Only send logs to the StringIO handler.
    string_logger.propagate = False
    string_logger.addHandler(handler)

    # LogStreamDecoder requires a decoded_log_handler, but it is never
    # actually invoked here, since the logs are iterated manually and
    # parse_log_entry_proto is called directly.
    # pylint: disable=unused-argument
    def nop_log_handler(log: log_decoder.Log) -> None:
        return

    # pylint: enable=unused-argument

    decoder = log_decoder.LogStreamDecoder(
        decoded_log_handler=nop_log_handler,
        detokenizer=detokenizer,
        source_name='RpcDevice',
    )
    for log_entry in snapshot.logs:
        parsed_log = decoder.parse_log_entry_proto(log_entry)
        log_decoder.log_decoded_log(parsed_log, string_logger)

    output.append(decoded_log_stream.getvalue())

    return "\n".join(output)


def _snapshot_symbolizer_matcher(
    detokenizer: detokenize.Detokenizer,
    # pylint: disable=unused-argument
    snapshot: snapshot_pb2.Snapshot,
) -> Symbolizer:
    """Returns a symbolizer backed by the detokenizer's first ELF database."""
    if isinstance(detokenizer, detokenize.AutoUpdatingDetokenizer):
        if len(detokenizer.paths) > 1:
            _LOG.info(
                'More than one token database file provided. The first ELF '
                'file in the list will be used for symbolization.'
            )

        for database_path in detokenizer.paths:
            path = database_path.path
            if elf_reader.compatible_file(path):
                _LOG.debug('Using %s for symbolization', path)
                return LlvmSymbolizer(path)

    _LOG.warning(
        'No ELF token database specified. The crash report will not '
        'have any symbols.'
    )
    return LlvmSymbolizer()


@dataclasses.dataclass
class _CustomProcessor:
    """Snapshot processor callback handler."""

    detokenizer: detokenize.Detokenizer | None

    def __call__(self, serialized_snapshot: bytes) -> str:
        snapshot = _parse_snapshot(serialized_snapshot)

        output: list[str] = []
        output.append(_process_logs(self.detokenizer, snapshot))

        return "\n".join(output)


def decode_snapshot(
    detokenizer: detokenize.Detokenizer | None,
    serialized_snapshot: bytes,
) -> str:
    """Decodes the serialized snapshot.

    Args:
        detokenizer: String detokenizer.
        serialized_snapshot: Contents of the snapshot dump file.

    Returns:
        The decoded snapshot as a string.
    """
    custom_processor = _CustomProcessor(
        detokenizer,
    )

    return processor.process_snapshots(
        serialized_snapshot=serialized_snapshot,
        detokenizer=detokenizer,
        symbolizer_matcher=functools.partial(
            _snapshot_symbolizer_matcher, detokenizer
        ),
        user_processing_callback=custom_processor,
    )
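

# A minimal usage sketch (an illustration, not part of this module's public
# API): decode a snapshot dump read from disk, using an AutoUpdatingDetokenizer
# so that _snapshot_symbolizer_matcher can locate an ELF token database for
# symbolization. The 'firmware_image.elf' and 'crash_snapshot.bin' paths are
# hypothetical placeholders for a real build artifact and snapshot capture.
if __name__ == '__main__':
    _detokenizer = detokenize.AutoUpdatingDetokenizer(
        'firmware_image.elf'  # Hypothetical path to a tokenized ELF.
    )
    with open('crash_snapshot.bin', 'rb') as snapshot_file:  # Hypothetical.
        print(decode_snapshot(_detokenizer, snapshot_file.read()))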