############################################################################
# tools/gdb/utils.py
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership. The
# ASF licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
############################################################################

import re
import shlex
from typing import List, Tuple, Union

import gdb

from macros import fetch_macro_info, try_expand

# Cached lookups: symbol values, type objects and the macro expansion context
g_symbol_cache = {}
g_type_cache = {}
g_macro_ctx = None


def backtrace(addresses: List[Union[gdb.Value, int]]) -> List[Tuple[int, str, str]]:
    """Convert addresses to backtrace"""
    backtrace = []

    for addr in addresses:
        if not addr:
            break

        if type(addr) is int:
            addr = gdb.Value(addr)

        if addr.type.code != gdb.TYPE_CODE_PTR:
            addr = addr.cast(gdb.lookup_type("void").pointer())

        func = addr.format_string(symbols=True, address=False)
        sym = gdb.find_pc_line(int(addr))
        source = str(sym.symtab) + ":" + str(sym.line)
        backtrace.append((int(addr), func, source))

    return backtrace
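
# A minimal usage sketch (the addresses are the illustrative ones reused from
# the Addr2Line docstring below, not real symbols):
#
#   for addr, func, source in backtrace([0x402cd484, 0x4028357e]):
#       gdb.write(f"{hex(addr)} {func} {source}\n")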


def lookup_type(name, block=None) -> gdb.Type:
    """Return the type object of a type name"""
    global g_type_cache

    key = (name, block)
    if key not in g_type_cache:
        try:
            g_type_cache[key] = (
                gdb.lookup_type(name, block=block) if block else gdb.lookup_type(name)
            )
        except gdb.error:
            g_type_cache[key] = None

    return g_type_cache[key]


long_type = lookup_type("long")


# Common Helper Functions


def get_long_type():
    """Return the cached long type object"""
    global long_type
    return long_type


def offset_of(typeobj, field):
    """Return the offset of a field in a structure"""
    element = gdb.Value(0).cast(typeobj)
    return int(str(element[field].address).split()[0], 16)


def container_of(ptr, typeobj, member):
    """Return pointer to containing data structure"""
    return (ptr.cast(get_long_type()) - offset_of(typeobj, member)).cast(typeobj)


class ContainerOf(gdb.Function):
    """Return pointer to containing data structure.

    $container_of(PTR, "TYPE", "ELEMENT"): Given PTR, return a pointer to the
    data structure of the type TYPE in which PTR is the address of ELEMENT.
    Note that TYPE and ELEMENT have to be quoted as strings."""

    def __init__(self):
        super(ContainerOf, self).__init__("container_of")

    def invoke(self, ptr, typename, elementname):
        return container_of(
            ptr, gdb.lookup_type(typename.string()).pointer(), elementname.string()
        )


ContainerOf()
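
# A hedged example of the convenience function from the GDB prompt; the struct
# and member names are illustrative assumptions, not defined in this file:
#
#   (gdb) p $container_of(&some_tcb->flink, "struct tcb_s", "flink")
#
# which mirrors the Python call
# container_of(ptr, gdb.lookup_type("struct tcb_s").pointer(), "flink").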


class MacroCtx:
    """
    A singleton class which initializes only once, caching a context of
    macro definitions that can be queried later.

    TODO: we only deal with a single ELF at the moment for simplicity.
    If you load more object files while debugging, only the first one
    loaded will be used to retrieve macro information.
    """

    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, "instance"):
            cls.instance = super(MacroCtx, cls).__new__(cls)
        return cls.instance

    def __init__(self, filename):
        self._macro_map = {}
        self._file = filename

        self._macro_map = fetch_macro_info(filename)

    @property
    def macro_map(self):
        return self._macro_map

    @property
    def objfile(self):
        return self._file


def gdb_eval_or_none(expression):
    """Evaluate an expression and return None if it fails"""
    try:
        return gdb.parse_and_eval(expression)
    except gdb.error:
        return None
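
# Minimal sketch: this helper is handy when a symbol may be compiled out of the
# target image, e.g.
#
#   regs = gdb_eval_or_none("g_current_regs")
#   if regs is None:
#       gdb.write("g_current_regs is not available in this build\n")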


def get_symbol_value(name, locspec="nx_start", cacheable=True):
    """Return the value of a symbol: variable, macro, etc."""
    global g_symbol_cache

    # If there is a current stack frame, GDB uses the macros in scope at that frame's source code line.
    # Otherwise, GDB uses the macros in scope at the current listing location.
    # Reference: https://sourceware.org/gdb/current/onlinedocs/gdb.html/Macros.html#Macros
    try:
        if not gdb.selected_frame():
            gdb.execute(f"list {locspec}", to_string=True)
            return gdb_eval_or_none(name)
    except gdb.error:
        pass

    # Try the current frame
    value = gdb_eval_or_none(name)
    if value:
        return value

    # Check if the symbol is already cached
    if cacheable and (name, locspec) in g_symbol_cache:
        return g_symbol_cache[(name, locspec)]

    # There's a current frame but no definition was found. We need a second
    # inferior without a valid frame in order to use the list command to set the scope.
    if len(gdb.inferiors()) == 1:
        gdb.execute(
            f"add-inferior -exec {gdb.objfiles()[0].filename} -no-connection",
            to_string=True,
        )
        g_symbol_cache = {}

    try:
        suppressed = "is on" in gdb.execute(
            "show suppress-cli-notifications", to_string=True
        )
    except gdb.error:
        # Treat as suppressed if the command is not available
        suppressed = True

    if not suppressed:
        # Disable notifications
        gdb.execute("set suppress-cli-notifications on")

    # Switch to inferior 2 and set the scope first
    gdb.execute("inferior 2", to_string=True)
    gdb.execute(f"list {locspec}", to_string=True)
    value = gdb_eval_or_none(name)

    if not value:
        # Try to expand the macro by reading the ELF
        global g_macro_ctx
        if not g_macro_ctx:
            gdb.write("No macro context found, trying to load from ELF\n")
            if len(gdb.objfiles()) > 0:
                g_macro_ctx = MacroCtx(gdb.objfiles()[0].filename)
            else:
                raise gdb.GdbError("An executable file must be provided")

        expr = try_expand(name, g_macro_ctx.macro_map)
        value = gdb_eval_or_none(expr)

    if cacheable:
        g_symbol_cache[(name, locspec)] = value

    # Switch back to inferior 1
    gdb.execute("inferior 1", to_string=True)

    if not suppressed:
        gdb.execute("set suppress-cli-notifications off")

    return value
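
# A hedged usage sketch; CONFIG_SMP is an illustrative NuttX configuration
# macro, not something guaranteed to be defined in every build:
#
#   if get_symbol_value("CONFIG_SMP"):
#       gdb.write("SMP kernel\n")
#
# Macros are resolved via GDB's macro tables when possible, falling back to
# expansion from the ELF through MacroCtx as implemented above.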


def get_field(val, key, default=None):
    """Get a field from a gdb.Value, return default if key not found"""
    try:
        return val[key] if val else default
    except gdb.error:
        return default


def get_bytes(val, size):
    """Convert a gdb value to a bytes object"""
    try:
        return val.bytes[:size]
    except AttributeError:  # Sometimes we don't have gdb.Value.bytes
        inf = gdb.inferiors()[0]
        mem = inf.read_memory(val.address, size)
        return mem.tobytes()


def import_check(module, name="", errmsg=""):
    """Import a module (or a name from it); return None and print errmsg on failure"""
    try:
        module = __import__(module, fromlist=[name])
    except ImportError:
        gdb.write(errmsg if errmsg else f"Failed to import {module}\n")
        return None

    return getattr(module, name) if name else module


def hexdump(address, size):
    """Dump size bytes of target memory at address in hex and ASCII"""
    inf = gdb.inferiors()[0]
    mem = inf.read_memory(address, size)
    data = mem.tobytes()

    for i in range(0, len(data), 16):
        chunk = data[i : i + 16]
        gdb.write(f"{i + address:08x} ")
        hex_values = " ".join(f"{byte:02x}" for byte in chunk)
        hex_display = f"{hex_values:<47}"
        gdb.write(hex_display)
        ascii_values = "".join(
            chr(byte) if 32 <= byte <= 126 else "." for byte in chunk
        )
        gdb.write(f" {ascii_values} \n")


def is_decimal(s):
    """Return True if the string is a decimal number"""
    return re.fullmatch(r"\d+", s) is not None


def is_hexadecimal(s):
    """Return True if the string is a hexadecimal number"""
    return re.fullmatch(r"0[xX][0-9a-fA-F]+|[0-9a-fA-F]+", s) is not None


class Hexdump(gdb.Command):
    """hexdump address/symbol <size>"""

    def __init__(self):
        super(Hexdump, self).__init__("hexdump", gdb.COMMAND_USER)

    def invoke(self, args, from_tty):
        argv = args.split(" ")
        address = 0
        size = 0

        if argv[0] == "":
            gdb.write("Usage: hexdump address/symbol <size>\n")
            return

        if is_decimal(argv[0]) or is_hexadecimal(argv[0]):
            address = int(argv[0], 0)
            size = int(argv[1], 0)
        else:
            var = gdb.parse_and_eval(f"{argv[0]}")
            address = int(var.address)
            size = int(var.type.sizeof)
            gdb.write(f"{argv[0]} {hex(address)} {int(size)}\n")

        hexdump(address, size)


Hexdump()
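
# Example invocations of the command above from the GDB prompt (the address and
# the symbol name are illustrative):
#
#   (gdb) hexdump 0x20000000 64
#   (gdb) hexdump g_pidhash
#
# When a symbol is given, the address and size are taken from the symbol itself.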


class Addr2Line(gdb.Command):
    """Convert addresses or expressions

    Usage: addr2line address1 address2 expression1
    Example: addr2line 0x1234 0x5678
             addr2line "0x1234 + pointer->abc" &var var->field function_name var
             addr2line $pc $r1 "$r2 + var"
             addr2line [24/08/29 20:51:02] [CPU1] [209] [ap] sched_dumpstack: backtrace| 0: 0x402cd484 0x4028357e
    """

    def __init__(self):
        super(Addr2Line, self).__init__("addr2line", gdb.COMMAND_USER)

    def invoke(self, args, from_tty):
        if not args:
            gdb.write(Addr2Line.__doc__ + "\n")
            return

        addresses = []
        for arg in shlex.split(args):
            if is_decimal(arg):
                addresses.append(int(arg))
            elif is_hexadecimal(arg):
                addresses.append(int(arg, 16))
            else:
                try:
                    var = gdb.parse_and_eval(f"{arg}")
                    addresses.append(var)
                except gdb.error as e:
                    gdb.write(f"Ignore {arg}: {e}\n")

        backtraces = backtrace(addresses)
        formatter = "{:<20} {:<32} {}\n"
        gdb.write(formatter.format("Address", "Symbol", "Source"))
        for addr, func, source in backtraces:
            gdb.write(formatter.format(hex(addr), func, source))


Addr2Line()


def nitems(array):
    """Return the number of elements in an array-typed gdb.Value"""
    array_type = array.type
    element_type = array_type.target()
    element_size = element_type.sizeof
    array_size = array_type.sizeof // element_size
    return array_size
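
# Sketch of intended use; g_assignedtasks is assumed here to be an array-typed
# symbol on the target (as on SMP builds), purely for illustration:
#
#   queues = gdb.parse_and_eval("g_assignedtasks")
#   ncpus = nitems(queues)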


# Machine Specific Helper Functions

BIG_ENDIAN = 0
LITTLE_ENDIAN = 1
target_endianness = None


def get_target_endianness():
    """Return the endianness of the target"""
    global target_endianness
    if not target_endianness:
        endian = gdb.execute("show endian", to_string=True)
        if "little endian" in endian:
            target_endianness = LITTLE_ENDIAN
        elif "big endian" in endian:
            target_endianness = BIG_ENDIAN
        else:
            raise gdb.GdbError("unknown endianness '{0}'".format(str(endian)))
    return target_endianness


def read_memoryview(inf, start, length):
    """Read memory from the target and return a memoryview object"""
    m = inf.read_memory(start, length)
    if type(m) is memoryview:
        return m
    return memoryview(m)


try:
    # For some prebuilt GDB, the python builtin module `struct` is not available
    import struct

    def read_u16(buffer, offset):
        """Read a 16-bit unsigned integer from a buffer"""
        if get_target_endianness() == LITTLE_ENDIAN:
            return struct.unpack_from("<H", buffer, offset)[0]
        else:
            return struct.unpack_from(">H", buffer, offset)[0]

    def read_u32(buffer, offset):
        """Read a 32-bit unsigned integer from a buffer"""
        if get_target_endianness() == LITTLE_ENDIAN:
            return struct.unpack_from("<I", buffer, offset)[0]
        else:
            return struct.unpack_from(">I", buffer, offset)[0]

    def read_u64(buffer, offset):
        """Read a 64-bit unsigned integer from a buffer"""
        if get_target_endianness() == LITTLE_ENDIAN:
            return struct.unpack_from("<Q", buffer, offset)[0]
        else:
            return struct.unpack_from(">Q", buffer, offset)[0]

except ModuleNotFoundError:

    def read_u16(buffer, offset):
        """Read a 16-bit unsigned integer from a buffer"""
        buffer_val = buffer[offset : offset + 2]
        value = [0, 0]

        if type(buffer_val[0]) is str:
            value[0] = ord(buffer_val[0])
            value[1] = ord(buffer_val[1])
        else:
            value[0] = buffer_val[0]
            value[1] = buffer_val[1]

        if get_target_endianness() == LITTLE_ENDIAN:
            return value[0] + (value[1] << 8)
        else:
            return value[1] + (value[0] << 8)

    def read_u32(buffer, offset):
        """Read a 32-bit unsigned integer from a buffer"""
        if get_target_endianness() == LITTLE_ENDIAN:
            return read_u16(buffer, offset) + (read_u16(buffer, offset + 2) << 16)
        else:
            return read_u16(buffer, offset + 2) + (read_u16(buffer, offset) << 16)

    def read_u64(buffer, offset):
        """Read a 64-bit unsigned integer from a buffer"""
        if get_target_endianness() == LITTLE_ENDIAN:
            return read_u32(buffer, offset) + (read_u32(buffer, offset + 4) << 32)
        else:
            return read_u32(buffer, offset + 4) + (read_u32(buffer, offset) << 32)


def read_ulong(buffer, offset):
    """Read a long from a buffer"""
    if get_long_type().sizeof == 8:
        return read_u64(buffer, offset)
    else:
        return read_u32(buffer, offset)
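
# A minimal sketch tying the readers together: fetch a pointer-sized value from
# target memory (the address is purely illustrative):
#
#   buf = read_memoryview(gdb.inferiors()[0], 0x20000000, 8)
#   val = read_ulong(buf, 0)  # honors target endianness and long size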


def bswap(val, size):
    """Reverses the byte order in a gdb.Value or int value of size bytes"""
    return int.from_bytes(int(val).to_bytes(size, byteorder="little"), byteorder="big")


def swap16(val):
    return bswap(val, 2)


def swap32(val):
    return bswap(val, 4)


def swap64(val):
    return bswap(val, 8)
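
# Worked examples, computed from the definitions above:
#
#   swap16(0x1234) == 0x3412
#   swap32(0x11223344) == 0x44332211
#   swap64(0x1122334455667788) == 0x8877665544332211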


target_arch = None


def is_target_arch(arch, exact=False):
    """
    For a non-exact match, this function returns True if the target
    architecture contains the keywords of an ARCH family. For example,
    "x86" is contained in "i386:x86-64".
    For an exact match, this function returns True only if the target
    architecture is exactly the same as ARCH.
    """
    if hasattr(gdb.Frame, "architecture"):
        archname = gdb.newest_frame().architecture().name()

        return arch in archname if not exact else arch == archname
    else:
        global target_arch
        if target_arch is None:
            target_arch = gdb.execute("show architecture", to_string=True)
            pattern = r'set to "(.*?)"\s*(\(currently (".*")\))?'
            match = re.search(pattern, target_arch)

            candidate = match.group(1)

            if candidate == "auto":
                target_arch = match.group(3)
            else:
                target_arch = candidate

        return arch in target_arch if not exact else arch == target_arch
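
# Examples based on the docstring above, assuming GDB reports the architecture
# as "i386:x86-64":
#
#   is_target_arch("x86")                      -> True  (family match)
#   is_target_arch("i386:x86-64", exact=True)  -> True
#   is_target_arch("i386", exact=True)         -> False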


# Kernel Specific Helper Functions


def is_target_smp():
    """Return True if the target is configured for SMP"""

    if gdb.lookup_global_symbol("g_assignedtasks"):
        return True
    else:
        return False


# FIXME: support RISC-V/X86/ARM64 etc.
def in_interrupt_context(cpuid=0):
    frame = gdb.selected_frame()

    if is_target_arch("arm"):
        xpsr = int(frame.read_register("xpsr"))
        return xpsr & 0xF
    else:
        # TODO: figure out a more proper way to detect if
        # we are in an interrupt context
        g_current_regs = gdb_eval_or_none("g_current_regs")
        return not g_current_regs or not g_current_regs[cpuid]


def get_arch_sp_name():
    """Return the name of the stack pointer register for the current arch"""
    if is_target_arch("arm") or is_target_arch("aarch64"):
        # arm and arm variants
        return "sp"
    elif is_target_arch("i386", exact=True):
        return "esp"
    elif is_target_arch("i386:x86-64", exact=True):
        return "rsp"
    else:
        # Default to use sp, add more archs if needed
        return "sp"


def get_arch_pc_name():
    """Return the name of the program counter register for the current arch"""
    if is_target_arch("arm") or is_target_arch("aarch64"):
        # arm and arm variants
        return "pc"
    elif is_target_arch("i386", exact=True):
        return "eip"
    elif is_target_arch("i386:x86-64", exact=True):
        return "rip"
    else:
        # Default to use pc, add more archs if needed
        return "pc"


def get_register_byname(regname, tcb=None):
    """Return the value of a register by name, optionally from a tcb's saved context"""
    frame = gdb.selected_frame()

    # If no tcb is given, read the register directly from
    # the frame cached by GDB
    if not tcb:
        return int(frame.read_register(regname))

    # Ok, let's take it from the context in the given tcb
    arch = frame.architecture()
    tcbinfo = gdb.parse_and_eval("g_tcbinfo")

    i = 0
    for reg in arch.registers():
        if reg.name == regname:
            break
        i += 1

    regs = tcb["xcp"]["regs"].cast(gdb.lookup_type("char").pointer())
    value = gdb.Value(regs + tcbinfo["reg_off"]["p"][i]).cast(
        gdb.lookup_type("uintptr_t").pointer()
    )[0]

    return int(value)


def get_sp(tcb=None):
    return get_register_byname(get_arch_sp_name(), tcb)


def get_pc(tcb=None):
    return get_register_byname(get_arch_pc_name(), tcb)
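
# Hedged sketch: without a tcb these read the current frame's registers; with a
# tcb they come from the saved context, e.g. (assuming a task with pid 1 exists):
#
#   tcb = get_tcb(1)
#   if tcb:
#       gdb.write(f"pc={hex(get_pc(tcb))} sp={hex(get_sp(tcb))}\n")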


def get_tcbs():
    """Return a list of all tcbs"""
    # In case we have created/deleted tasks at runtime, the tcbs will change
    # so keep it as fresh as possible
    pidhash = gdb.parse_and_eval("g_pidhash")
    npidhash = gdb.parse_and_eval("g_npidhash")

    return [pidhash[i] for i in range(0, npidhash) if pidhash[i]]


def get_tcb(pid):
    """Return the tcb of the task with the given pid, or None if not found"""
    g_pidhash = gdb.parse_and_eval("g_pidhash")
    g_npidhash = gdb.parse_and_eval("g_npidhash")
    tcb = g_pidhash[pid & (g_npidhash - 1)]
    if not tcb or pid != tcb["pid"]:
        return None

    return tcb
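
# Usage sketch combining the helpers above (pid 0 being the idle task is an
# assumption about the NuttX target, not derived from this file):
#
#   tcb = get_tcb(0)
#   if tcb:
#       gdb.write(f"{get_task_name(tcb)}\n")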


def get_task_name(tcb):
    """Return the name of the task as a Python string, or "" on failure"""
    try:
        name = tcb["name"].cast(gdb.lookup_type("char").pointer())
        return name.string()
    except gdb.error:
        return ""