tools/minidumpserver: refactor codes
VELAPLATFO-16411

1. refactor code style and simplify parse method
2. change mem find algorithm to binary search

Signed-off-by: xinbingnan <xinbingnan@xiaomi.com>
This commit is contained in: parent 9b68971853, commit 4c75318778
@@ -36,80 +36,10 @@ SHF_EXEC = 0x4
 SHF_WRITE_ALLOC = SHF_WRITE | SHF_ALLOC
 SHF_ALLOC_EXEC = SHF_ALLOC | SHF_EXEC

-GDB_SIGNAL_DEFAULT = 7
-
 logger = logging.getLogger()


-class dump_elf_file:
-    """
-    Class to parse ELF file for memory content in various sections.
-    There are read-only sections (e.g. text and rodata) where
-    the memory content does not need to be dumped via coredump
-    and can be retrieved from the ELF file.
-    """
-
-    def __init__(self, elffile: str):
-        self.elffile = elffile
-        self.fd = None
-        self.elf = None
-        self.memories = list()
-
-    def open(self):
-        self.fd = open(self.elffile, "rb")
-        self.elf = ELFFile(self.fd)
-
-    def close(self):
-        self.fd.close()
-
-    def parse(self):
-        if self.fd is None:
-            self.open()
-
-        for section in self.elf.iter_sections():
-            # REALLY NEED to match exact type as all other sections
-            # (debug, text, etc.) are descendants where
-            # isinstance() would match.
-            if (
-                type(section) is not elftools.elf.sections.Section
-            ):  # pylint: disable=unidiomatic-typecheck
-                continue
-
-            size = section["sh_size"]
-            flags = section["sh_flags"]
-            start = section["sh_addr"]
-            end = start + size - 1
-
-            store = False
-            desc = "?"
-
-            if section["sh_type"] == "SHT_PROGBITS":
-                if (flags & SHF_ALLOC_EXEC) == SHF_ALLOC_EXEC:
-                    # Text section
-                    store = True
-                    desc = "text"
-                elif (flags & SHF_WRITE_ALLOC) == SHF_WRITE_ALLOC:
-                    # Data section
-                    #
-                    # Running app changes the content so no need
-                    # to store
-                    pass
-                elif (flags & SHF_ALLOC) == SHF_ALLOC:
-                    # Read only data section
-                    store = True
-                    desc = "read-only data"
-
-            if store:
-                memory = {"start": start, "end": end, "data": section.data()}
-                logger.info(
-                    "ELF Section: 0x%x to 0x%x of size %d (%s)"
-                    % (memory["start"], memory["end"], len(memory["data"]), desc)
-                )
-
-                self.memories.append(memory)
-
-        return True
-
-
 reg_table = {
     "arm": {
         "R0": 0,
@@ -280,127 +210,183 @@ reg_fix_value = {
 }


-class dump_log_file:
+def str_get_after(s, sub):
+    index = s.find(sub)
+    if index == -1:
+        return None
+    return s[index + len(sub) :]
+
+
+def pack_memory(start, end, data):
+    return {"start": start, "end": end, "data": data}
+
+
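For reference, a quick sketch of what the new str_get_after helper returns; the sample strings below are made up for illustration:

    >>> str_get_after("[CPU0] up_dump_register: R0: 00000000", "up_dump_register:")
    ' R0: 00000000'
    >>> str_get_after("unrelated line", "up_dump_register:") is None
    True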
+class DumpELFFile:
+    """
+    Class to parse ELF file for memory content in various sections.
+    There are read-only sections (e.g. text and rodata) where
+    the memory content does not need to be dumped via coredump
+    and can be retrieved from the ELF file.
+    """
+
+    def __init__(self, elffile: str):
+        self.elffile = elffile
+        self.__memories = []
+
+    def parse(self):
+        self.__memories = []
+        elf = ELFFile.load_from_path(self.elffile)
+
+        for section in elf.iter_sections():
+            # REALLY NEED to match exact type as all other sections
+            # (debug, text, etc.) are descendants where
+            # isinstance() would match.
+            if (
+                type(section) is not elftools.elf.sections.Section
+            ):  # pylint: disable=unidiomatic-typecheck
+                continue
+
+            size = section["sh_size"]
+            flags = section["sh_flags"]
+            start = section["sh_addr"]
+            end = start + size - 1
+
+            store = False
+            desc = "?"
+
+            if section["sh_type"] == "SHT_PROGBITS":
+                if (flags & SHF_ALLOC_EXEC) == SHF_ALLOC_EXEC:
+                    # Text section
+                    store = True
+                    desc = "text"
+                elif (flags & SHF_WRITE_ALLOC) == SHF_WRITE_ALLOC:
+                    # Data section
+                    #
+                    # Running app changes the content so no need
+                    # to store
+                    pass
+                elif (flags & SHF_ALLOC) == SHF_ALLOC:
+                    # Read only data section
+                    store = True
+                    desc = "read-only data"
+
+            if store:
+                memory = pack_memory(start, end, section.data())
+                logger.debug(
+                    f"ELF Section: {hex(memory['start'])} to {hex(memory['end'])} of size {len(memory['data'])} ({desc})"
+                )
+
+                self.__memories.append(memory)
+
+        elf.close()
+        return True
+
+    def get_memories(self):
+        return self.__memories
+
+
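A minimal usage sketch of the new DumpELFFile API as defined above; the ELF path "nuttx" is only an example:

    elf = DumpELFFile("nuttx")   # hypothetical path to the ELF image
    elf.parse()                  # collects text and read-only data sections
    for m in elf.get_memories():
        print(hex(m["start"]), hex(m["end"]), len(m["data"]))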
+class DumpLogFile:
     def __init__(self, logfile: str):
         self.logfile = logfile
-        self.fd = None
         self.arch = ""
         self.registers = []
-        self.memories = list()
+        self.__memories = list()
+        self.reg_table = dict()

-    def open(self):
-        self.fd = open(self.logfile, "r")
+    def _init_register(self):
+        self.registers = [b"x"] * (max(self.reg_table.values()) + 1)

-    def close(self):
-        self.fd.close()
+    def _parse_register(self, line):
+        line = str_get_after(line, "up_dump_register:")
+        if line is None:
+            return False
+
+        line = line.strip()
+        # find register value
+        find_res = re.findall(r"(?P<REG>\w+): (?P<REGV>[0-9a-fA-F]+)", line)
+
+        for reg_name, reg_val in find_res:
+            if reg_name in self.reg_table:
+                reg_index = self.reg_table[reg_name]
+                self.registers[reg_index] = int(reg_val, 16)
+        return True
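To illustrate what the findall pattern in _parse_register extracts, here is a small sketch; the register line is invented and not taken from a real dump:

    import re

    line = "R0: 00000000 R1: 20001234 R2: 0000000a"
    print(re.findall(r"(?P<REG>\w+): (?P<REGV>[0-9a-fA-F]+)", line))
    # [('R0', '00000000'), ('R1', '20001234'), ('R2', '0000000a')]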
+    def _parse_fix_register(self, arch):
+        if arch in reg_fix_value:
+            for reg_name, reg_vals in reg_fix_value[arch].items():
+                reg_index = self.reg_table[reg_name]
+                self.registers[reg_index] = reg_vals
+
+    def _parse_stack(self, line, start, data):
+        line = str_get_after(line, "stack_dump:")
+        if line is None:
+            return None
+
+        line = line.strip()
+
+        # find stack-dump
+        match_res = re.match(r"(?P<ADDR_START>0x\w+): (?P<VALS>( ?\w+)+)", line)
+        if match_res is None:
+            return None
+
+        addr_start = int(match_res.groupdict()["ADDR_START"], 16)
+        if start + len(data) != addr_start:
+            # stack is not contiguous
+            if len(data) == 0:
+                start = addr_start
+            else:
+                self.__memories.append(pack_memory(start, start + len(data), data))
+                data = b""
+                start = addr_start
+
+        for val in match_res.groupdict()["VALS"].split():
+            data = data + struct.pack("<I", int(val, 16))
+
+        return start, data
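Similarly, a short sketch of how the _parse_stack regex splits one stack_dump line into a start address and packed little-endian words; the line content is illustrative only:

    import re
    import struct

    line = "0x20001000: 000000c8 deadbeef 00000000 20001020"
    m = re.match(r"(?P<ADDR_START>0x\w+): (?P<VALS>( ?\w+)+)", line)
    start = int(m.groupdict()["ADDR_START"], 16)
    data = b"".join(struct.pack("<I", int(v, 16)) for v in m.groupdict()["VALS"].split())
    print(hex(start), len(data))  # 0x20001000 16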
     def parse(self, arch):
+        self.reg_table = reg_table[arch]
+        self._init_register()
+
         data = bytes()
         start = 0
-        if self.fd is None:
-            self.open()
-
-        linenumber = 0
-        try:
-            while 1:
-                line = self.fd.readline()
-                if line == "":
-                    break
+
+        with open(self.logfile, "r") as f:
+            lines = f.readlines()

-                linenumber += 1
-                tmp = re.search("up_dump_register:", line)
-                if tmp is not None:
-                    # find arch
-                    if arch is None:
-                        self.arch = tmp.group(1)
-                    else:
-                        self.arch = arch
+        for line_num, line in enumerate(lines):
+            if line == "":
+                break

-                    if self.arch not in reg_table:
-                        logger.error("%s not supported" % (self.arch))
-                    # init register list
-                    if len(self.registers) == 0:
-                        for x in range(max(reg_table[self.arch].values()) + 1):
-                            self.registers.append(b"x")
-
-                    # find register value
-                    line = line[tmp.span()[1] :]
-                    line = line.replace("\n", " ")
-                    while 1:
-                        tmp = re.search("([^ ]+):", line)
-                        if tmp is None:
-                            break
-                        register = tmp.group(1)
-                        line = line[tmp.span()[1] :]
-                        tmp = re.search("([0-9a-fA-F]+) ", line)
-                        if tmp is None:
-                            break
-                        if register in reg_table[self.arch].keys():
-                            self.registers[reg_table[self.arch][register]] = int(
-                                "0x" + tmp.group().replace(" ", ""), 16
-                            )
-                        line = line[tmp.span()[1] :]
+            try:
+                if self._parse_register(line):
+                    continue

-                    if self.arch in reg_fix_value:
-                        for register in reg_fix_value[self.arch].keys():
-                            self.registers[reg_table[self.arch][register]] = reg_fix_value[
-                                self.arch
-                            ][register]
+                res = self._parse_stack(line, start, data)
+                if res:
+                    start, data = res
+                    continue

-                tmp = re.search("stack_dump:", line)
-                if tmp is not None:
-                    # find stackdump
-                    line = line[tmp.span()[1] :]
-                    tmp = re.search("([0-9a-fA-F]+):", line)
-                    if tmp is not None:
-                        line_start = int("0x" + tmp.group()[:-1], 16)
+            except Exception as e:
+                logger.error("parse log file error: %s line_number %d" % (e, line_num))
+                sys.exit(1)

-                    if start + len(data) != line_start:
-                        # stack is not contiguous
-                        if len(data) == 0:
-                            start = line_start
-                        else:
-                            memory = {
-                                "start": start,
-                                "end": start + len(data),
-                                "data": data,
-                            }
-                            self.memories.append(memory)
-                            data = b""
-                            start = line_start
+        self._parse_fix_register(arch)
+        if data:
+            self.__memories.append(pack_memory(start, start + len(data), data))

-                    line = line[tmp.span()[1] :]
-                    line = line.replace("\n", " ")
-
-                    while 1:
-                        # record stack value
-                        tmp = re.search(" ([0-9a-fA-F]+)", line)
-                        if tmp is None:
-                            break
-                        data = data + struct.pack(
-                            "<I", int("0x" + tmp.group().replace(" ", ""), 16)
-                        )
-                        line = line[tmp.span()[1] :]
-        except Exception as e:
-            logger.error("parse log file error: %s linenumber %d" % (e, linenumber))
-            os._exit(0)
-
-        if len(data):
-            memory = {"start": start, "end": start + len(data), "data": data}
-            self.memories.append(memory)
+    def get_memories(self):
+        return self.__memories
+GDB_SIGNAL_DEFAULT = 7
+
+
-class gdb_stub:
-    def __init__(self, logfile: dump_log_file, elffile: dump_elf_file):
+class GDBStub:
+    def __init__(self, logfile: DumpLogFile, elffile: DumpELFFile):
         self.logfile = logfile
         self.elffile = elffile
         self.socket = None
         self.gdb_signal = GDB_SIGNAL_DEFAULT
-        self.mem_regions = self.elffile.memories + self.logfile.memories
+        self.mem_regions = self.elffile.get_memories() + self.logfile.get_memories()
+
+        self.mem_regions.sort(key=lambda x: x["start"])

     def get_gdb_packet(self):
         socket = self.socket
@@ -511,19 +497,29 @@ class gdb_stub:

         # the 'm' packet for reading memory: m<addr>,<len>

         def get_mem_region(addr):
-            for r in self.mem_regions:
-                if r["start"] <= addr <= r["end"]:
-                    return r
+            left = 0
+            right = len(self.mem_regions) - 1
+            while left <= right:
+                mid = (left + right) // 2
+                if (
+                    self.mem_regions[mid]["start"]
+                    <= addr
+                    <= self.mem_regions[mid]["end"]
+                ):
+                    return self.mem_regions[mid]
+                elif addr < self.mem_regions[mid]["start"]:
+                    right = mid - 1
+                else:
+                    left = mid + 1

             return None
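The binary search above assumes mem_regions is sorted by "start" (the GDBStub constructor sorts it) and that regions do not overlap. A roughly equivalent sketch using the standard-library bisect module, shown only for comparison and not part of the patch:

    import bisect

    def get_mem_region_bisect(mem_regions, addr):
        # index of the last region whose start is <= addr
        starts = [r["start"] for r in mem_regions]
        i = bisect.bisect_right(starts, addr) - 1
        if i >= 0 and mem_regions[i]["start"] <= addr <= mem_regions[i]["end"]:
            return mem_regions[i]
        return None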
         # extract address and length from packet
         # and convert them into usable integer values
         addr, length = pkt[1:].split(b",")
-        s_addr = int(b"0x" + addr, 16)
-        length = int(b"0x" + length, 16)
+        s_addr = int(addr, 16)
+        length = int(length, 16)

         # FIXME: Need more efficient way of extracting memory content
         remaining = length
         addr = s_addr
         barray = b""
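The simplified conversion works because int() with base 16 accepts bytes and needs no "0x" prefix. A short sketch with a made-up packet:

    pkt = b"m20001000,8"  # read 8 bytes at 0x20001000 (illustrative)
    addr, length = pkt[1:].split(b",")
    s_addr = int(addr, 16)
    length = int(length, 16)
    print(hex(s_addr), length)  # 0x20001000 8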
@@ -533,10 +529,6 @@ class gdb_stub:
                 barray = None
                 break

-            if addr > r["end"]:
-                r = get_mem_region(addr)
-                continue
-
             offset = addr - r["start"]
             barray += r["data"][offset : offset + 1]

@@ -596,27 +588,33 @@ class gdb_stub:
             self.put_gdb_packet(b"")


-if __name__ == "__main__":
+def arg_parser():
     parser = argparse.ArgumentParser()

     parser.add_argument("-e", "--elffile", required=True, help="elffile")
     parser.add_argument("-l", "--logfile", required=True, help="logfile")
     parser.add_argument(
         "-a",
         "--arch",
-        help="select architecture,if not use this options,\
-             The architecture will be inferred from the logfile",
+        help="select architecture,if not use this options",
+        required=True,
+        choices=[arch for arch in reg_table.keys()],
     )
     parser.add_argument("-p", "--port", help="gdbport", type=int, default=1234)
     parser.add_argument("--debug", action="store_true", default=False)
+    return parser.parse_args()

-    args = parser.parse_args()

+def config_log(debug):
+    if debug:
+        logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.INFO)
+
+    logging.basicConfig(format="[%(levelname)s][%(name)s] %(message)s")
+
+
+def main(args):
     if not os.path.isfile(args.elffile):
         logger.error(f"Cannot find file {args.elffile}, exiting...")
         sys.exit(1)
@@ -625,18 +623,14 @@ if __name__ == "__main__":
         logger.error(f"Cannot find file {args.logfile}, exiting...")
         sys.exit(1)

-    if args.debug:
-        logger.setLevel(logging.DEBUG)
-    else:
-        logger.setLevel(logging.INFO)
+    config_log(args.debug)

-    log = dump_log_file(args.logfile)
+    log = DumpLogFile(args.logfile)
     log.parse(args.arch)
-    elf = dump_elf_file(args.elffile)
+    elf = DumpELFFile(args.elffile)
     elf.parse()

-    gdbstub = gdb_stub(log, elf)
-    logging.basicConfig(format="[%(levelname)s][%(name)s] %(message)s")
+    gdb_stub = GDBStub(log, elf)

     gdbserver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

@@ -649,6 +643,7 @@ if __name__ == "__main__":

     logger.info(f"Waiting GDB connection on port {args.port} ...")
     logger.info("Press Ctrl+C to stop ...")
+    logger.info(f'Hint: gdb {args.elffile} -ex "target remote localhost:{args.port}"')

     while True:
         try:
@@ -656,8 +651,12 @@ if __name__ == "__main__":

             if conn:
                 logger.info(f"Accepted GDB connection from {remote}")
-                gdbstub.run(conn)
+                gdb_stub.run(conn)
         except KeyboardInterrupt:
             break

     gdbserver.close()
+
+
+if __name__ == "__main__":
+    main(arg_parser())
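With this change --arch is required and must be one of the reg_table keys (e.g. arm). Assuming the script file is named minidumpserver.py and the ELF/log file names below are examples, a typical session might look like:

    python3 minidumpserver.py -e nuttx -l crash.log -a arm -p 1234
    # in another terminal:
    gdb nuttx -ex "target remote localhost:1234"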