Mirror of https://github.com/DarkFlippers/unleashed-firmware.git, synced 2024-11-23 10:01:58 +03:00

Debug: revert cortex debug to lxml and drop DWT (#2651)

* Debug: revert cortex debug to lxml
* Debug: update PyCortexMDebug readme
* fbt: moved "debug" dir to "scripts" subfolder
* ufbt: added missing debug_other & debug_other_blackmagic targets; github: fixed script bundling
* lint: fixed formatting on debug scripts
* vscode: updated configuration for debug dir changes

Co-authored-by: hedger <hedger@users.noreply.github.com>
Co-authored-by: hedger <hedger@nanode.su>

This commit is contained in:
parent 241b4ef6e4
commit f57f0efc48
2 .github/workflows/build.yml (vendored)
@@ -60,7 +60,7 @@ jobs:
- name: 'Bundle scripts'
  if: ${{ !github.event.pull_request.head.repo.fork }}
  run: |
    tar czpf artifacts/flipper-z-any-scripts-${SUFFIX}.tgz scripts debug
    tar czpf artifacts/flipper-z-any-scripts-${SUFFIX}.tgz scripts

- name: 'Build the firmware'
  run: |
31 .vscode/example/launch.json (vendored)
@@ -27,22 +27,21 @@
"type": "cortex-debug",
"servertype": "openocd",
"device": "stlink",
"svdFile": "./debug/STM32WB55_CM4.svd",
"svdFile": "./scripts/debug/STM32WB55_CM4.svd",
// If you're debugging early in the boot process, before OS scheduler is running,
// you have to comment out the following line.
"rtos": "FreeRTOS",
"configFiles": [
    "interface/stlink.cfg",
    "./debug/stm32wbx.cfg",
    "./scripts/debug/stm32wbx.cfg",
],
"postAttachCommands": [
    "source debug/flipperversion.py",
    "source scripts/debug/flipperversion.py",
    "fw-version",
    // "compare-sections",
    "source debug/flipperapps.py",
    "source scripts/debug/flipperapps.py",
    "fap-set-debug-elf-root build/latest/.extapps",
    // "source debug/FreeRTOS/FreeRTOS.py",
    // "svd_load debug/STM32WB55_CM4.svd"
    // "source scripts/debug/FreeRTOS/FreeRTOS.py",
]
// "showDevDebugOutput": "raw",
},
@@ -54,16 +53,16 @@
"type": "cortex-debug",
"servertype": "external",
"gdbTarget": "${input:BLACKMAGIC}",
"svdFile": "./debug/STM32WB55_CM4.svd",
"svdFile": "./scripts/debug/STM32WB55_CM4.svd",
"rtos": "FreeRTOS",
"postAttachCommands": [
    "monitor swdp_scan",
    "attach 1",
    "set confirm off",
    "set mem inaccessible-by-default off",
    "source debug/flipperversion.py",
    "source scripts/debug/flipperversion.py",
    "fw-version",
    "source debug/flipperapps.py",
    "source scripts/debug/flipperapps.py",
    "fap-set-debug-elf-root build/latest/.extapps",
    // "compare-sections",
]
@@ -78,12 +77,12 @@
"servertype": "jlink",
"interface": "swd",
"device": "STM32WB55RG",
"svdFile": "./debug/STM32WB55_CM4.svd",
"svdFile": "./scripts/debug/STM32WB55_CM4.svd",
"rtos": "FreeRTOS",
"postAttachCommands": [
    "source debug/flipperversion.py",
    "source scripts/debug/flipperversion.py",
    "fw-version",
    "source debug/flipperapps.py",
    "source scripts/debug/flipperapps.py",
    "fap-set-debug-elf-root build/latest/.extapps",
]
// "showDevDebugOutput": "raw",
@@ -96,16 +95,16 @@
"type": "cortex-debug",
"servertype": "openocd",
"device": "cmsis-dap",
"svdFile": "./debug/STM32WB55_CM4.svd",
"svdFile": "./scripts/debug/STM32WB55_CM4.svd",
"rtos": "FreeRTOS",
"configFiles": [
    "interface/cmsis-dap.cfg",
    "./debug/stm32wbx.cfg",
    "./scripts/debug/stm32wbx.cfg",
],
"postAttachCommands": [
    "source debug/flipperversion.py",
    "source scripts/debug/flipperversion.py",
    "fw-version",
    "source debug/flipperapps.py",
    "source scripts/debug/flipperapps.py",
    "fap-set-debug-elf-root build/latest/.extapps",
],
// "showDevDebugOutput": "raw",
@@ -1,84 +0,0 @@
PyCortexMDebug
==============

*A set of GDB/Python-based utilities to make life debugging ARM Cortex-M processors a bit easier*

It will consist of several modules which will hopefully become integrated as they evolve. Presently, there is only one:

## SVD
ARM defines an SVD (System View Description) file format in its CMSIS
standard as a means for Cortex-M-based chip manufacturers to provide a
common description of peripherals, registers, and register fields. You
can download SVD files for different manufacturers
[here](http://www.arm.com/products/processors/cortex-m/cortex-microcontroller-software-interface-standard.php).

My implementation so far has only tested STM32 chips but should hold for others. If others are like those from ST,
expect plenty of errors in the file. Like GPIOA having a register named GPIOB_OSPEEDR and lots of 16-bit registers
that are listed as 32!

The implementation consists of two components -- An xml parser module (pysvd) and a GDB file (gdb_svd).
I haven't yet worked out a perfect workflow for this, though it's quite easy to use when
you already tend to have a GDB initialization file for starting up OpenOCD and the like.
However your workflow works, just make sure to, in GDB:

    source gdb_svd.py
    svd_load [your_svd_file].svd

These files can be huge so it might take a second or two. Anyways, after that, you can do

    svd

to list available peripherals with descriptions. Or you can do

    svd [some_peripheral_name]

to see all of the registers (with their values) for a given peripheral. For more details, run

    svd [some_peripheral_name] [some_register_name]

to see all of the field values with descriptions.

You can add format modifiers like:

* `svd/x` will display values in hex
* `svd/o` will display values in octal
* `svd/t` or `svd/b` will display values in binary
* `svd/a` will display values in hex and try to resolve symbols from the values

All field values are displayed at the correct lengths as provided by the SVD files.
Also, tab completion exists for nearly everything! When in doubt, run `svd help`.

### TODO

Enable writing to registers and individual fields

### Bugs

There are probably a few. All planning, writing, and testing of this was done in an afternoon. There may be
some oddities in working with non-STM32 parts. I'll play with this when I start working with other
controllers again. If something's giving you trouble, describe the problem and it shall be fixed.

## DWT
The ARM Data Watchpoint and Trace Unit (DWT) offers data watchpoints and a series of gated cycle counters. For now,
I only support the raw cycle counter but facilities are in place to make use of others. As this is independent of the
specific device under test, commands are simple and you can configure a clock speed to get real time values from
counters.

    dwt configclk 48000000

will set the current core clock speed. Then

    dwt cyccnt reset
    dwt cyccnt enable

will reset and start the cycle counter. At any point

    dwt cycnt

will then indicate the number of cycles and amount of time that has passed.

## ITM/ETM support

This is not implemented yet. I want to have more complete support for some of the nicer debug and trace features
on Cortex-M processors. Parts of this will probably be dependent on OpenOCD and possibly on specific interfaces.
I'll try to avoid this where possible but can't make any promises.
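The cycle-to-time conversion described above is simply the raw count divided by the configured core clock; the removed Python helper in the next hunk does exactly this in its cycles_str method. A quick worked example, assuming the 48 MHz clock from the snippet above and a made-up counter value:

    # Worked example of the cycles-to-seconds conversion used by the `dwt` command;
    # the cycle count here is hypothetical, chosen only for illustration.
    clk = 48_000_000      # Hz, as configured with `dwt configclk 48000000`
    cycles = 4_800_000    # hypothetical DWT_CYCCNT reading
    print("%d cycles, %.3es" % (cycles, cycles * 1.0 / clk))
    # -> 4800000 cycles, 1.000e-01s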
@@ -1,160 +0,0 @@
#!/usr/bin/env python
"""
This file is part of PyCortexMDebug

PyCortexMDebug is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

PyCortexMDebug is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with PyCortexMDebug. If not, see <http://www.gnu.org/licenses/>.
"""

import gdb
import struct

DWT_CTRL = 0xE0001000
DWT_CYCCNT = 0xE0001004
DWT_CPICNT = 0xE0001008
DWT_EXTCNT = 0xE000100C
DWT_SLEEPCNT = 0xE0001010
DWT_LSUCNT = 0xE0001014
DWT_FOLDCNT = 0xE0001018
DWT_PCSR = 0xE000101C

prefix = "dwt : "


class DWT(gdb.Command):
    clk = None
    is_init = False

    def __init__(self):
        gdb.Command.__init__(self, "dwt", gdb.COMMAND_DATA)

    @staticmethod
    def read(address, bits=32):
        """Read from memory (using print) and return an integer"""
        value = gdb.selected_inferior().read_memory(address, bits / 8)
        return struct.unpack_from("<i", value)[0]

    @staticmethod
    def write(address, value, bits=32):
        """Set a value in memory"""
        gdb.selected_inferior().write_memory(address, bytes(value), bits / 8)

    def invoke(self, args, from_tty):
        if not self.is_init:
            self.write(0xE000EDFC, self.read(0xE000EDFC) | (1 << 24))
            self.write(DWT_CTRL, 0)
            self.is_init = True

        s = list(map(lambda x: x.lower(), str(args).split(" ")))
        # Check for empty command
        if s[0] in ["", "help"]:
            self.print_help()
            return ()

        if s[0] == "cyccnt":
            if len(s) > 1:
                if s[1][:2] == "en":
                    self.cyccnt_en()
                elif s[1][0] == "r":
                    self.cyccnt_reset()
                elif s[1][0] == "d":
                    self.cyccnt_dis()
            gdb.write(
                prefix
                + "CYCCNT ({}): ".format("ON" if (self.read(DWT_CTRL) & 1) else "OFF")
                + self.cycles_str(self.read(DWT_CYCCNT))
            )
        elif s[0] == "reset":
            if len(s) > 1:
                if s[1] == "cyccnt":
                    self.cyccnt_reset()
                    gdb.write(prefix + "CYCCNT reset\n")
                if s[1] == "counters":
                    self.cyccnt_reset()
                    gdb.write(prefix + "CYCCNT reset\n")
                else:
                    self.cyccnt_reset()
                    gdb.write(prefix + "CYCCNT reset\n")
            else:
                # Reset everything
                self.cyccnt_reset()
                gdb.write(prefix + "CYCCNT reset\n")
        elif s[0] == "configclk":
            if len(s) == 2:
                try:
                    self.clk = float(s[1])
                except:
                    self.print_help()
            else:
                self.print_help()
        else:
            # Try to figure out what stupid went on here
            gdb.write(args)
            self.print_help()

    @staticmethod
    def complete(text, word):
        text = str(text).lower()
        s = text.split(" ")

        commands = ["configclk", "reset", "cyccnt"]
        reset_commands = ["counters", "cyccnt"]
        cyccnt_commands = ["enable", "reset", "disable"]

        if len(s) == 1:
            return filter(lambda x: x.startswith(s[0]), commands)

        if len(s) == 2:
            if s[0] == "reset":
                return filter(lambda x: x.startswith(s[1]), reset_commands)
            if s[0] == "cyccnt":
                return filter(lambda x: x.startswith(s[1]), cyccnt_commands)

    def cycles_str(self, cycles):
        if self.clk:
            return "%d cycles, %.3es\n" % (cycles, cycles * 1.0 / self.clk)
        else:
            return "%d cycles"

    def cyccnt_en(self):
        self.write(DWT_CTRL, self.read(DWT_CTRL) | 1)

    def cyccnt_dis(self):
        self.write(DWT_CTRL, self.read(DWT_CTRL) & 0xFFFFFFFE)

    def cyccnt_reset(self, value=0):
        self.write(DWT_CYCCNT, value)

    def cpicnt_reset(self, value=0):
        self.write(DWT_CPICNT, value & 0xFF)

    @staticmethod
    def print_help():
        gdb.write("Usage:\n")
        gdb.write("=========\n")
        gdb.write("dwt:\n")
        gdb.write("\tList available peripherals\n")
        gdb.write("dwt configclk [Hz]:\n")
        gdb.write("\tSet clock for rendering time values in seconds\n")
        gdb.write("dwt reset:\n")
        gdb.write("\tReset everything in DWT\n")
        gdb.write("dwt reset counters:\n")
        gdb.write("\tReset all DWT counters\n")
        gdb.write("dwt cyccnt\n")
        gdb.write("\tDisplay the cycle count\n")
        gdb.write("\td(default):decimal, x: hex, o: octal, b: binary\n")
        return


# Registers our class to GDB when sourced:
DWT()
@@ -1,586 +0,0 @@
#!/usr/bin/env python
"Makes working with XML feel like you are working with JSON"

try:
    from defusedexpat import pyexpat as expat
except ImportError:
    from xml.parsers import expat

from xml.sax.saxutils import XMLGenerator
from xml.sax.xmlreader import AttributesImpl

try:  # pragma no cover
    from cStringIO import StringIO
except ImportError:  # pragma no cover
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO

from inspect import isgenerator


class ObjectDict(dict):
    def __getattr__(self, name):
        if name in self:
            return self[name]
        else:
            raise AttributeError("No such attribute: " + name)


try:  # pragma no cover
    _basestring = basestring
except NameError:  # pragma no cover
    _basestring = str
try:  # pragma no cover
    _unicode = unicode
except NameError:  # pragma no cover
    _unicode = str

__author__ = "Martin Blech"
__version__ = "0.12.0"
__license__ = "MIT"


class ParsingInterrupted(Exception):
    pass


class _DictSAXHandler(object):
    def __init__(
        self,
        item_depth=0,
        item_callback=lambda *args: True,
        xml_attribs=True,
        attr_prefix="@",
        cdata_key="#text",
        force_cdata=False,
        cdata_separator="",
        postprocessor=None,
        dict_constructor=ObjectDict,
        strip_whitespace=True,
        namespace_separator=":",
        namespaces=None,
        force_list=None,
        comment_key="#comment",
    ):
        self.path = []
        self.stack = []
        self.data = []
        self.item = None
        self.item_depth = item_depth
        self.xml_attribs = xml_attribs
        self.item_callback = item_callback
        self.attr_prefix = attr_prefix
        self.cdata_key = cdata_key
        self.force_cdata = force_cdata
        self.cdata_separator = cdata_separator
        self.postprocessor = postprocessor
        self.dict_constructor = dict_constructor
        self.strip_whitespace = strip_whitespace
        self.namespace_separator = namespace_separator
        self.namespaces = namespaces
        self.namespace_declarations = ObjectDict()
        self.force_list = force_list
        self.comment_key = comment_key

    def _build_name(self, full_name):
        if self.namespaces is None:
            return full_name
        i = full_name.rfind(self.namespace_separator)
        if i == -1:
            return full_name
        namespace, name = full_name[:i], full_name[i + 1 :]
        try:
            short_namespace = self.namespaces[namespace]
        except KeyError:
            short_namespace = namespace
        if not short_namespace:
            return name
        else:
            return self.namespace_separator.join((short_namespace, name))

    def _attrs_to_dict(self, attrs):
        if isinstance(attrs, dict):
            return attrs
        return self.dict_constructor(zip(attrs[0::2], attrs[1::2]))

    def startNamespaceDecl(self, prefix, uri):
        self.namespace_declarations[prefix or ""] = uri

    def startElement(self, full_name, attrs):
        name = self._build_name(full_name)
        attrs = self._attrs_to_dict(attrs)
        if attrs and self.namespace_declarations:
            attrs["xmlns"] = self.namespace_declarations
            self.namespace_declarations = ObjectDict()
        self.path.append((name, attrs or None))
        if len(self.path) > self.item_depth:
            self.stack.append((self.item, self.data))
            if self.xml_attribs:
                attr_entries = []
                for key, value in attrs.items():
                    key = self.attr_prefix + self._build_name(key)
                    if self.postprocessor:
                        entry = self.postprocessor(self.path, key, value)
                    else:
                        entry = (key, value)
                    if entry:
                        attr_entries.append(entry)
                attrs = self.dict_constructor(attr_entries)
            else:
                attrs = None
            self.item = attrs or None
            self.data = []

    def endElement(self, full_name):
        name = self._build_name(full_name)
        if len(self.path) == self.item_depth:
            item = self.item
            if item is None:
                item = None if not self.data else self.cdata_separator.join(self.data)

            should_continue = self.item_callback(self.path, item)
            if not should_continue:
                raise ParsingInterrupted()
        if len(self.stack):
            data = None if not self.data else self.cdata_separator.join(self.data)
            item = self.item
            self.item, self.data = self.stack.pop()
            if self.strip_whitespace and data:
                data = data.strip() or None
            if data and self.force_cdata and item is None:
                item = self.dict_constructor()
            if item is not None:
                if data:
                    self.push_data(item, self.cdata_key, data)
                self.item = self.push_data(self.item, name, item)
            else:
                self.item = self.push_data(self.item, name, data)
        else:
            self.item = None
            self.data = []
        self.path.pop()

    def characters(self, data):
        if not self.data:
            self.data = [data]
        else:
            self.data.append(data)

    def comments(self, data):
        if self.strip_whitespace:
            data = data.strip()
        self.item = self.push_data(self.item, self.comment_key, data)

    def push_data(self, item, key, data):
        if self.postprocessor is not None:
            result = self.postprocessor(self.path, key, data)
            if result is None:
                return item
            key, data = result
        if item is None:
            item = self.dict_constructor()
        try:
            value = item[key]
            if isinstance(value, list):
                value.append(data)
            else:
                item[key] = [value, data]
        except KeyError:
            if self._should_force_list(key, data):
                item[key] = [data]
            else:
                item[key] = data
        return item

    def _should_force_list(self, key, value):
        if not self.force_list:
            return False
        if isinstance(self.force_list, bool):
            return self.force_list
        try:
            return key in self.force_list
        except TypeError:
            return self.force_list(self.path[:-1], key, value)


def parse(
    xml_input,
    encoding=None,
    expat=expat,
    process_namespaces=False,
    namespace_separator=":",
    disable_entities=True,
    process_comments=False,
    **kwargs
):
    """Parse the given XML input and convert it into a dictionary.

    `xml_input` can either be a `string`, a file-like object, or a generator of strings.

    If `xml_attribs` is `True`, element attributes are put in the dictionary
    among regular child elements, using `@` as a prefix to avoid collisions. If
    set to `False`, they are just ignored.

    Simple example::

        >>> import xmltodict
        >>> doc = xmltodict.parse(\"\"\"
        ... <a prop="x">
        ...   <b>1</b>
        ...   <b>2</b>
        ... </a>
        ... \"\"\")
        >>> doc['a']['@prop']
        u'x'
        >>> doc['a']['b']
        [u'1', u'2']

    If `item_depth` is `0`, the function returns a dictionary for the root
    element (default behavior). Otherwise, it calls `item_callback` every time
    an item at the specified depth is found and returns `None` in the end
    (streaming mode).

    The callback function receives two parameters: the `path` from the document
    root to the item (name-attribs pairs), and the `item` (dict). If the
    callback's return value is false-ish, parsing will be stopped with the
    :class:`ParsingInterrupted` exception.

    Streaming example::

        >>> def handle(path, item):
        ...     print('path:%s item:%s' % (path, item))
        ...     return True
        ...
        >>> xmltodict.parse(\"\"\"
        ... <a prop="x">
        ...   <b>1</b>
        ...   <b>2</b>
        ... </a>\"\"\", item_depth=2, item_callback=handle)
        path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:1
        path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:2

    The optional argument `postprocessor` is a function that takes `path`,
    `key` and `value` as positional arguments and returns a new `(key, value)`
    pair where both `key` and `value` may have changed. Usage example::

        >>> def postprocessor(path, key, value):
        ...     try:
        ...         return key + ':int', int(value)
        ...     except (ValueError, TypeError):
        ...         return key, value
        >>> xmltodict.parse('<a><b>1</b><b>2</b><b>x</b></a>',
        ...                 postprocessor=postprocessor)
        ObjectDict([(u'a', ObjectDict([(u'b:int', [1, 2]), (u'b', u'x')]))])

    You can pass an alternate version of `expat` (such as `defusedexpat`) by
    using the `expat` parameter. E.g:

        >>> import defusedexpat
        >>> xmltodict.parse('<a>hello</a>', expat=defusedexpat.pyexpat)
        ObjectDict([(u'a', u'hello')])

    You can use the force_list argument to force lists to be created even
    when there is only a single child of a given level of hierarchy. The
    force_list argument is a tuple of keys. If the key for a given level
    of hierarchy is in the force_list argument, that level of hierarchy
    will have a list as a child (even if there is only one sub-element).
    The index_keys operation takes precedence over this. This is applied
    after any user-supplied postprocessor has already run.

    For example, given this input:
    <servers>
      <server>
        <name>host1</name>
        <os>Linux</os>
        <interfaces>
          <interface>
            <name>em0</name>
            <ip_address>10.0.0.1</ip_address>
          </interface>
        </interfaces>
      </server>
    </servers>

    If called with force_list=('interface',), it will produce
    this dictionary:
    {'servers':
      {'server':
        {'name': 'host1',
         'os': 'Linux'},
         'interfaces':
          {'interface':
            [ {'name': 'em0', 'ip_address': '10.0.0.1' } ] } } }

    `force_list` can also be a callable that receives `path`, `key` and
    `value`. This is helpful in cases where the logic that decides whether
    a list should be forced is more complex.


    If `process_comment` is `True` then comment will be added with comment_key
    (default=`'#comment'`) to then tag which contains comment

    For example, given this input:
    <a>
      <b>
        <!-- b comment -->
        <c>
          <!-- c comment -->
          1
        </c>
        <d>2</d>
      </b>
    </a>

    If called with process_comment=True, it will produce
    this dictionary:
    'a': {
        'b': {
            '#comment': 'b comment',
            'c': {

                '#comment': 'c comment',
                '#text': '1',
            },
            'd': '2',
        },
    }
    """
    handler = _DictSAXHandler(namespace_separator=namespace_separator, **kwargs)
    if isinstance(xml_input, _unicode):
        if not encoding:
            encoding = "utf-8"
        xml_input = xml_input.encode(encoding)
    if not process_namespaces:
        namespace_separator = None
    parser = expat.ParserCreate(encoding, namespace_separator)
    try:
        parser.ordered_attributes = True
    except AttributeError:
        # Jython's expat does not support ordered_attributes
        pass
    parser.StartNamespaceDeclHandler = handler.startNamespaceDecl
    parser.StartElementHandler = handler.startElement
    parser.EndElementHandler = handler.endElement
    parser.CharacterDataHandler = handler.characters
    if process_comments:
        parser.CommentHandler = handler.comments
    parser.buffer_text = True
    if disable_entities:
        try:
            # Attempt to disable DTD in Jython's expat parser (Xerces-J).
            feature = "http://apache.org/xml/features/disallow-doctype-decl"
            parser._reader.setFeature(feature, True)
        except AttributeError:
            # For CPython / expat parser.
            # Anything not handled ends up here and entities aren't expanded.
            parser.DefaultHandler = lambda x: None
            # Expects an integer return; zero means failure -> expat.ExpatError.
            parser.ExternalEntityRefHandler = lambda *x: 1
    if hasattr(xml_input, "read"):
        parser.ParseFile(xml_input)
    elif isgenerator(xml_input):
        for chunk in xml_input:
            parser.Parse(chunk, False)
        parser.Parse(b"", True)
    else:
        parser.Parse(xml_input, True)
    return handler.item


def _process_namespace(name, namespaces, ns_sep=":", attr_prefix="@"):
    if not namespaces:
        return name
    try:
        ns, name = name.rsplit(ns_sep, 1)
    except ValueError:
        pass
    else:
        ns_res = namespaces.get(ns.strip(attr_prefix))
        name = (
            "{}{}{}{}".format(
                attr_prefix if ns.startswith(attr_prefix) else "", ns_res, ns_sep, name
            )
            if ns_res
            else name
        )
    return name


def _emit(
    key,
    value,
    content_handler,
    attr_prefix="@",
    cdata_key="#text",
    depth=0,
    preprocessor=None,
    pretty=False,
    newl="\n",
    indent="\t",
    namespace_separator=":",
    namespaces=None,
    full_document=True,
    expand_iter=None,
):
    key = _process_namespace(key, namespaces, namespace_separator, attr_prefix)
    if preprocessor is not None:
        result = preprocessor(key, value)
        if result is None:
            return
        key, value = result
    if (
        not hasattr(value, "__iter__")
        or isinstance(value, _basestring)
        or isinstance(value, dict)
    ):
        value = [value]
    for index, v in enumerate(value):
        if full_document and depth == 0 and index > 0:
            raise ValueError("document with multiple roots")
        if v is None:
            v = ObjectDict()
        elif isinstance(v, bool):
            if v:
                v = _unicode("true")
            else:
                v = _unicode("false")
        elif not isinstance(v, dict):
            if (
                expand_iter
                and hasattr(v, "__iter__")
                and not isinstance(v, _basestring)
            ):
                v = ObjectDict(((expand_iter, v),))
            else:
                v = _unicode(v)
        if isinstance(v, _basestring):
            v = ObjectDict(((cdata_key, v),))
        cdata = None
        attrs = ObjectDict()
        children = []
        for ik, iv in v.items():
            if ik == cdata_key:
                cdata = iv
                continue
            if ik.startswith(attr_prefix):
                ik = _process_namespace(
                    ik, namespaces, namespace_separator, attr_prefix
                )
                if ik == "@xmlns" and isinstance(iv, dict):
                    for k, v in iv.items():
                        attr = "xmlns{}".format(":{}".format(k) if k else "")
                        attrs[attr] = _unicode(v)
                    continue
                if not isinstance(iv, _unicode):
                    iv = _unicode(iv)
                attrs[ik[len(attr_prefix) :]] = iv
                continue
            children.append((ik, iv))
        if pretty:
            content_handler.ignorableWhitespace(depth * indent)
        content_handler.startElement(key, AttributesImpl(attrs))
        if pretty and children:
            content_handler.ignorableWhitespace(newl)
        for child_key, child_value in children:
            _emit(
                child_key,
                child_value,
                content_handler,
                attr_prefix,
                cdata_key,
                depth + 1,
                preprocessor,
                pretty,
                newl,
                indent,
                namespaces=namespaces,
                namespace_separator=namespace_separator,
                expand_iter=expand_iter,
            )
        if cdata is not None:
            content_handler.characters(cdata)
        if pretty and children:
            content_handler.ignorableWhitespace(depth * indent)
        content_handler.endElement(key)
        if pretty and depth:
            content_handler.ignorableWhitespace(newl)


def unparse(
    input_dict,
    output=None,
    encoding="utf-8",
    full_document=True,
    short_empty_elements=False,
    **kwargs
):
    """Emit an XML document for the given `input_dict` (reverse of `parse`).

    The resulting XML document is returned as a string, but if `output` (a
    file-like object) is specified, it is written there instead.

    Dictionary keys prefixed with `attr_prefix` (default=`'@'`) are interpreted
    as XML node attributes, whereas keys equal to `cdata_key`
    (default=`'#text'`) are treated as character data.

    The `pretty` parameter (default=`False`) enables pretty-printing. In this
    mode, lines are terminated with `'\n'` and indented with `'\t'`, but this
    can be customized with the `newl` and `indent` parameters.

    """
    if full_document and len(input_dict) != 1:
        raise ValueError("Document must have exactly one root.")
    must_return = False
    if output is None:
        output = StringIO()
        must_return = True
    if short_empty_elements:
        content_handler = XMLGenerator(output, encoding, True)
    else:
        content_handler = XMLGenerator(output, encoding)
    if full_document:
        content_handler.startDocument()
    for key, value in input_dict.items():
        _emit(key, value, content_handler, full_document=full_document, **kwargs)
    if full_document:
        content_handler.endDocument()
    if must_return:
        value = output.getvalue()
        try:  # pragma no cover
            value = value.decode(encoding)
        except AttributeError:  # pragma no cover
            pass
        return value


if __name__ == "__main__":  # pragma: no cover
    import sys
    import marshal

    try:
        stdin = sys.stdin.buffer
        stdout = sys.stdout.buffer
    except AttributeError:
        stdin = sys.stdin
        stdout = sys.stdout

    (item_depth,) = sys.argv[1:]
    item_depth = int(item_depth)

    def handle_item(path, item):
        marshal.dump((path, item), stdout)
        return True

    try:
        root = parse(
            stdin,
            item_depth=item_depth,
            item_callback=handle_item,
            dict_constructor=dict,
        )
        if item_depth == 0:
            handle_item([], root)
    except KeyboardInterrupt:
        pass
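The removed module above is a vendored variant of xmltodict whose default dict_constructor is ObjectDict, so parsed nodes can be read with attribute access as well as key lookups. A minimal sketch of that behavior, assuming the module is importable as a flat `x2d` (the real package path is not shown in this hunk):

    # Minimal sketch of the ObjectDict behavior described in the module above;
    # the flat `x2d` import path is an assumption, not the repository layout.
    import x2d

    doc = x2d.parse("<device><name>STM32WB55</name></device>")

    # Key-style access, as in plain xmltodict:
    print(doc["device"]["name"])   # -> 'STM32WB55'

    # Attribute-style access provided by ObjectDict.__getattr__:
    print(doc.device.name)         # -> 'STM32WB55'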
@@ -29,7 +29,6 @@ from FreeRTOSgdb.GDBCommands import ShowQueueInfo

class Scheduler:
    def __init__(self):

        self._blocked = ListInspector("xSuspendedTaskList")
        self._delayed1 = ListInspector("xDelayedTaskList1")
        self._delayed2 = ListInspector("xDelayedTaskList2")
@@ -61,7 +61,6 @@ class ShowQueueInfo(gdb.Command):
        if maxCount == 0:
            print(outputFmt % (q.GetName(), q.GetQueueMessagesWaiting(), "", ""))
        else:

            for i in range(0, maxCount):
                txName = ""
                if i < len(sendList):
@@ -48,7 +48,6 @@ class HandleRegistry:
            print("%d: %3s %16s" % (i, h, name))

    def FilterBy(self, qMode):

        """Retrieve a List of Mutex Queue Handles"""
        resp = []
        for i in range(self._minIndex, self._maxIndex):
@@ -56,7 +56,6 @@ class ListInspector:
        of some of the TCB Task lists.
        """
        if self._list != None:

            CastType = None
            if CastTypeStr != None:
                if type(CastTypeStr) == str:
@@ -73,7 +72,6 @@ class ListInspector:
            index = self._list["pxIndex"]

            if numElems > 0 and numElems < 200:

                if startElem == 0:
                    curr = index
                else:
@@ -47,7 +47,6 @@ QueueMode.Map = QueueMap


class QueueInspector:

    QueueType = gdb.lookup_type("Queue_t")

    def __init__(self, handle):
@@ -11,7 +11,6 @@ import gdb


class TaskInspector:

    TCBType = gdb.lookup_type("TCB_t")

    def __init__(self, handle):
@@ -28,7 +28,5 @@ directory = path.abspath(directory)
sys.path.append(directory)

from cmdebug.svd_gdb import LoadSVD
from cmdebug.dwt_gdb import DWT

DWT()
LoadSVD()
35 scripts/debug/PyCortexMDebug/README.md (Normal file)
@@ -0,0 +1,35 @@
PyCortexMDebug
==============

## SVD

ARM defines an SVD (System View Description) file format in its CMSIS standard as a means for Cortex-M-based chip manufacturers to provide a common description of peripherals, registers, and register fields. You can download SVD files for different manufacturers [here](http://www.arm.com/products/processors/cortex-m/cortex-microcontroller-software-interface-standard.php).

The implementation consists of two components -- An lxml-based parser module (pysvd) and a GDB file (gdb_svd). I haven't yet worked out a perfect workflow for this, though it's quite easy to use when you already tend to have a GDB initialization file for starting up OpenOCD and the like. However your workflow works, just make sure to, in GDB:

    source gdb_svd.py
    svd_load [your_svd_file].svd

These files can be huge so it might take a second or two. Anyways, after that, you can do

    svd

to list available peripherals with descriptions. Or you can do

    svd [some_peripheral_name]

to see all of the registers (with their values) for a given peripheral. For more details, run

    svd [some_peripheral_name] [some_register_name]

to see all of the field values with descriptions.

You can add format modifiers like:

* `svd/x` will display values in hex
* `svd/o` will display values in octal
* `svd/t` or `svd/b` will display values in binary
* `svd/a` will display values in hex and try to resolve symbols from the values

All field values are displayed at the correct lengths as provided by the SVD files.
Also, tab completion exists for nearly everything! When in doubt, run `svd help`.
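For readers unfamiliar with how a command like `svd` ends up inside GDB: gdb_svd registers it through GDB's Python API. The sketch below shows only that registration pattern, not the real gdb_svd implementation; the class name and the placeholder body are illustrative.

    # Illustrative sketch of how a custom GDB command such as `svd` is
    # registered via the GDB Python API. This is not the gdb_svd source.
    import gdb

    class SVDCommand(gdb.Command):
        """Dump peripheral/register info loaded from an SVD file."""

        def __init__(self):
            # "svd" becomes a new top-level command at the GDB prompt.
            super().__init__("svd", gdb.COMMAND_DATA)

        def invoke(self, args, from_tty):
            # A real implementation would look up peripherals and registers here.
            gdb.write("svd invoked with: {}\n".format(args))

    SVDCommand()  # instantiating the class registers the command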
@@ -16,15 +16,14 @@ You should have received a copy of the GNU General Public License
along with PyCortexMDebug. If not, see <http://www.gnu.org/licenses/>.
"""

from collections import OrderedDict
from . import x2d

import traceback
import warnings
import pickle
import lxml.objectify as objectify
import sys
from collections import OrderedDict
import os
import pickle
import traceback
import re
import warnings


class SmartDict:
@@ -127,31 +126,26 @@ class SVDFile:

    def __init__(self, fname):
        """

        Args:
            fname: Filename for the SVD file
        """
        f = objectify.parse(os.path.expanduser(fname))
        root = f.getroot()
        periph = root.peripherals.getchildren()
        self.peripherals = SmartDict()
        self.base_address = 0

        xml_file_name = os.path.expanduser(fname)
        pickle_file_name = xml_file_name + ".pickle"
        root = None
        if os.path.exists(pickle_file_name):
            print("Loading pickled SVD")
            root = pickle.load(open(pickle_file_name, "rb"))
        else:
            print("Loading XML SVD and pickling it")
            root = x2d.parse(open(xml_file_name, "rb"))
            pickle.dump(root, open(pickle_file_name, "wb"), pickle.HIGHEST_PROTOCOL)
        print("Processing SVD tree")
        # XML elements
        for p in root["device"]["peripherals"]["peripheral"]:
        for p in periph:
            try:
                self.peripherals[p["name"]] = SVDPeripheral(p, self)
                if p.tag == "peripheral":
                    self.peripherals[str(p.name)] = SVDPeripheral(p, self)
                else:
                    # This is some other tag
                    pass
            except SVDNonFatalError as e:
                # print(e)
                pass
        print("SVD Ready")
                print(e)


def add_register(parent, node):
@@ -271,11 +265,11 @@ class SVDPeripheral:
        self.parent_base_address = parent.base_address

        # Look for a base address, as it is required
        if "baseAddress" not in svd_elem:
        if not hasattr(svd_elem, "baseAddress"):
            raise SVDNonFatalError("Periph without base address")
        self.base_address = int(str(svd_elem.baseAddress), 0)
        if "@derivedFrom" in svd_elem:
            derived_from = svd_elem["@derivedFrom"]
        if "derivedFrom" in svd_elem.attrib:
            derived_from = svd_elem.attrib["derivedFrom"]
        try:
            self.name = str(svd_elem.name)
        except AttributeError:
@@ -301,14 +295,16 @@ class SVDPeripheral:
        self.clusters = SmartDict()

        if hasattr(svd_elem, "registers"):
            if "register" in svd_elem.registers:
                for r in svd_elem.registers.register:
                    if isinstance(r, x2d.ObjectDict):
                        add_register(self, r)
            if "cluster" in svd_elem.registers:
                for c in svd_elem.registers.cluster:
                    if isinstance(c, x2d.ObjectDict):
                        add_cluster(self, c)
            registers = [
                r
                for r in svd_elem.registers.getchildren()
                if r.tag in ["cluster", "register"]
            ]
            for r in registers:
                if r.tag == "cluster":
                    add_cluster(self, r)
                elif r.tag == "register":
                    add_register(self, r)

    def refactor_parent(self, parent):
        self.parent_base_address = parent.base_address
@@ -342,11 +338,11 @@ class SVDPeripheralRegister:
        else:
            self.size = 0x20
        self.fields = SmartDict()
        if "fields" in svd_elem:
        if hasattr(svd_elem, "fields"):
            # Filter fields to only consider those of tag "field"
            for f in svd_elem.fields.field:
                if isinstance(f, x2d.ObjectDict):
                    self.fields[str(f.name)] = SVDPeripheralRegisterField(f, self)
            fields = [f for f in svd_elem.fields.getchildren() if f.tag == "field"]
            for f in fields:
                self.fields[str(f.name)] = SVDPeripheralRegisterField(f, self)

    def refactor_parent(self, parent):
        self.parent_base_address = parent.base_address
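After this revert, SVDFile parses the SVD directly with lxml.objectify (no x2d conversion and no pickle cache). A rough usage sketch of the loader shown above, assuming it is importable as cmdebug.pysvd and using the SVD file referenced elsewhere in this commit; the GPIOA peripheral name is an example and depends on the SVD contents:

    # Rough sketch of the lxml-based loader above; the cmdebug.pysvd import
    # path and the GPIOA peripheral name are assumptions for illustration.
    from cmdebug.pysvd import SVDFile

    svd = SVDFile("scripts/debug/STM32WB55_CM4.svd")
    gpioa = svd.peripherals["GPIOA"]   # SmartDict of SVDPeripheral objects
    print(hex(gpioa.base_address))     # base address parsed from <baseAddress>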
@@ -18,7 +18,7 @@ def GetDevices(env):
def generate(env, **kw):
    env.AddMethod(GetDevices)
    env.SetDefault(
        FBT_DEBUG_DIR="${ROOT_DIR}/debug",
        FBT_DEBUG_DIR="${FBT_SCRIPT_DIR}/debug",
    )

    if (adapter_serial := env.subst("$OPENOCD_ADAPTER_SERIAL")) != "auto":
@@ -170,7 +170,6 @@ class Main(App):
            "update.dir",
            "sdk_headers.dir",
            "lib.dir",
            "debug.dir",
            "scripts.dir",
        )
@@ -186,6 +186,33 @@ dist_env.PhonyTarget(
    FBT_FAP_DEBUG_ELF_ROOT=path_as_posix(dist_env.subst("$FBT_FAP_DEBUG_ELF_ROOT")),
)

# Debug alien elf
debug_other_opts = [
    "-ex",
    "source ${FBT_DEBUG_DIR}/PyCortexMDebug/PyCortexMDebug.py",
    "-ex",
    "source ${FBT_DEBUG_DIR}/flipperversion.py",
    "-ex",
    "fw-version",
]

dist_env.PhonyTarget(
    "debug_other",
    "${GDBPYCOM}",
    GDBOPTS="${GDBOPTS_BASE}",
    GDBREMOTE="${OPENOCD_GDB_PIPE}",
    GDBPYOPTS=debug_other_opts,
)

dist_env.PhonyTarget(
    "debug_other_blackmagic",
    "${GDBPYCOM}",
    GDBOPTS="${GDBOPTS_BASE} ${GDBOPTS_BLACKMAGIC}",
    GDBREMOTE="${BLACKMAGIC_ADDR}",
    GDBPYOPTS=debug_other_opts,
)


dist_env.PhonyTarget(
    "flash_blackmagic",
    "$GDB $GDBOPTS $SOURCES $GDBFLASH",
@@ -78,10 +78,8 @@ def generate(env, **kw):
    env.SetDefault(
        # Paths
        SDK_DEFINITION=env.File(sdk_data["sdk_symbols"]),
        FBT_DEBUG_DIR=pathlib.Path(
            sdk_current_sdk_dir_node.Dir(sdk_components["debug.dir"]).abspath
        ).as_posix(),
        FBT_SCRIPT_DIR=scripts_dir,
        FBT_DEBUG_DIR=scripts_dir.Dir("debug"),
        LIBPATH=sdk_current_sdk_dir_node.Dir(sdk_components["lib.dir"]),
        FW_ELF=sdk_current_sdk_dir_node.File(sdk_components["firmware.elf"]),
        FW_BIN=sdk_current_sdk_dir_node.File(sdk_components["full.bin"]),