scripts: edts: Add extended DTS database
The Extended Device Tree Specification (EDTS) database collates device tree (DTS) information with information taken from the device tree bindings. The EDTS database may be loaded from a JSON file, stored to a JSON file, or extracted from the DTS files and the bindings YAML files.

The commit integrates database development done in zephyrproject-rtos#9876, which was based on zephyrproject-rtos#6762.

Major differences/improvements compared to zephyrproject-rtos#9876:

- The database now has its own extraction function that can be used instead of e.g. extract_dts_includes. The extraction function follows the design of the extract_dts_includes script and the additions that were done in zephyrproject-rtos#9876. It is restructured, and several globals are now classes and objects. All functionality of extract_dts_includes related to the generation of defines is not part of the database extract function; its sole purpose is to fill the database directly from the compiled DTS file.
- The database got its own directory 'edtsdb' to structure all files related to the database.
- The EDTSDevice class from zephyrproject-rtos#9876 was enhanced to allow devices to access the database they are taken from.

Compatibility with zephyrproject-rtos#9876:

- The consumer and provider APIs and the internal structure of the database are copied from zephyrproject-rtos#9876. The API should be fully compatible.
- The EDTSDevice class is copied from zephyrproject-rtos#9876. The device API should be fully compatible except for the unique id feature; the device name can be used instead.

Signed-off-by lines from zephyrproject-rtos#9876 are added to attribute the changes done there and copied here.

Signed-off-by: Erwan Gouriou <erwan.gouriou@linaro.org>
Signed-off-by: Kumar Gala <kumar.gala@linaro.org>
Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
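As a rough sketch of the workflow described above (not part of the commit itself), a previously extracted database can be reloaded from its JSON file and inspected. The import path and the file name 'edts.json' are placeholder assumptions, and the sketch presumes that EDTSDb picks up load() and the _edts dict from the consumer mixin added below.

import pprint
from edtsdb.database import EDTSDb   # assumed import path; the commit itself uses a relative import

edts = EDTSDb()
edts.load("edts.json")               # EDTSConsumerMixin.load() reads the JSON database
pprint.pprint(dict(edts._edts))      # same dump that EDTSDatabase.main() performs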
Showing 23 changed files with 2,781 additions and 0 deletions.
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
#
# Copyright (c) 2018 Bobby Noelte
# Copyright (c) 2018 Linaro Limited
#
# SPDX-License-Identifier: Apache-2.0
#

from pathlib import Path
import argparse
import pprint

from .edtsdb.database import EDTSDb
from .edtsdb.device import EDTSDevice

##
# @brief Extended DTS database
#
# Database schema:
#
# _edts dict(
#     'aliases': dict(alias : sorted list(device-id)),
#     'chosen': dict(chosen),
#     'devices': dict(device-id : device-struct),
#     'compatibles': dict(compatible : sorted list(device-id)),
#     'device-types': dict(device-type : sorted list(compatible)),
#     ...
# )
#
# device-struct dict(
#     'device-id' : device-id,
#     'compatible' : list(compatible) or compatible,
#     'label' : label,
#     property-name : property-value ...
# )
#
# Database types:
#
# device-id: opaque id for a device (do not use for other purposes),
# compatible: any of ['st,stm32-spi-fifo', ...] - 'compatible' from <binding>.yaml
# label: any of ['UART_0', 'SPI_11', ...] - label directive from DTS
#
class EDTSDatabase(EDTSDb):

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)

    def main(self):
        args = self.parse_arguments()
        edts_file = Path(args.edts[0])
        if not edts_file.is_file():
            raise Exception(
                "Generated extended device tree database file '{}' not found/ no access.".
                format(edts_file))
        self.load(str(edts_file))

        pprint.pprint(dict(self._edts))

        return 0


if __name__ == '__main__':
    EDTSDatabase().main()
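For illustration, an _edts dict following the schema documented above could look roughly like the sketch below; the device id, compatible, label, and property values are made up and not taken from this commit.

edts_example = {
    'aliases': {'uart-1': ['/soc/usart@40011000']},
    'chosen': {'zephyr,console': '/soc/usart@40011000'},
    'compatibles': {'st,stm32-usart': ['/soc/usart@40011000']},
    'device-types': {'serial': ['st,stm32-usart']},
    'devices': {
        '/soc/usart@40011000': {
            'device-id': '/soc/usart@40011000',   # opaque id, shown as a node path only for illustration
            'compatible': 'st,stm32-usart',
            'label': 'UART_1',
            'interrupts': ['37'],                 # example property
        },
    },
}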
@@ -0,0 +1,208 @@
#!/usr/bin/env python3
#
# Copyright (c) 2017, Linaro Limited
# Copyright (c) 2018, Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#

import os, fnmatch
import re
import yaml
import collections

class Bindings(yaml.Loader):

    ##
    # List of all yaml files available for yaml loaders
    # of this class. Must be preset before the first
    # load operation.
    _files = []

    ##
    # Files that are already included.
    # Must be reset on the load of every new binding.
    _included = []

    @staticmethod
    def dict_merge(dct, merge_dct):
        # from https://gist.github.com/angstwad/bf22d1822c38a92ec0a9

        """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
        updating only top-level keys, dict_merge recurses down into dicts nested
        to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
        ``dct``.
        :param dct: dict onto which the merge is executed
        :param merge_dct: dct merged into dct
        :return: None
        """
        for k, v in merge_dct.items():
            if (k in dct and isinstance(dct[k], dict)
                    and isinstance(merge_dct[k], collections.abc.Mapping)):
                Bindings.dict_merge(dct[k], merge_dct[k])
            else:
                if k in dct and dct[k] != merge_dct[k]:
                    print("bindings.py: Merge of '{}': '{}' overwrites '{}'.".format(
                        k, merge_dct[k], dct[k]))
                dct[k] = merge_dct[k]

    @classmethod
    def _traverse_inherited(cls, node):
        """ Recursive overload procedure inside ``node``.
        ``inherits`` section is searched for and used as node base when found.
        Base values are then overloaded by node values
        and some consistency checks are done.
        :param node:
        :return: node
        """

        # Do some consistency checks. Especially id is needed for further
        # processing. title must be the first to check.
        if 'title' not in node:
            # If 'title' is missing, make fault finding easier.
            # Give a hint what node we are looking at.
            print("bindings.py: node without 'title' -", node)
        for prop in ('title', 'version', 'description'):
            if prop not in node:
                node[prop] = "<unknown {}>".format(prop)
                print("bindings.py: WARNING:",
                      "'{}' property missing in".format(prop),
                      "'{}' binding. Using '{}'.".format(node['title'],
                                                         node[prop]))

        # warn if we have an 'id' field
        if 'id' in node:
            print("bindings.py: WARNING: id field set",
                  "in '{}', should be removed.".format(node['title']))

        if 'inherits' in node:
            if isinstance(node['inherits'], list):
                inherits_list = node['inherits']
            else:
                inherits_list = [node['inherits'], ]
            node.pop('inherits')
            for inherits in inherits_list:
                if 'inherits' in inherits:
                    inherits = cls._traverse_inherited(inherits)
                if 'type' in inherits:
                    if 'type' not in node:
                        node['type'] = []
                    if not isinstance(node['type'], list):
                        node['type'] = [node['type'], ]
                    if isinstance(inherits['type'], list):
                        node['type'].extend(inherits['type'])
                    else:
                        node['type'].append(inherits['type'])

                # type, title, description, version of the inherited node
                # are overwritten by intention. Remove them to prevent
                # dict_merge from complaining about duplicates.
                inherits.pop('type', None)
                inherits.pop('title', None)
                inherits.pop('version', None)
                inherits.pop('description', None)
                cls.dict_merge(inherits, node)
                node = inherits
        return node

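    # For illustration only (not part of this commit's file): given a binding
    # node such as
    #     {'title': 'SPI controller', 'inherits': {'title': 'Base', 'properties': {...}}}
    # _traverse_inherited() pops 'inherits', merges the node on top of the
    # inherited base via dict_merge(), and returns the merged dict. The node's
    # own 'title', 'version' and 'description' always win because the base's
    # copies are removed before the merge.
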
    @classmethod
    def _collapse_inherited(cls, bindings_list):
        collapsed = dict(bindings_list)

        for k, v in collapsed.items():
            v = cls._traverse_inherited(v)
            collapsed[k] = v

        return collapsed

    ##
    # @brief Get bindings for given compatibles.
    #
    # @param compatibles
    # @param bindings_dir_pathes directories to search for binding files
    # @return dictionary of bindings found
    @classmethod
    def bindings(cls, compatibles, bindings_dir_pathes):
        # find unique set of compatibles across all active nodes
        s = set()
        for k, v in compatibles.items():
            if isinstance(v, list):
                for item in v:
                    s.add(item)
            else:
                s.add(v)

        # scan YAML files and find the ones we are interested in
        cls._files = []
        for bindings_dir_path in bindings_dir_pathes:
            for root, dirnames, filenames in os.walk(bindings_dir_path):
                for filename in fnmatch.filter(filenames, '*.yaml'):
                    cls._files.append(os.path.join(root, filename))

        bindings_list = {}
        file_load_list = set()
        for file in cls._files:
            for line in open(file, 'r', encoding='utf-8'):
                if re.search(r'^\s+constraint:*', line):
                    c = line.split(':')[1].strip()
                    c = c.strip('"')
                    if c in s:
                        if file not in file_load_list:
                            file_load_list.add(file)
                            with open(file, 'r', encoding='utf-8') as yf:
                                cls._included = []
                                bindings_list[c] = yaml.load(yf, cls)

        # collapse the bindings inherited information before return
        return cls._collapse_inherited(bindings_list)

    def __init__(self, stream):
        filepath = os.path.realpath(stream.name)
        if filepath in self._included:
            print("Error: circular inclusion for file name '{}'".
                  format(stream.name))
            raise yaml.constructor.ConstructorError
        self._included.append(filepath)
        super(Bindings, self).__init__(stream)
        Bindings.add_constructor('!include', Bindings._include)
        Bindings.add_constructor('!import', Bindings._include)

    def _include(self, node):
        if isinstance(node, yaml.ScalarNode):
            return self._extract_file(self.construct_scalar(node))

        elif isinstance(node, yaml.SequenceNode):
            result = []
            for filename in self.construct_sequence(node):
                result.append(self._extract_file(filename))
            return result

        elif isinstance(node, yaml.MappingNode):
            result = {}
            for k, v in self.construct_mapping(node).items():
                result[k] = self._extract_file(v)
            return result

        else:
            print("Error: unrecognised node type in !include statement")
            raise yaml.constructor.ConstructorError

    def _extract_file(self, filename):
        filepaths = [filepath for filepath in self._files if filepath.endswith(filename)]
        if len(filepaths) == 0:
            print("Error: unknown file name '{}' in !include statement".
                  format(filename))
            raise yaml.constructor.ConstructorError
        elif len(filepaths) > 1:
            # multiple candidates for filename
            files = []
            for filepath in filepaths:
                if os.path.basename(filename) == os.path.basename(filepath):
                    files.append(filepath)
            if len(files) > 1:
                print("Error: multiple candidates for file name '{}' in !include statement".
                      format(filename), filepaths)
                raise yaml.constructor.ConstructorError
            filepaths = files
        with open(filepaths[0], 'r', encoding='utf-8') as f:
            return yaml.load(f, Bindings)
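A sketch of how Bindings.bindings() above might be driven; the compatible map, the bindings directory, and the import path are placeholder assumptions, not values taken from this commit.

from edtsdb.bindings import Bindings   # assumed import path

# Hypothetical DTS-derived map of node path -> compatible, plus a Zephyr-style
# directory tree containing <binding>.yaml files.
compatibles = {'/soc/usart@40011000': 'st,stm32-usart'}
bindings = Bindings.bindings(compatibles, ['dts/bindings'])
# 'bindings' maps each matched compatible to its binding dict, with any
# 'inherits' sections already collapsed by _collapse_inherited().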
@@ -0,0 +1,100 @@
#
# Copyright (c) 2018 Bobby Noelte
# Copyright (c) 2018 Linaro Limited
#
# SPDX-License-Identifier: Apache-2.0
#

import json
from .device import EDTSDevice

##
# @brief EDTS Database consumer
#
# Methods for EDTS database usage.
#
class EDTSConsumerMixin(object):
    __slots__ = []

    ##
    # @brief Get compatibles
    #
    # @param None
    # @return edts 'compatibles' dict
    def get_compatibles(self):
        return self._edts['compatibles']

    ##
    # @brief Get aliases
    #
    # @param None
    # @return edts 'aliases' dict
    def get_aliases(self):
        return self._edts['aliases']

    ##
    # @brief Get chosen
    #
    # @param None
    # @return edts 'chosen' dict
    def get_chosen(self):
        return self._edts['chosen']

    ##
    # @brief Get device types
    #
    # @param None
    # @return edts 'device-types' dict
    def get_device_types(self):
        return self._edts['device-types']

    ##
    # @brief Get controllers
    #
    # @param None
    # @return edts 'controllers' dict
    def get_controllers(self):
        return self._edts['controllers']

    ##
    # @brief Get device ids of all activated compatible devices.
    #
    # @param compatibles compatible(s)
    # @return list of device ids of activated devices that are compatible
    def get_device_ids_by_compatible(self, compatibles):
        device_ids = dict()
        if not isinstance(compatibles, list):
            compatibles = [compatibles, ]
        for compatible in compatibles:
            for device_id in self._edts['compatibles'].get(compatible, []):
                device_ids[device_id] = 1
        return list(device_ids.keys())

    ##
    # @brief Get device id of activated device with given label.
    #
    # @return device id
    def get_device_id_by_label(self, label):
        for device_id, device in self._edts['devices'].items():
            if label == device.get('label', None):
                return device_id
        print("consumer.py: Device with label",
              "'{}' not available in EDTS".format(label))
        return None

    ##
    # @brief Get the device dict matching a device_id.
    #
    # @param device_id
    # @return (dict)device
    def get_device_by_device_id(self, device_id):
        try:
            return EDTSDevice(self, device_id)
        except Exception:
            print("consumer.py: Device with device id",
                  "'{}' not available in EDTS".format(device_id))
            return None

    def load(self, file_path):
        with open(file_path, "r") as load_file:
            self._edts = json.load(load_file)
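Assuming EDTSDb (defined in one of the files not shown here) mixes in the consumer API above, typical lookups would look like this sketch; the import path, JSON file name, compatible, and label are placeholders.

from edtsdb.database import EDTSDb   # assumed import path

edts = EDTSDb()
edts.load("edts.json")               # placeholder file name

# all activated devices matching a compatible
for device_id in edts.get_device_ids_by_compatible('st,stm32-usart'):
    device = edts.get_device_by_device_id(device_id)

# single device looked up by its DTS label
uart0_id = edts.get_device_id_by_label('UART_0')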