-
Notifications
You must be signed in to change notification settings - Fork 1.5k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[yang]Adding a tool for generating documentation based on yang model #11291
Merged
Merged
Changes from all commits
Commits
Show all changes
10 commits
Select commit
Hold shift + click to select a range
310b5f6
Yang helper initial changes
dgsudharsan c131a41
Adding dependecies in setup.py
dgsudharsan 5bd8cc4
Adding sonic-cfg-help in setup.py
dgsudharsan 3c1d4ae
Addressing code review comments
dgsudharsan b811833
Pep8 compliance
dgsudharsan 4bd547f
Pep8 compliance
dgsudharsan 2a8a75a
Fixing UT failure
dgsudharsan c430e26
Merge remote-tracking branch 'upstream/master' into yang_help
dgsudharsan c6d0814
Fixing test issues
dgsudharsan 4960acb
Fixing LGTM warnings
dgsudharsan File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,205 @@ | ||
#!/usr/bin/env python3 | ||
|
||
import argparse | ||
from tabulate import tabulate | ||
import re | ||
import textwrap | ||
|
||
import sonic_yang | ||
|
||
YANG_MODELS_DIR = "/usr/local/yang-models" | ||
|
||
|
||
class SonicCfgDescriber:
    """Produce human-readable documentation for CONFIG_DB tables by
    walking the SONiC YANG models loaded from a model directory.
    """

    def __init__(self, table_name, field, print_format,
                 yang_models_dir=None):
        """Load the YANG models and remember the output filters.

        table_name:   table to document; '' documents every table
        field:        field to document; '' documents every field
        print_format: any table format accepted by tabulate()
        yang_models_dir: model directory; defaults to YANG_MODELS_DIR,
            resolved at call time so the module constant can be
            overridden (e.g. by tests) after import.
        """
        if yang_models_dir is None:
            yang_models_dir = YANG_MODELS_DIR
        self.yang_models_dir = yang_models_dir
        self.yang_parser = sonic_yang.SonicYang(self.yang_models_dir)
        self.yang_parser.loadYangModel()
        self.table_descr = {}
        self.table_name = table_name
        self.field = field
        self.print_format = print_format

    def print_documentation(self):
        """Print documentation for every table matching self.table_name
        (or for all tables when no table filter is set)."""
        for module_json in self.yang_parser.yJson:
            toplevel = module_json['module'].get('container')
            if toplevel is None:
                continue
            container = toplevel.get('container')
            # A module may define a single table (dict) or several (list).
            if isinstance(container, list):
                for child in container:
                    if not self.table_name or \
                            child.get('@name') == self.table_name:
                        self.print_table(child, self.field)
            elif isinstance(container, dict):
                if not self.table_name or \
                        container.get('@name') == self.table_name:
                    self.print_table(container, self.field)

    def print_table(self, table, field):
        """Print one table: its name, description, then the field table(s)."""
        if table is None:
            return
        print("\n" + table.get('@name'))
        if table.get('description', {}).get('text') is not None:
            print("Description: " + table.get('description').get('text'))
        print()

        if table.get('list') is not None:
            # 'list' holds the *_LIST schema node(s) of the table.
            if isinstance(table['list'], list):
                for schema in table['list']:
                    self.print_field_desc(schema, field)
                    print()
            else:
                self.print_field_desc(table['list'], field)
                print()
        elif table.get('container') is not None:
            self.print_field_desc(table.get('container'), field)
            print()

    def get_referenced_table_field(self, ref):
        """Turn a leafref path such as
        '/sonic-port:sonic-port/PORT/PORT_LIST/name' into 'PORT:name'."""
        parts = ref.split('/')
        # The leaf's parent is either the table node itself or the
        # table's *_LIST node; skip the latter to reach the table name.
        if 'LIST' in parts[-2]:
            table = parts[-3].split(':')[-1]
        else:
            table = parts[-2].split(':')[-1]
        field = parts[-1].split(':')[-1]
        return table + ":" + field

    def parse_when_condition(self, table):
        """Render a YANG 'when' statement as readable text.

        Only the two condition shapes used by the SONiC models are
        recognised -- boolean(...) membership checks and absolute-path
        comparisons; anything else yields an empty string."""
        condition = table['@condition']
        desc = ""
        if "boolean" in condition:
            # e.g. boolean(current()/../type[.='A' or .='B'])
            values = re.findall(r"'(.*?)'", condition, re.DOTALL)
            field = re.search(r"boolean\((.*?)\[", condition)
            desc = "when " + field.group(1) + " in " + ",".join(values)
        elif condition.startswith("(/"):
            # e.g. (/prefix:module/TABLE/field = 'value' ...)
            field = re.search(r"/(.*):(.*) =", condition)
            ref_table = condition.split("/")[2].split(':')[-1]
            values = re.findall(r"'(.*?)'", condition, re.DOTALL)
            desc = "when " + ref_table + ":" + field.group(2) + \
                " in " + ",".join(values)
        return desc

    def parse_choice(self, table, field):
        """Document every leaf under a YANG 'choice' node, tagging each
        row with its group name and any 'when' constraint."""
        out = []
        for case in table['case']:
            desc = "Mutually exclusive in group " + table['@name']
            if 'when' in case:
                desc += "\n" + self.parse_when_condition(case['when'])
            out += self.validate_and_parse_leaf(case, field, desc)
        return out

    def parse_leaf(self, key, field, desc=""):
        """Build one output row [name, description, mandatory, default,
        reference] for a leaf schema node.

        Returns [] when *field* is set and this leaf is not that field."""
        mandatory = ''
        default = ''
        out = []
        reference = ''
        name = key.get('@name')
        if field and name != field:
            return []
        if isinstance(key, dict):
            if key.get('description', {}).get('text') is not None:
                # Collapse the model's whitespace and wrap at 50 columns
                # so the description fits the printed table.
                desc += "\n".join(textwrap.wrap(
                    re.sub(r"\s+", " ", key['description']['text']),
                    width=50))
            if key.get('mandatory') is not None:
                mandatory = key.get('mandatory').get('@value')
            if key.get('default') is not None:
                default = key.get('default').get('@value')
            if key.get('type') is not None:
                if key['type'].get('@name') == 'leafref':
                    reference = self.get_referenced_table_field(
                        key['type']['path'].get('@value'))
                elif key['type'].get('@name') == 'union':
                    # A union may contain several leafrefs; list each
                    # referenced table:field on its own line.
                    for member in key['type']['type']:
                        if 'path' in member:
                            val = self.get_referenced_table_field(
                                member['path'].get('@value'))
                            if not reference:
                                reference = val
                            else:
                                reference += "\n" + val
        out.append([name, desc, mandatory, default, reference])
        return out

    def validate_and_parse_leaf(self, table, field, desc=""):
        """Collect output rows for all 'leaf' and 'leaf-list' children of
        a schema node; each child may be a single dict or a list."""
        out = []
        if 'leaf' in table:
            if isinstance(table['leaf'], list):
                for key in table['leaf']:
                    out += self.parse_leaf(key, field, desc)
            elif isinstance(table['leaf'], dict):
                out += self.parse_leaf(table['leaf'], field, desc)

        if 'leaf-list' in table:
            if desc:
                desc += "\n"
            desc += "The field contains list of unique members"
            if isinstance(table['leaf-list'], list):
                for key in table['leaf-list']:
                    out += self.parse_leaf(key, field, desc)
            elif isinstance(table['leaf-list'], dict):
                out += self.parse_leaf(table['leaf-list'], field, desc)
        return out

    def print_field_desc(self, table, field):
        """Print the tabulated field documentation for one schema node."""
        if table is None:
            return

        header = ['Field', 'Description', 'Mandatory', 'Default', 'Reference']
        out = []
        if 'key' in table:
            # The YANG list key(s) form the CONFIG_DB key of the table.
            print("key - " + ":".join(table['key']['@value'].split()))

        out += self.validate_and_parse_leaf(table, field)

        if 'choice' in table:
            if isinstance(table['choice'], list):
                for key in table['choice']:
                    out += self.parse_choice(key, field)
            elif isinstance(table['choice'], dict):
                out += self.parse_choice(table['choice'], field)

        if 'list' in table:
            # Nested lists model key/value mappings (e.g. DSCP_TO_TC_MAP).
            out += self.validate_and_parse_leaf(
                table['list'], field,
                "This field is for storing " +
                "mapping between two fields")

        print(tabulate(out, header, tablefmt=self.print_format))
|
||
|
||
def _validation_error(parser, message):
    # Report a usage problem: echo the message, then the full help text.
    print(message)
    parser.print_help()


def main():
    """Parse command-line options and print the requested documentation.

    Returns -1 on invalid option combinations. NOTE(review): the return
    value is discarded by the __main__ guard below, so the process exits
    0 even on a usage error -- the unit tests rely on that, confirm
    before "fixing" it with sys.exit().
    """
    parser = argparse.ArgumentParser(description="Description of table name")
    parser.add_argument("-t", "--table", default='', help="Table name")
    parser.add_argument("-f", "--field", default='', help="Field")
    parser.add_argument("-p", "--print_format", default='grid',
                        help="Print format")
    parser.add_argument('-a', "--all", action="store_true", default=False,
                        help="Print all tables")
    args = parser.parse_args()

    # Guard clauses: exactly one of --table / --all must be present, and
    # --field only makes sense together with --table.
    if not args.table and not args.all:
        _validation_error(parser, "Error: Table or all option is required")
        return -1
    if args.table and args.all:
        _validation_error(parser, "Cannot have table and all option together")
        return -1
    if args.field and not args.table:
        _validation_error(
            parser, "Error: Filter by field requires table to be specified")
        return -1

    describer = SonicCfgDescriber(args.table, args.field, args.print_format)
    describer.print_documentation()


if __name__ == "__main__":
    main()
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,170 @@ | ||
import json | ||
import subprocess | ||
import os | ||
|
||
from unittest import TestCase | ||
|
||
# ---------------------------------------------------------------------------
# Golden outputs compared against the script's stdout by the tests below.
# NOTE(review): this copy of the file appears to have had runs of spaces
# collapsed (argparse help indentation and tabulate column padding no longer
# match the border widths) -- verify these literals against the real
# `sonic-cfg-help` output before relying on them byte-for-byte.
# ---------------------------------------------------------------------------

# Usage/error text printed when neither -t nor -a is given.
output1="""\
Error: Table or all option is required
usage: sonic-cfg-help [-h] [-t TABLE] [-f FIELD] [-p PRINT_FORMAT] [-a]

Description of table name

optional arguments:
-h, --help show this help message and exit
-t TABLE, --table TABLE
Table name
-f FIELD, --field FIELD
Field
-p PRINT_FORMAT, --print_format PRINT_FORMAT
Print format
-a, --all Print all tables
"""

# Full documentation for one table: `sonic-cfg-help -t AUTO_TECHSUPPORT`.
techsupport_table_output="""\

AUTO_TECHSUPPORT
Description: AUTO_TECHSUPPORT part of config_db.json

+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========================+====================================================+=============+===========+=============+
| state | Knob to make techsupport invocation event-driven | | | |
| | based on core-dump generation | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| rate_limit_interval | Minimum time in seconds between two successive | | | |
| | techsupport invocations. Configure 0 to explicitly | | | |
| | disable | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| max_techsupport_limit | Max Limit in percentage for the cummulative size | | | |
| | of ts dumps. No cleanup is performed if the value | | | |
| | isn't configured or is 0.0 | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| max_core_limit | Max Limit in percentage for the cummulative size | | | |
| | of core dumps. No cleanup is performed if the | | | |
| | value isn't congiured or is 0.0 | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| available_mem_threshold | Memory threshold; 0 to disable techsupport | | 10.0 | |
| | invocation on memory usage threshold crossing | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| min_available_mem | Minimum Free memory (in MB) that should be | | 200 | |
| | available for the techsupport execution to start | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+
| since | Only collect the logs & core-dumps generated since | | | |
| | the time provided. A default value of '2 days ago' | | | |
| | is used if this value is not set explicitly or a | | | |
| | non-valid string is provided | | | |
+-------------------------+----------------------------------------------------+-------------+-----------+-------------+

"""

# Single-field filter: `sonic-cfg-help -t AUTO_TECHSUPPORT -f state`.
techsupport_table_field_output="""\

AUTO_TECHSUPPORT
Description: AUTO_TECHSUPPORT part of config_db.json

+---------+--------------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========+==================================================+=============+===========+=============+
| state | Knob to make techsupport invocation event-driven | | | |
| | based on core-dump generation | | | |
+---------+--------------------------------------------------+-------------+-----------+-------------+

"""

# leaf-list with a leafref: `sonic-cfg-help -t PORTCHANNEL -f members`.
portchannel_table_field_output="""\

PORTCHANNEL
Description: PORTCHANNEL part of config_db.json

key - name
+---------+-------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========+===========================================+=============+===========+=============+
| members | The field contains list of unique members | | | PORT:name |
+---------+-------------------------------------------+-------------+-----------+-------------+

"""

# Nested-list mapping table: `sonic-cfg-help -t DSCP_TO_TC_MAP`.
dscp_to_tc_table_field_output="""\

DSCP_TO_TC_MAP
Description: DSCP_TO_TC_MAP part of config_db.json

key - name
+---------+------------------------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+=========+======================================================+=============+===========+=============+
| name | | | | |
+---------+------------------------------------------------------+-------------+-----------+-------------+
| dscp | This field is for storing mapping between two fields | | | |
+---------+------------------------------------------------------+-------------+-----------+-------------+
| tc | This field is for storing mapping between two fields | | | |
+---------+------------------------------------------------------+-------------+-----------+-------------+

"""

# 'choice' node with a 'when' condition: `sonic-cfg-help -t ACL_RULE -f ICMP_TYPE`.
acl_rule_table_field_output="""\

ACL_RULE
Description: ACL_RULE part of config_db.json

key - ACL_TABLE_NAME:RULE_NAME
+-----------+-----------------------------------------+-------------+-----------+-------------+
| Field | Description | Mandatory | Default | Reference |
+===========+=========================================+=============+===========+=============+
| ICMP_TYPE | Mutually exclusive in group icmp | | | |
| | when IP_TYPE in ANY,IP,IPV4,IPv4ANY,ARP | | | |
+-----------+-----------------------------------------+-------------+-----------+-------------+

"""
|
||
class TestCfgHelp(TestCase):
    """End-to-end tests: run the sonic-cfg-help script as a subprocess
    and compare its stdout against the golden outputs defined above."""

    # Show full diffs whenever a multi-line golden comparison fails
    # (previously set in only one test method).
    maxDiff = None

    def setUp(self):
        # The script under test lives one directory above this test file.
        self.test_dir = os.path.dirname(os.path.realpath(__file__))
        self.script_file = os.path.join(self.test_dir, '..', 'sonic-cfg-help')

    def run_script(self, argument):
        """Run the script with *argument* (a space-separated option
        string) and return its decoded stdout."""
        print('\n Running sonic-cfg-help ' + argument)
        # Pass arguments as a list (shell=False): no shell parsing and
        # no injection risk from the argument string.
        cmd = [self.script_file] + argument.split()
        output = subprocess.check_output(cmd)

        output = output.decode()

        linecount = output.strip().count('\n')
        if linecount <= 0:
            print(' Output: ' + output.strip())
        else:
            print(' Output: ({0} lines, {1} bytes)'.format(linecount + 1, len(output)))
        return output

    def test_dummy_run(self):
        # No arguments: usage error text, but (by design) exit status 0.
        argument = ''
        output = self.run_script(argument)
        self.assertEqual(output, output1)

    def test_single_table(self):
        argument = '-t AUTO_TECHSUPPORT'
        output = self.run_script(argument)
        self.assertEqual(output, techsupport_table_output)

    def test_single_field(self):
        argument = '-t AUTO_TECHSUPPORT -f state'
        output = self.run_script(argument)
        self.assertEqual(output, techsupport_table_field_output)

    def test_leaf_list(self):
        argument = '-t PORTCHANNEL -f members'
        output = self.run_script(argument)
        self.assertEqual(output, portchannel_table_field_output)

    def test_leaf_list_map(self):
        argument = '-t DSCP_TO_TC_MAP'
        output = self.run_script(argument)
        self.assertEqual(output, dscp_to_tc_table_field_output)

    def test_when_condition(self):
        argument = '-t ACL_RULE -f ICMP_TYPE'
        output = self.run_script(argument)
        self.assertEqual(output, acl_rule_table_field_output)
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
If we modify techsupport yang model, this unit test will fail?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yes. We need to update this test as well.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Maybe we can create some dummy yang models for unit test.
And then we can modify Yang model, and don't need to update this unit test.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Hi @ganglyu ,
I prefer to use any active yang model rather than introducing a dummy for the test. We do have instances of such tests in other repositories where if schema is changed, the UT needs to be updated.
I don't think the yang model I chose would change very often. Even if someone changes it and accidentally forgets to update the UT, it will be caught at build time and the build will fail, forcing the user to update the test.