Skip to content

Commit f0370ca

Browse files
Merge pull request #178 from JuDFTteam/release-0.11.3
🚀 Release `0.11.3`
2 parents 83b35b4 + 62a1b48 commit f0370ca

31 files changed

+2140
-304
lines changed

.github/workflows/cd.yml

+18-4
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,19 @@ jobs:
1414

1515
runs-on: ubuntu-latest
1616

17+
strategy:
18+
matrix:
19+
include:
20+
- name: docs
21+
sphinx-options: ''
22+
allow-failure: false
23+
- name: docs-nitpicky
24+
sphinx-options: '-nW'
25+
allow-failure: true
26+
27+
name: ${{ matrix.name }}
28+
continue-on-error: ${{ matrix.allow-failure }}
29+
1730
steps:
1831
- uses: actions/checkout@v3
1932

@@ -43,8 +56,9 @@ jobs:
4356
- name: Build documentation
4457
env:
4558
READTHEDOCS: 'True'
59+
SPHINXOPTS: ${{ matrix.sphinx-options }}
4660
run: |
47-
SPHINXOPTS='-nW' make -C docs html
61+
make -C docs html
4862
4963
pre-commit:
5064
runs-on: ubuntu-latest
@@ -55,13 +69,13 @@ jobs:
5569
include:
5670
- name: pre-commit-errors
5771
skip-hooks: pylint-warnings
58-
strict: false
72+
allow-failure: false
5973
- name: pre-commit-warnings
6074
skip-hooks: pylint-errors
61-
strict: true
75+
allow-failure: true
6276

6377
name: ${{ matrix.name }}
64-
continue-on-error: ${{ matrix.strict }}
78+
continue-on-error: ${{ matrix.allow-failure }}
6579

6680
steps:
6781
- uses: actions/checkout@v3

.github/workflows/ci.yml

+18-4
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,19 @@ jobs:
88

99
runs-on: ubuntu-latest
1010

11+
strategy:
12+
matrix:
13+
include:
14+
- name: docs
15+
sphinx-options: ''
16+
allow-failure: false
17+
- name: docs-nitpicky
18+
sphinx-options: '-nW'
19+
allow-failure: true
20+
21+
name: ${{ matrix.name }}
22+
continue-on-error: ${{ matrix.allow-failure }}
23+
1124
steps:
1225
- uses: actions/checkout@v3
1326

@@ -37,8 +50,9 @@ jobs:
3750
- name: Build documentation
3851
env:
3952
READTHEDOCS: 'True'
53+
SPHINXOPTS: ${{ matrix.sphinx-options }}
4054
run: |
41-
SPHINXOPTS='-nW' make -C docs html
55+
make -C docs html
4256
4357
pre-commit:
4458
runs-on: ubuntu-latest
@@ -49,13 +63,13 @@ jobs:
4963
include:
5064
- name: pre-commit-errors
5165
skip-hooks: pylint-warnings
52-
strict: false
66+
allow-failure: false
5367
- name: pre-commit-warnings
5468
skip-hooks: pylint-errors
55-
strict: true
69+
allow-failure: true
5670

5771
name: ${{ matrix.name }}
58-
continue-on-error: ${{ matrix.strict }}
72+
continue-on-error: ${{ matrix.allow-failure }}
5973

6074
steps:
6175
- uses: actions/checkout@v3

.pre-commit-config.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ ci:
55

66
repos:
77
- repo: https://github.com/pre-commit/pre-commit-hooks
8-
rev: v4.2.0
8+
rev: v4.3.0
99
hooks:
1010
- id: double-quote-string-fixer
1111
types: [python]
@@ -42,7 +42,7 @@ repos:
4242
]
4343

4444
- repo: https://github.com/asottile/pyupgrade
45-
rev: v2.32.0
45+
rev: v2.34.0
4646
hooks:
4747
- id: pyupgrade
4848
args: [

CHANGELOG.md

+15
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,20 @@
11
# Changelog
22

3+
## latest
4+
[full changelog](https://github.com/JuDFTteam/masci-tools/compare/v0.11.3...develop)
5+
6+
Nothing here yet
7+
8+
## v.0.11.3
9+
[full changelog](https://github.com/JuDFTteam/masci-tools/compare/v0.11.2...v0.11.3)
10+
11+
### Improvements
12+
- Changes to KKR plotting routine `dispersionplot` for compatibility with AiiDA v2.0
13+
- Connecting vectors for intersite `GreensFunction` are now saved in Ångström for better interoperability with ase, pymatgen, and AiiDA
14+
15+
### For Developers
16+
- Relaxed CI requirements for docs build. Nitpicky mode is no longer required to pass but is treated as a hint to look into the warnings
17+
318
## v.0.11.2
419
[full changelog](https://github.com/JuDFTteam/masci-tools/compare/v0.11.1...v0.11.2)
520

docs/source/user_guide/hdf5_parser.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ The recipe for extracting bandstructure information form the `banddos.hdf` looks
6767
```{literalinclude} ../../../masci_tools/io/parsers/hdf5/recipes.py
6868
:language: python
6969
:linenos: true
70-
:lines: 170-323
70+
:pyobject: bands_recipe_format
7171
```
7272

7373
Each recipe can define the `datasets` and `attributes` entry (if one is not defined,

masci_tools/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
__copyright__ = ('Copyright (c), Forschungszentrum Jülich GmbH, IAS-1/PGI-1, Germany. '
2222
'All rights reserved.')
2323
__license__ = 'MIT license, see LICENSE.txt file.'
24-
__version__ = '0.11.2'
24+
__version__ = '0.11.3'
2525
__authors__ = 'The JuDFT team. Also see AUTHORS.txt file.'
2626

2727
logging.getLogger(__name__).addHandler(logging.NullHandler())

masci_tools/io/cif2inp_ase.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
Binv = np.linalg.inv(structure.cell)
2929
frac_coordinates = structure.arrays['positions'].dot(Binv)
3030

31-
with open(inpFilename, 'w+') as f:
31+
with open(inpFilename, 'w+', encoding='utf-8') as f:
3232
natoms = len(structure.arrays['numbers'])
3333
f.write(structureFormula + '\r\n')
3434
f.write('&input film=F /\r\n')

masci_tools/io/parsers/fleur/__init__.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616
from .fleur_inpxml_parser import inpxml_parser
1717
from .fleur_outxml_parser import outxml_parser, register_migration, conversion_function
18-
from . import task_migrations #pylint: disable=unused-import
19-
from . import outxml_conversions #pylint: disable=unused-import
18+
from . import task_migrations #pylint: disable=unused-import,cyclic-import
19+
from . import outxml_conversions #pylint: disable=unused-import,cyclic-import
2020

2121
__all__ = ['inpxml_parser', 'outxml_parser', 'register_migration', 'conversion_function']

masci_tools/io/parsers/fleur/default_parse_tasks.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@
6363
6464
.. literalinclude:: ../../../../masci_tools/io/parsers/fleur/default_parse_tasks.py
6565
:language: python
66-
:lines: 66-
66+
:lines: 70-
6767
:linenos:
6868
6969
"""

masci_tools/io/parsers/fleur/fleur_inpxml_parser.py

+71-83
Original file line numberDiff line numberDiff line change
@@ -128,104 +128,92 @@ def inpxml_todict(parent: etree._Element,
128128
129129
:return: a python dictionary
130130
"""
131+
#These keys have to never appear as an attribute/tag name
132+
#The underscores should guarantee that
133+
_TEXT_PLACEHOLDER = '__text__'
134+
_OMIT_PLACEHOLDER = '__omit__'
131135

132136
#Check if this is the first call to this routine
133137
if base_xpath is None:
134138
base_xpath = f'/{parent.tag}'
135139

136-
return_dict: dict[str, Any] = {}
137-
if list(parent.items()):
138-
return_dict = {str(key): val for key, val in parent.items()}
139-
# Now we have to convert lazy fortran style into pretty things for the Database
140-
for key in return_dict:
141-
if key in schema_dict['attrib_types']:
142-
return_dict[key], suc = convert_from_xml(return_dict[key],
140+
content: dict[str, Any] = {}
141+
# Now we have to convert lazy fortran style into pretty things for the Database
142+
for key, value in parent.items():
143+
attrib_name, value = str(key), str(value)
144+
if attrib_name in schema_dict['attrib_types']:
145+
content[attrib_name], suc = convert_from_xml(value,
143146
schema_dict,
144-
key,
147+
attrib_name,
145148
text=False,
146149
constants=constants,
147150
logger=logger)
148-
if not suc and logger is not None:
149-
logger.warning("Failed to convert attribute '%s' Got: '%s'", key, return_dict[key])
150-
151-
if parent.text:
152-
# has text, but we don't want all the '\n' s and empty strings in the database
153-
if parent.text.strip() != '': # might not be the best solutions
154-
if parent.tag not in schema_dict['text_tags']:
155-
if logger is not None:
156-
logger.error('Something is wrong in the schema_dict: %s is not in text_tags, but it has text',
157-
parent.tag)
158-
raise ValueError(
159-
f'Something is wrong in the schema_dict: {parent.tag} is not in text_tags, but it has text')
160-
161-
converted_text, suc = convert_from_xml(str(parent.text),
162-
schema_dict,
163-
parent.tag,
164-
text=True,
165-
constants=constants,
166-
logger=logger)
167-
168151
if not suc and logger is not None:
169-
logger.warning("Failed to text of '%s' Got: '%s'", parent.tag, parent.text)
152+
logger.warning("Failed to convert attribute '%s' Got: '%s'", attrib_name, value)
153+
154+
# has text, but we don't want all the '\n' s and empty strings in the database
155+
if parent.text and parent.text.strip() != '':
156+
157+
if parent.tag not in schema_dict['text_tags']:
158+
if logger is not None:
159+
logger.error('Something is wrong in the schema_dict: %s is not in text_tags, but it has text',
160+
parent.tag)
161+
raise ValueError(
162+
f'Something is wrong in the schema_dict: {parent.tag} is not in text_tags, but it has text')
170163

171-
if not return_dict:
172-
return_dict = converted_text #type:ignore
173-
else:
174-
return_dict['text_value'] = converted_text
175-
if 'label' in return_dict:
176-
return_dict['text_label'] = return_dict['label']
177-
return_dict.pop('label')
164+
converted_text, suc = convert_from_xml(str(parent.text),
165+
schema_dict,
166+
parent.tag,
167+
text=True,
168+
constants=constants,
169+
logger=logger)
170+
171+
if not suc and logger is not None:
172+
logger.warning("Failed to text of '%s' Got: '%s'", parent.tag, parent.text)
173+
174+
content[_TEXT_PLACEHOLDER] = converted_text
178175

179176
tag_info = schema_dict['tag_info'].get(base_xpath, EMPTY_TAG_INFO)
180177
for element in parent:
181178

182-
new_base_xpath = f'{base_xpath}/{element.tag}'
183-
omitt_contained_tags = element.tag in schema_dict['omitt_contained_tags']
184-
new_return_dict = inpxml_todict(element,
185-
schema_dict,
186-
constants,
187-
base_xpath=new_base_xpath,
188-
omitted_tags=omitt_contained_tags,
189-
logger=logger)
190-
191-
if element.tag in tag_info['several']:
192-
# make a list, otherwise the tag will be overwritten in the dict
193-
if element.tag not in return_dict: # is this the first occurrence?
194-
if omitted_tags:
195-
if len(return_dict) == 0:
196-
return_dict = [] #type:ignore
197-
else:
198-
return_dict[element.tag] = []
199-
if omitted_tags:
200-
return_dict.append(new_return_dict) #type:ignore
201-
elif 'text_value' in new_return_dict:
202-
for key, value in new_return_dict.items():
203-
if key == 'text_value':
204-
return_dict[element.tag].append(value)
205-
elif key == 'text_label':
206-
if 'labels' not in return_dict:
207-
return_dict['labels'] = {}
208-
return_dict['labels'][value] = new_return_dict['text_value']
209-
else:
210-
if key not in return_dict:
211-
return_dict[key] = []
212-
elif not isinstance(return_dict[key], list): #Key seems to be defined already
213-
if logger is not None:
214-
logger.error('%s cannot be extracted to the next level', key)
215-
raise ValueError(f'{key} cannot be extracted to the next level')
216-
return_dict[key].append(value)
217-
for key in new_return_dict.keys():
218-
if key in ['text_value', 'text_label']:
219-
continue
220-
if len(return_dict[key]) != len(return_dict[element.tag]):
179+
child_content = inpxml_todict(element,
180+
schema_dict,
181+
constants,
182+
base_xpath=f'{base_xpath}/{element.tag}',
183+
omitted_tags=element.tag in schema_dict['omitt_contained_tags'],
184+
logger=logger)
185+
186+
if _OMIT_PLACEHOLDER in child_content:
187+
#We know that there is only one key here
188+
child_content = child_content.pop(_OMIT_PLACEHOLDER)
189+
190+
tag_name = element.tag
191+
if omitted_tags:
192+
tag_name = _OMIT_PLACEHOLDER
193+
194+
if element.tag in tag_info['several']\
195+
and _TEXT_PLACEHOLDER in child_content:
196+
#The text is stored under the name of the tag
197+
text_value = child_content.pop(_TEXT_PLACEHOLDER)
198+
content.setdefault(tag_name, []).append(text_value)
199+
child_tag_info = schema_dict['tag_info'].get(f'{base_xpath}/{element.tag}', EMPTY_TAG_INFO)
200+
for key, value in child_content.items():
201+
if key not in child_tag_info['optional_attribs']:
202+
#All required attributes are stored as lists
203+
if key in content and \
204+
not isinstance(content[key], list): #Key seems to be defined already
221205
if logger is not None:
222-
logger.error(
223-
'Extracted optional argument %s at the moment only label is supported correctly', key)
224-
raise ValueError(
225-
f'Extracted optional argument {key} at the moment only label is supported correctly')
226-
else:
227-
return_dict[element.tag].append(new_return_dict)
206+
logger.error('%s cannot be extracted to the next level', key)
207+
raise ValueError(f'{key} cannot be extracted to the next level')
208+
content.setdefault(key, []).append(value)
209+
else:
210+
#All optional attributes are stored as dicts pointing to the text
211+
content.setdefault(key, {})[value] = text_value
212+
elif element.tag in tag_info['several']:
213+
content.setdefault(tag_name, []).append(child_content)
214+
elif _TEXT_PLACEHOLDER in child_content:
215+
content[tag_name] = child_content.pop(_TEXT_PLACEHOLDER)
228216
else:
229-
return_dict[element.tag] = new_return_dict
217+
content[tag_name] = child_content
230218

231-
return return_dict
219+
return content

masci_tools/io/parsers/fleur/fleur_outxml_parser.py

+4-6
Original file line numberDiff line numberDiff line change
@@ -218,14 +218,12 @@ def outxml_parser(outxmlfile: XMLFileLike,
218218
if not list_return:
219219
#Convert one item lists to simple values
220220
for key, value in out_dict.items():
221-
if isinstance(value, list):
222-
if len(value) == 1:
223-
out_dict[key] = value[0]
221+
if isinstance(value, list) and len(value) == 1:
222+
out_dict[key] = value[0]
224223
elif isinstance(value, dict):
225224
for subkey, subvalue in value.items():
226-
if isinstance(subvalue, list):
227-
if len(subvalue) == 1:
228-
out_dict[key][subkey] = subvalue[0]
225+
if isinstance(subvalue, list) and len(subvalue) == 1:
226+
out_dict[key][subkey] = subvalue[0]
229227

230228
if parser_log_handler is not None:
231229
if logger is not None:

0 commit comments

Comments
 (0)