fixes typos in IRIs, resolves #81; updates terms
The following typos in IRIs were fixed using the new SHACL shapes from the
previous commit (see the validation sketch below):
- the dpv:expiry relation was used instead of dpv:hasExpiry in consent
- dpv:hasConsequenceOn was used as a parent even though it was only proposed;
  the term has been promoted to accepted status
- typos in Technical measures where Crypto- was mistyped as Cryto-

Errors in labels were corrected for:
- MaintainCreditCheckingDatabase
- MaintainCreditRatingDatabase

The following terms were updated:
- GDPR's legal bases: text has been added from Art. 6, and the parent
  terms have been aligned with the main spec's legal bases (including the
  creation of new terms to match granularity)
- Anonymisation and Pseudonymisation have been changed to be types of
  Deidentification techniques (the grouping parent concept) to
  distinguish them, following discussions in #15
- DPV-LEGAL now includes laws and DPAs for the USA, contributed by @JonathanBowker
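
As a rough illustration of how SHACL shapes can catch IRI typos such as dpv:expiry vs dpv:hasExpiry, here is a minimal validation sketch using rdflib and pySHACL; this is not the project's actual tooling, and the file names dpv-consent.ttl and dpv-shapes.ttl are placeholders:

from rdflib import Graph
from pyshacl import validate

# Load the vocabulary data and the SHACL shapes (placeholder file names).
data = Graph().parse("dpv-consent.ttl", format="turtle")
shapes = Graph().parse("dpv-shapes.ttl", format="turtle")

# validate() returns (conforms, report_graph, report_text); a shape that
# restricts consent concepts to the accepted relations would report a
# mistyped dpv:expiry as a violation here.
conforms, report_graph, report_text = validate(data, shacl_graph=shapes)
if not conforms:
    print(report_text)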
coolharsh55 committed Nov 24, 2022
1 parent c63ecfe commit b1a86ea
Showing 527 changed files with 290,890 additions and 275,110 deletions.
14 changes: 7 additions & 7 deletions documentation-generator/002_parse_csv_to_rdf.py
@@ -693,17 +693,17 @@ def serialize_graph(graph, filepath):
     graph.namespace_manager.bind(prefix, namespace)
 proposed = []
 Location_schema = namedtuple('Legal_Location', (
-    'Term', 'Label', 'ParentTerm', 'Alpha2', 'Alpha3', 'Numeric', 'M49',
+    'term', 'Label', 'Parentterm', 'Alpha2', 'Alpha3', 'Numeric', 'M49',
     'broader', 'narrower', 'created', 'modified',
     'status', 'contributors', 'resolution'))
 concepts = extract_terms_from_csv(
     f'{IMPORT_CSV_PATH}/legal_Locations.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
-    term = BASE[row.Term]
-    parent = DPV[row.ParentTerm.replace("dpv:", "")]
+    term = BASE[row.term]
+    parent = DPV[row.Parentterm.replace("dpv:", "")]
     graph.add((term, RDF.type, DPV.Concept))
     graph.add((term, RDF.type, SKOS.Concept))
     graph.add((term, DPV.isInstanceOf, parent))
@@ -819,7 +819,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_Authorities.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPV.Concept))
@@ -871,7 +871,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_EU_EEA.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPV.Concept))
@@ -930,7 +930,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_EU_Adequacy.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPV.Concept))
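
The change above (repeated in the OWL and SKOS scripts below) renames the Location_schema fields from 'Term'/'ParentTerm' to 'term'/'Parentterm' so they match the lower-case row.term accesses used for the other legal CSV files. A standalone sketch, not repository code, of why the mismatch is an error:

from collections import namedtuple

# namedtuple attributes are case-sensitive: a schema declared with 'Term'
# cannot be read back as row.term.
Old_schema = namedtuple('Legal_Location', ('Term', 'Label'))
row = Old_schema(Term='dpv:Country', Label='Country')
# row.term  -> AttributeError: 'Legal_Location' object has no attribute 'term'

# Lower-casing the declared fields, as this commit does, makes the existing
# row.term accesses work.
New_schema = namedtuple('Legal_Location', ('term', 'Label'))
row = New_schema(term='dpv:Country', Label='Country')
print(row.term)  # dpv:Country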
14 changes: 7 additions & 7 deletions documentation-generator/002_parse_csv_to_rdf_owl.py
@@ -708,17 +708,17 @@ def serialize_graph(graph, filepath):
     graph.namespace_manager.bind(prefix, namespace)
 proposed = []
 Location_schema = namedtuple('Legal_Location', (
-    'Term', 'Label', 'ParentTerm', 'Alpha2', 'Alpha3', 'Numeric', 'M49',
+    'term', 'Label', 'Parentterm', 'Alpha2', 'Alpha3', 'Numeric', 'M49',
     'broader', 'narrower', 'created', 'modified',
     'status', 'contributors', 'resolution'))
 concepts = extract_terms_from_csv(
     f'{IMPORT_CSV_PATH}/legal_Locations.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
-    term = BASE[row.Term]
-    parent = row.ParentTerm.replace("dpv:", "")
+    term = BASE[row.term]
+    parent = row.Parentterm.replace("dpv:", "")
     graph.add((term, RDF.type, DPVO[f'{parent}']))
     graph.add((term, RDF.type, OWL.NamedIndividual))
     graph.add((term, RDFS.isDefinedBy, BASE['']))
@@ -824,7 +824,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_Authorities.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPVO[f'{row.type.replace("dpv:","")}']))
@@ -871,7 +871,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_EU_EEA.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPVO[f'{row.type.replace("dpv:","")}']))
@@ -927,7 +927,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_EU_Adequacy.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPVO.Law))
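
All three parser scripts feed the schema into extract_terms_from_csv, whose implementation is not part of this diff. A plausible sketch of such a helper, assuming the CSV column order matches the schema's field order (the repository's actual implementation may differ):

import csv

def extract_terms_from_csv(filepath, schema):
    # Map each CSV row onto the given namedtuple schema, skipping the header.
    with open(filepath, newline='', encoding='utf-8') as fd:
        reader = csv.reader(fd)
        next(reader)  # header row
        for row in reader:
            yield schema(*row[:len(schema._fields)])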
14 changes: 7 additions & 7 deletions documentation-generator/002_parse_csv_to_rdf_skos.py
@@ -750,17 +750,17 @@ def serialize_graph(graph, filepath):
     graph.namespace_manager.bind(prefix, namespace)
 proposed = []
 Location_schema = namedtuple('Legal_Location', (
-    'Term', 'Label', 'ParentTerm', 'Alpha2', 'Alpha3', 'Numeric', 'M49',
+    'term', 'Label', 'Parentterm', 'Alpha2', 'Alpha3', 'Numeric', 'M49',
     'broader', 'narrower', 'created', 'modified',
     'status', 'contributors', 'resolution'))
 concepts = extract_terms_from_csv(
     f'{IMPORT_CSV_PATH}/legal_Locations.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
-    term = BASE[row.Term]
-    parent = row.ParentTerm.replace("dpv:", "")
+    term = BASE[row.term]
+    parent = row.Parentterm.replace("dpv:", "")
     graph.add((term, RDF.type, DPVS[f'{parent}']))
     graph.add((term, RDF.type, SKOS.Concept))
     graph.add((term, RDFS.isDefinedBy, BASE['']))
@@ -873,7 +873,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_Authorities.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPVS[f'{row.type.replace("dpv:","")}']))
@@ -924,7 +924,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_EU_EEA.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPVS[f'{row.type.replace("dpv:","")}']))
@@ -982,7 +982,7 @@ def serialize_graph(graph, filepath):
     f'{IMPORT_CSV_PATH}/legal_EU_Adequacy.csv', Location_schema)
 for row in concepts:
     if row.status not in VOCAB_TERM_ACCEPT:
-        proposed.append(row.Term)
+        proposed.append(row.term)
         continue
     term = BASE[row.term]
     graph.add((term, RDF.type, DPVS.Law))
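
For the SKOS serialisation, the context lines above show the triple pattern applied to each accepted row. A self-contained sketch of that pattern with rdflib; the namespace IRIs and the example term are illustrative placeholders, not the project's exact values:

from rdflib import Graph, Namespace, RDF, RDFS
from rdflib.namespace import SKOS

BASE = Namespace("https://w3id.org/dpv/dpv-legal#")
DPVS = Namespace("https://w3id.org/dpv/dpv-skos#")

graph = Graph()
term = BASE["US"]
parent = "Country"  # row.Parentterm with the "dpv:" prefix stripped
graph.add((term, RDF.type, DPVS[parent]))
graph.add((term, RDF.type, SKOS.Concept))
graph.add((term, RDFS.isDefinedBy, BASE[""]))
print(graph.serialize(format="turtle"))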
4 changes: 2 additions & 2 deletions documentation-generator/801_generate_releases.sh
@@ -38,8 +38,8 @@ zip -q releases/dpv-owl.zip -r dpv-owl
 echo "generated releases/dpv-owl.zip"

 # 4. dpv-csv.zip - DPV + Extensions as CSV files
-cp documentation-generator/vocab_csv/dpv_terms_discussion.xlsx releases/dpv.xlsx
-echo "generated releases/dpv.xlsx"
+zip -q releases/dpv-xlsx.zip documentation-generator/vocab_csv/*.xlsx
+echo "generated releases/dpv-xlsx.zip"

 # 5. dpv-json.zip - DPV + Extensions as JSON files
 # TODO
2 changes: 1 addition & 1 deletion documentation-generator/jinja2_resources/links_label.json

Large diffs are not rendered by default.

