Skip to content

Commit

Permalink
Extract generateError
Browse files Browse the repository at this point in the history
  • Loading branch information
takahi-i committed Oct 24, 2017
1 parent 868dc0b commit 42aff4a
Showing 1 changed file with 32 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -55,33 +55,43 @@ public void validate(Document document) {
}
}

private void generateErrors(Document document, Sentence sentence, TokenElement token, String reading) {
private void generateErrors(Document document, Sentence sentence, TokenElement targetToken, String reading) {
// extract words which have same reading as targetToken
Map<String, List<CandidateTokenInfo>> variationMap = generateVariationMap(document, targetToken, reading);

for (String surface : variationMap.keySet()) {
generateError(document, sentence, targetToken, variationMap, surface);
}
}

/**
 * Builds and registers a localized error message for one surface variation,
 * listing its tag and the positions at which it occurs.
 */
private void generateError(Document document, Sentence sentence, TokenElement targetToken, Map<String, List<CandidateTokenInfo>> variationMap, String surface) {
    List<CandidateTokenInfo> candidateTokenList = variationMap.get(surface);
    StringBuilder message = new StringBuilder();
    // Variation surface followed by the first tag of its first occurrence.
    message.append(surface)
           .append("(")
           .append(candidateTokenList.get(0).element.getTags().get(0))
           .append(")");
    String fileName = document.getFileName().orElse("");
    if (!fileName.isEmpty()) {
        message.append(" in ").append(fileName);
    }
    message.append(" at ").append(addTokenInfo(candidateTokenList));
    addLocalizedErrorFromToken(sentence, targetToken, message.toString());
}

private Map<String, List<CandidateTokenInfo>> generateVariationMap(Document document, TokenElement targetToken, String reading) {
List<CandidateTokenInfo> tokens = this.readingMap.get(document).get(reading);
Map<String, List<CandidateTokenInfo>> candidateMap = new HashMap<>();
Map<String, List<CandidateTokenInfo>> variationMap = new HashMap<>();
for (CandidateTokenInfo candidate : tokens) {
if (candidate.element != token && !token.getSurface().equals(candidate.element.getSurface())) {
if (!candidateMap.containsKey(candidate.element.getSurface())) {
candidateMap.put(candidate.element.getSurface(), new LinkedList<>());
if (candidate.element != targetToken && !targetToken.getSurface().equals(candidate.element.getSurface())) {
if (!variationMap.containsKey(candidate.element.getSurface())) {
variationMap.put(candidate.element.getSurface(), new LinkedList<>());
}
candidateMap.get(candidate.element.getSurface()).add(candidate);
}
}

for (String surface : candidateMap.keySet()) {
StringBuilder candidates = new StringBuilder();
candidates.append(surface);
List<CandidateTokenInfo> candidateTokenList = candidateMap.get(surface);
candidates.append("(");
candidates.append(candidateTokenList.get(0).element.getTags().get(0));
candidates.append(")");
if (document.getFileName().orElse("").length() > 0) {
candidates.append(" in ");
candidates.append(document.getFileName().orElse(""));
variationMap.get(candidate.element.getSurface()).add(candidate);
}
candidates.append(" at ");
candidates.append(addTokenInfo(candidateTokenList));
addLocalizedErrorFromToken(sentence, token, candidates.toString());
}
return variationMap;
}

private String addTokenInfo(List<CandidateTokenInfo> candidateTokenList) {
Expand Down

0 comments on commit 42aff4a

Please sign in to comment.