Commit c15074ad authored by Romana Pernischova's avatar Romana Pernischova
Browse files

Merge branch 'remoteUpdate' into 'master'

Remote update

See merge request !1
parents 68c9d50f cc77f34f
chimp.plugin
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="MavenProjectsManager">
<option name="originalFiles">
<list>
<option value="$PROJECT_DIR$/pom.xml" />
</list>
</option>
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="openjdk-15" project-jdk-type="JavaSDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
......@@ -7,10 +7,12 @@ This repository contains code for the Protégé desktop plugin.
To build and run the plugin, the following items must be installed:
+ Apache's [Maven](http://maven.apache.org/index.html).
+ A Protégé distribution (5.0.0 or higher). The Protégé 5.5.0 release is [available](http://protege.stanford.edu/products.php#desktop-protege) from the main Protégé website.
You can also download a pre-built jar of the plugin under Releases and place it in the Protégé Plugin folder (instructions are available on the Protégé website).
### Build
1. In the chimp-plugin directory:
......@@ -23,6 +25,7 @@ You can also download a pre-build jar of the plugin under Releases and place it
### View the Plugin in Protégé
1. Launch your Protégé distribution.
2. Window > Views > Ontology views > ChImp (Change Impact)
......@@ -34,8 +37,9 @@ You can also download a pre-build jar of the plugin under Releases and place it
#### Setup
1. Download the project:
git clone git@gitlab.ifi.uzh.ch:DDIS-Public/chimp-protege-plugin.git
2. Open up in an IDE: Intellij works best.
......
......@@ -5,7 +5,7 @@
<groupId>chimp</groupId>
<artifactId>chimp.plugin</artifactId>
<version>1.1.0</version>
<version>1.2.0</version>
<packaging>bundle</packaging>
<name>Chimp Plugin</name>
......@@ -101,6 +101,7 @@
<excludes>
<!-- Exclude performance tests-->
<exclude>**/*PerformanceTest.java</exclude>
<exclude>**/*Test.java</exclude>
</excludes>
</configuration>
</plugin>
......
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import reasoning.ChimpReasoner;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.Set;
/**
 * Impact metric comparing the number of newly inferred axioms against the
 * number of axioms shared by the first and the current materialization:
 * impact_{add,m} = |Δ+| / |m_i ∩ m_{i+1}|.
 *
 * <p>A value of 1 means as many axioms were added to the materialization as
 * were kept unchanged, i.e. half of the current materialization is new.
 */
public class AddedInferenceImpact extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(AddedInferenceImpact.class);

    /** Reasoner wrapper providing the first and the current materialization. */
    private final ChimpReasoner customReasoner;

    public AddedInferenceImpact(ChimpReasoner customReasoner) {
        super("Added Inference Impact",
                "This measure signals the amount of new axioms in the materialization in comparison " +
                        "to the shared amount of axioms in the materialization, when comparing " +
                        "the materialization now and to the one calculated as the reasoner was started. " +
                        "If this number is one, it means that the amount " +
                        "of new axioms in the materialization is the same as the amount that has " +
                        "not changed. Therefore, half of the materialization is new compared to the old version.",
                "impact_{add,m} = \\frac{\\Delta_i^+}{m_{i,i+1}}");
        this.customReasoner = customReasoner;
    }

    /**
     * Recomputes the metric from the reasoner's first and current
     * materializations and publishes it via {@code updateMetric}.
     * Skips the update (with a debug log) when no materialization exists yet.
     */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> currentMaterialization = customReasoner.getCurrentMaterialization();
        Set<OWLAxiom> firstMaterialization = customReasoner.getFirstMaterialization();
        if (currentMaterialization != null) {
            updateMetric(calculateMetricWithMaterializations(firstMaterialization, currentMaterialization));
        } else {
            // Parameterized logging: the message is only built when debug is enabled.
            log.debug("{} could not be calculated because there were no previous materializations", getDisplayName());
        }
    }

    /**
     * @param oldMaterialization materialization at reasoner start (m_i)
     * @param newMaterialization current materialization (m_{i+1})
     * @return |Δ+| / |intersection|, or 0 when the materializations share no axioms
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        // Δ+ : axioms present only in the new materialization.
        Set<OWLAxiom> materializedAdditions = new HashSet<>(newMaterialization);
        materializedAdditions.removeAll(oldMaterialization);
        // m_{i,i+1} : axioms shared by both materializations.
        Set<OWLAxiom> intersection = new HashSet<>(newMaterialization);
        intersection.retainAll(oldMaterialization);
        double sizeOfMaterializedAdditions = materializedAdditions.size();
        double sizeOfIntersection = intersection.size();
        // Guard against division by zero for disjoint materializations.
        return sizeOfIntersection > 0 ? sizeOfMaterializedAdditions / sizeOfIntersection : 0;
    }
}
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import reasoning.ChimpReasoner;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.Set;
/**
 * Impact metric comparing the number of newly inferred axioms against the
 * size of the new materialization: impact_{add,m_{i+1}} = |Δ+| / |m_{i+1}|.
 *
 * <p>A value of 1 means the materialization is completely new; values close
 * to 0 mean little was added.
 */
public class AddedInferenceNewRatio extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(AddedInferenceNewRatio.class);

    /** Reasoner wrapper providing the first and the current materialization. */
    private final ChimpReasoner customReasoner;

    public AddedInferenceNewRatio(ChimpReasoner customReasoner) {
        super("Added Inference New Ratio",
                "This measure signals the amount of new axioms in the materialization " +
                        "in comparison to the size of the new materialization, when " +
                        "comparing the materialization now and to the one calculated " +
                        "as the reasoner was started. If this number is one, it means " +
                        "that the materialization is completely new. The closer this " +
                        "number is to 0, the less has been added in the new materialization.",
                "impact_{add,m_{i+1}} = \\frac{\\Delta_i^+}{m_{i+1}}");
        this.customReasoner = customReasoner;
    }

    /**
     * Recomputes the metric from the reasoner's first and current
     * materializations; skips the update with a debug log when no
     * materialization exists yet.
     */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> currentMaterialization = customReasoner.getCurrentMaterialization();
        Set<OWLAxiom> firstMaterialization = customReasoner.getFirstMaterialization();
        if (currentMaterialization != null) {
            updateMetric(calculateMetricWithMaterializations(firstMaterialization, currentMaterialization));
        } else {
            log.debug("{} could not be calculated because there were no previous materializations", getDisplayName());
        }
    }

    /**
     * @param oldMaterialization materialization at reasoner start (m_i)
     * @param newMaterialization current materialization (m_{i+1})
     * @return |Δ+| / |m_{i+1}|, or 0 when the new materialization is empty
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        // Renamed from 'MaterializedAdditions' to follow lowerCamelCase, matching sibling metrics.
        Set<OWLAxiom> materializedAdditions = new HashSet<>(newMaterialization);
        materializedAdditions.removeAll(oldMaterialization);
        double sizeOfMaterializedAdditions = materializedAdditions.size();
        double sizeOfMaterialization = newMaterialization.size();
        return sizeOfMaterialization > 0 ? sizeOfMaterializedAdditions / sizeOfMaterialization : 0;
    }
}
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reasoning.ChimpReasoner;
import java.util.HashSet;
import java.util.Set;
/**
 * Impact metric relating the newly inferred axioms to the size of the old
 * materialization: impact_{add,m_i} = |Δ+| / |m_i|. Values above 1 mean more
 * axioms were added than the old materialization contained in total.
 */
public class AddedInferenceOldRatio extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(AddedInferenceOldRatio.class);

    /** Source of the first and current materializations. */
    private final ChimpReasoner reasoner;

    public AddedInferenceOldRatio(ChimpReasoner reasoner) {
        super("Added Inference Old Ratio",
                "This measure signals the amount of new axioms in the materialization " +
                        "in comparison to the size of the old materialization, when " +
                        "comparing the materialization now and to the one calculated " +
                        "as the reasoner was started. If this number is larger than 1, " +
                        "it means that the amount of new axioms in the materialization " +
                        "exceed the size of the old materialization.",
                "impact_{add,m_i} = \\frac{\\Delta_i^+}{m_i}");
        this.reasoner = reasoner;
    }

    /** Recomputes the metric; no-op (with a debug log) before the first materialization. */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> latest = reasoner.getCurrentMaterialization();
        Set<OWLAxiom> baseline = reasoner.getFirstMaterialization();
        if (latest == null) {
            log.debug(getDisplayName() + " could not be calculated because there were no previous materializations");
            return;
        }
        updateMetric(calculateMetricWithMaterializations(baseline, latest));
    }

    /**
     * @return |Δ+| / |old materialization|, or 0 when the old materialization is empty
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        Set<OWLAxiom> additions = new HashSet<>(newMaterialization);
        additions.removeAll(oldMaterialization);
        double denominator = oldMaterialization.size();
        if (denominator > 0) {
            return additions.size() / denominator;
        }
        return 0;
    }
}
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reasoning.ChimpReasoner;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
/**
 * Impact metric dividing the number of changed inferred axioms by the number
 * of changed ontology axioms:
 * γ = (|Δ+| + |Δ-|) / (|δ+| + |δ-|).
 * Indicates how impactful each ontology change is on the inference.
 */
public class ChangeImpact extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(ChangeImpact.class);

    /** Reasoner wrapper providing ontology snapshots and materializations. */
    private final ChimpReasoner customReasoner;

    public ChangeImpact(ChimpReasoner customReasoner) {
        super("Change-based Impact",
                // Fixed "inferrence" -> "inference" in the user-facing description.
                "We divide the number of changed inferred axioms by the number of changed ontology axioms. " +
                        "This metrics gives an indication of how impactful each change is on the inference. " +
                        "A number close and above 1 signals that the number of changes to the ontology have an " +
                        "equivalent number of changes on the inference.",
                "\\gamma = \\frac{|\\Delta_i^+| + |\\Delta_i^-|}{|\\delta_i^+| + |\\delta_i^-|}");
        this.customReasoner = customReasoner;
    }

    /**
     * Collects the current axioms of all active ontologies and recomputes the
     * metric against the first ontology/materialization snapshots. Skips the
     * update (with a debug log) when no materialization exists yet.
     */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> currentMaterialization = customReasoner.getCurrentMaterialization();
        Set<OWLAxiom> firstMaterialization = customReasoner.getFirstMaterialization();
        Set<OWLAxiom> firstOntology = customReasoner.getFirstOntology();
        Set<OWLOntology> activeOntologies = customReasoner.getCurrentOntology();
        // HashSet: only set membership is needed, no ordering — avoids TreeSet's
        // comparison cost and its reliance on OWLAxiom ordering.
        Set<OWLAxiom> currentOntology = new HashSet<>();
        for (OWLOntology ont : activeOntologies) {
            currentOntology.addAll(ont.getAxioms());
        }
        if (currentMaterialization != null) {
            updateMetric(calculateMetricWithMaterializations(firstMaterialization, currentMaterialization,
                    firstOntology, currentOntology));
        } else {
            log.debug("{} could not be calculated because there were no previous materializations", getDisplayName());
        }
    }

    /**
     * @param oldMaterialization materialization at reasoner start
     * @param newMaterialization current materialization
     * @param oldOntology        asserted axioms at reasoner start
     * @param newOntology        current asserted axioms
     * @return changed-inferred / changed-asserted ratio, or 0 when the ontology is unchanged
     */
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization,
                                                      Set<OWLAxiom> oldOntology, Set<OWLAxiom> newOntology) {
        Set<OWLAxiom> materializedAdditions = new HashSet<>(newMaterialization);
        materializedAdditions.removeAll(oldMaterialization);
        Set<OWLAxiom> materializedRemovals = new HashSet<>(oldMaterialization);
        materializedRemovals.removeAll(newMaterialization);
        Set<OWLAxiom> ontologyAdditions = new HashSet<>(newOntology);
        ontologyAdditions.removeAll(oldOntology);
        Set<OWLAxiom> ontologyRemovals = new HashSet<>(oldOntology);
        ontologyRemovals.removeAll(newOntology);
        double sizeOfMaterializedChanges = materializedAdditions.size() + materializedRemovals.size();
        double sizeOfOntologyChanges = ontologyAdditions.size() + ontologyRemovals.size();
        log.info("Ontology changes: {}", sizeOfOntologyChanges);
        log.info("Mat changes: {}", sizeOfMaterializedChanges);
        return sizeOfOntologyChanges > 0 ? sizeOfMaterializedChanges / sizeOfOntologyChanges : 0;
    }

    /**
     * Two-argument variant required by {@link ImpactMetric}; this metric needs
     * the ontology snapshots as well, so this overload always returns 0.
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        return 0;
    }
}
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import reasoning.ChimpReasoner;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.Set;
/**
 * Impact metric relating all materialization changes (additions plus
 * removals) to the larger of the two materializations:
 * impact_{D,a} = (|Δ+| + |Δ-|) / max(|m_i|, |m_{i+1}|).
 */
public class ChangeMaxImpact extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(ChangeMaxImpact.class);

    /** Source of the first and current materializations. */
    private final ChimpReasoner customReasoner;

    public ChangeMaxImpact(ChimpReasoner customReasoner) {
        super("Change Max Impact",
                "This measure signals the amount of changed axioms (removed and added) " +
                        "in the materialization in comparison to the entire materialization. " +
                        "Here the maximum is taken, meaning that the bigger materialization " +
                        "is chosen between the old and the new one.",
                "impact_{D,a} = \\frac{\\Delta_i^+ + \\Delta_i^-}{max(m_i,m_{i+1})}");
        this.customReasoner = customReasoner;
    }

    /** Recomputes the metric; no-op (with a debug log) before the first materialization. */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> latest = customReasoner.getCurrentMaterialization();
        Set<OWLAxiom> baseline = customReasoner.getFirstMaterialization();
        if (latest == null) {
            log.debug(getDisplayName() + " could not be calculated because there were no previous materializations");
            return;
        }
        updateMetric(calculateMetricWithMaterializations(baseline, latest));
    }

    /**
     * @return total symmetric-difference size over the larger materialization,
     *         or 0 when both materializations are empty
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        Set<OWLAxiom> added = new HashSet<>(newMaterialization);
        added.removeAll(oldMaterialization);
        Set<OWLAxiom> removed = new HashSet<>(oldMaterialization);
        removed.removeAll(newMaterialization);
        double changeCount = added.size() + removed.size();
        double largerSize = Math.max(oldMaterialization.size(), newMaterialization.size());
        if (largerSize > 0) {
            return changeCount / largerSize;
        }
        return 0;
    }
}
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reasoning.ChimpReasoner;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
/**
 * Impact metric relating non-hierarchy (non-SubClassOf) materialization
 * changes to non-hierarchy ontology changes — the "noise" part of the change
 * once class-hierarchy edits are excluded.
 */
public class ChangeNoiseImpact extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(ChangeNoiseImpact.class);

    /** Reasoner wrapper providing ontology snapshots and materializations. */
    private final ChimpReasoner customReasoner;

    public ChangeNoiseImpact(ChimpReasoner customReasoner) {
        super("Change Noise Impact",
                // Fixed "exclusing" -> "excluding" in the user-facing description.
                "This measure signals the amount of changed axioms (added and removed) in the materialization " +
                        "in comparison to the number of changes in the ontology excluding all changes on the hierarchy.",
                "\\gamma_\\sqsubseteq = \\frac{|\\Delta_i^+| - h_\\Delta_i^+ + |\\Delta_i^-| - h_\\Delta_i^-}" +
                        "{|\\delta_i^+| - h_\\delta_i^+ + |\\delta_i^-| - h_\\delta_i^-}");
        this.customReasoner = customReasoner;
    }

    /**
     * Collects the current axioms of all active ontologies and recomputes the
     * metric against the first ontology/materialization snapshots. Skips the
     * update (with a debug log) when no materialization exists yet.
     */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> currentMaterialization = customReasoner.getCurrentMaterialization();
        Set<OWLAxiom> firstMaterialization = customReasoner.getFirstMaterialization();
        Set<OWLAxiom> firstOntology = customReasoner.getFirstOntology();
        Set<OWLOntology> activeOntologies = customReasoner.getCurrentOntology();
        Set<OWLAxiom> currentOntology = new TreeSet<>();
        for (OWLOntology ont : activeOntologies) {
            currentOntology.addAll(ont.getAxioms());
        }
        if (currentMaterialization != null) {
            updateMetric(calculateMetricWithMaterializations(firstMaterialization, currentMaterialization, firstOntology, currentOntology));
        } else {
            log.debug("{} could not be calculated because there were no previous materializations", getDisplayName());
        }
    }

    /**
     * @return non-hierarchy materialization changes over non-hierarchy
     *         ontology changes, or 0 when the ontology has no such changes
     */
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization,
                                                      Set<OWLAxiom> oldOntology, Set<OWLAxiom> newOntology) {
        double sizeOfMaterializedChanges =
                nonHierarchyDifference(newMaterialization, oldMaterialization).size()
                        + nonHierarchyDifference(oldMaterialization, newMaterialization).size();
        double sizeOfOntologyChanges =
                nonHierarchyDifference(newOntology, oldOntology).size()
                        + nonHierarchyDifference(oldOntology, newOntology).size();
        return sizeOfOntologyChanges > 0 ? sizeOfMaterializedChanges / sizeOfOntologyChanges : 0;
    }

    /** Axioms in {@code base} but not in {@code toRemove}, excluding SubClassOf axioms. */
    private Set<OWLAxiom> nonHierarchyDifference(Set<OWLAxiom> base, Set<OWLAxiom> toRemove) {
        Set<OWLAxiom> difference = new HashSet<>(base);
        difference.removeAll(toRemove);
        return difference.stream()
                .filter(owlAxiom -> owlAxiom.getAxiomType() != AxiomType.SUBCLASS_OF)
                .collect(Collectors.toSet());
    }

    /**
     * Two-argument variant required by {@link ImpactMetric}; this metric needs
     * the ontology snapshots as well, so this overload always returns 0.
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        return 0;
    }
}
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reasoning.ChimpReasoner;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
/**
 * Impact metric relating changed SubClassOf axioms in the materialization to
 * changed SubClassOf axioms in the ontology:
 * γ = (h_{Δ+} + h_{Δ-}) / (h_{δ+} + h_{δ-}).
 */
public class ChangeSubclassImpact extends Metric implements ImpactMetric {

    private final Logger log = LoggerFactory.getLogger(ChangeSubclassImpact.class);

    /** Reasoner wrapper providing ontology snapshots and materializations. */
    private final ChimpReasoner customReasoner;

    public ChangeSubclassImpact(ChimpReasoner customReasoner) {
        super("Change Hierarchy Impact",
                "This measure signals the amount of changed subclass axioms (added and removed) in the materialization" +
                        " with h_{\\Delta_i^+} and h_{\\Delta_i^-} " +
                        "in comparison to the number of changed subclass axioms in the ontology, which are denoted with h_{\\delta_i^+}" +
                        "and h_{\\delta_i^-}." +
                        "The changes to the materialization do not include the changes to the ontology, therefore," +
                        // BUG FIX: the original "\backslash" embedded a literal backspace
                        // character (\b is a Java escape) instead of the LaTeX command.
                        " \\Delta_i \\backslash \\delta_i = {}",
                "\\gamma = \\frac{h_{\\Delta_i^+} + h_{\\Delta_i^-}}{h_{\\delta_i^+} + h_{\\delta_i^-}}");
        this.customReasoner = customReasoner;
    }

    /**
     * Collects the current axioms of all active ontologies and recomputes the
     * metric against the first ontology/materialization snapshots. Skips the
     * update (with a debug log) when no materialization exists yet.
     */
    @Override
    public void calculateMetric() {
        Set<OWLAxiom> currentMaterialization = customReasoner.getCurrentMaterialization();
        Set<OWLAxiom> firstMaterialization = customReasoner.getFirstMaterialization();
        Set<OWLAxiom> firstOntology = customReasoner.getFirstOntology();
        Set<OWLOntology> activeOntologies = customReasoner.getCurrentOntology();
        Set<OWLAxiom> currentOntology = new TreeSet<>();
        for (OWLOntology ont : activeOntologies) {
            currentOntology.addAll(ont.getAxioms());
        }
        if (currentMaterialization != null) {
            updateMetric(calculateMetricWithMaterializations(firstMaterialization, currentMaterialization, firstOntology, currentOntology));
        } else {
            log.debug("{} could not be calculated because there were no previous materializations", getDisplayName());
        }
    }

    /**
     * @return hierarchy (SubClassOf) materialization changes over hierarchy
     *         ontology changes, or 0 when the ontology hierarchy is unchanged
     */
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization,
                                                      Set<OWLAxiom> oldOntology, Set<OWLAxiom> newOntology) {
        double sizeOfMaterializedChanges =
                hierarchyDifference(newMaterialization, oldMaterialization).size()
                        + hierarchyDifference(oldMaterialization, newMaterialization).size();
        double sizeOfOntologyChanges =
                hierarchyDifference(newOntology, oldOntology).size()
                        + hierarchyDifference(oldOntology, newOntology).size();
        return sizeOfOntologyChanges > 0 ? sizeOfMaterializedChanges / sizeOfOntologyChanges : 0;
    }

    /** SubClassOf axioms in {@code base} but not in {@code toRemove}. */
    private Set<OWLAxiom> hierarchyDifference(Set<OWLAxiom> base, Set<OWLAxiom> toRemove) {
        Set<OWLAxiom> difference = new HashSet<>(base);
        difference.removeAll(toRemove);
        return difference.stream()
                .filter(owlAxiom -> owlAxiom.getAxiomType() == AxiomType.SUBCLASS_OF)
                .collect(Collectors.toSet());
    }

    /**
     * Two-argument variant required by {@link ImpactMetric}; this metric needs
     * the ontology snapshots as well, so this overload always returns 0.
     */
    @Override
    public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
        return 0;
    }
}
......@@ -11,7 +11,9 @@ import reasoning.ChimpReasoner;
import java.util.Set;
public class GraphDistanceImpact extends Metric implements ImpactMetric {
private final Logger log = LoggerFactory.getLogger(GraphDistanceImpact.class);
private final ChimpReasoner chimpReasoner;
private final OWLReasoner underlyingReasoner;
private Double I_M_old;
......
package metrics.impact;
import metrics.ImpactMetric;
import metrics.Metric;
import reasoning.ChimpReasoner;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
public class HierarchyImpact extends Metric implements ImpactMetric {
private final Logger log = LoggerFactory.getLogger(HierarchyImpact.class);
private final ChimpReasoner customReasoner;
public HierarchyImpact(ChimpReasoner customReasoner) {
super("Hierarchy Impact",
"This measure signals the amount of changed SubclassOf axioms (added and removed) in the materialization " +
"in comparison to the unchanged part of the materialization, when " +
"comparing the materialization now and to the one calculated " +
"as the reasoner was started. If this number is larger than 1, " +
"it means that the amount of changes to the hierarchy in the materialization " +
"exceed the amount of shared axioms between the two materializations.",
"impact_{d_h,m_{i,i+1}} = \\frac{h_{\\Delta_i}}{m_{i,i+1}}");
this.customReasoner = customReasoner;
}
@Override
public void calculateMetric() {
Set<OWLAxiom> currentMaterialization = customReasoner.getCurrentMaterialization();
Set<OWLAxiom> firstMaterialization = customReasoner.getFirstMaterialization();
if (currentMaterialization != null) {
updateMetric(calculateMetricWithMaterializations(firstMaterialization, currentMaterialization));
} else {
log.debug(getDisplayName() + " could not be calculated because there were no previous materializations");
}
}
@Override
public double calculateMetricWithMaterializations(Set<OWLAxiom> oldMaterialization, Set<OWLAxiom> newMaterialization) {
Set<OWLAxiom> materializedAdditions = new