From 86e3bd22fd599b4730524f0c5dd09e5c0aaf04a2 Mon Sep 17 00:00:00 2001
From: goodb
Date: Fri, 27 Sep 2019 14:46:55 -0700
Subject: [PATCH] Implement a pattern for retrieving canonical identifiers in GPAD output

This work is in reference to
https://github.com/geneontology/pathways2GO/issues/71

It allows entity ontologies other than NEO to be used to construct GO-CAMs
while keeping the GPAD output strictly aligned with canonical terminologies
such as UniProt for human genes. It works by adding the annotation property
http://geneontology.org/lego/canonical_record to link new terms (e.g.
Reactome entities) to canonical terms (e.g. the corresponding UniProt
records). When these annotations are present, the GPAD SPARQL export first
converts the model into one in which every external type is replaced by its
canonical type; the rest of the GPAD export process is unchanged.
Illustrative sketches of the type swap and of the tbox import pre-merge are
appended after the diff.
---
 .../minerva/cli/MinervaCommandRunner.java     |   2 +-
 .../minerva/CoreMolecularModelManager.java    | 274 +++++++++++-------
 .../UndoAwareMolecularModelManager.java       |   2 +
 .../minerva/server/StartUpTool.java           |  77 +++--
 .../server/handler/OperationsImpl.java        |   2 +-
 5 files changed, 214 insertions(+), 143 deletions(-)

diff --git a/minerva-cli/src/main/java/org/geneontology/minerva/cli/MinervaCommandRunner.java b/minerva-cli/src/main/java/org/geneontology/minerva/cli/MinervaCommandRunner.java
index 8eea764c..639ddccf 100644
--- a/minerva-cli/src/main/java/org/geneontology/minerva/cli/MinervaCommandRunner.java
+++ b/minerva-cli/src/main/java/org/geneontology/minerva/cli/MinervaCommandRunner.java
@@ -419,7 +419,7 @@ else if (opts.nextEq("--ontology")) {
 			BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(ontology, curieHandler, modelIdPrefix, inputDB, null);
 			for (IRI modelIRI : m3.getAvailableModelIds()) {
 				try {
-					String gpad = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getDoNotAnnotateSubset()).exportGPAD(m3.createInferredModel(modelIRI));
+					String gpad = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getDoNotAnnotateSubset()).exportGPAD(m3.createCanonicalInferredModel(modelIRI));
 					String fileName = StringUtils.replaceOnce(modelIRI.toString(), modelIdPrefix, "") + ".gpad";
 					Writer writer = new OutputStreamWriter(new FileOutputStream(Paths.get(gpadOutputFolder, fileName).toFile()), StandardCharsets.UTF_8);
 					writer.write(gpad);
diff --git a/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java b/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java
index 3d2770fa..fa91cd19 100644
--- a/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java
+++ b/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java
@@ -1,6 +1,7 @@
 package org.geneontology.minerva;
 
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -70,12 +71,14 @@
 import org.semanticweb.owlapi.model.RemoveOntologyAnnotation;
 import org.semanticweb.owlapi.model.SetOntologyID;
 import org.semanticweb.owlapi.model.parameters.Imports;
+import org.semanticweb.owlapi.model.parameters.OntologyCopy;
 import org.semanticweb.owlapi.oboformat.OBOFormatOWLAPIParserFactory;
 import org.semanticweb.owlapi.reasoner.OWLReasoner;
 import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
 import
org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory; import org.semanticweb.owlapi.rio.RioMemoryTripleSource; import org.semanticweb.owlapi.rio.RioParserImpl; +import org.semanticweb.owlapi.search.EntitySearcher; import org.semanticweb.owlapi.util.PriorityCollection; import com.google.common.base.Optional; @@ -95,14 +98,14 @@ * @param object for holding meta data associated with each operation */ public abstract class CoreMolecularModelManager { - + private static Logger LOG = Logger.getLogger(CoreMolecularModelManager.class); // axiom has evidence RO:0002612 private static final IRI HAS_EVIDENCE_IRI = IRI.create("http://purl.obolibrary.org/obo/RO_0002612"); // legacy private static final IRI HAS_EVIDENCE_IRI_OLD = AnnotationShorthand.evidence.getAnnotationProperty(); - + private static final OWLAnnotationProperty HAS_SHORTHAND = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#shorthand")); private static final OWLAnnotationProperty IN_SUBSET = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#inSubset")); private static final Set DO_NOT_ANNOTATE_SUBSETS = new HashSet<>(); @@ -112,25 +115,25 @@ public abstract class CoreMolecularModelManager { } final OWLOntology tbox; -// final OWLReasonerFactory rf; + // final OWLReasonerFactory rf; final OWLReasoner tbox_reasoner; private final IRI tboxIRI; final Map modelMap = new HashMap(); Set additionalImports; - + private final RuleEngine ruleEngine; private final Map legacyRelationIndex = new HashMap(); private final Map tboxLabelIndex = new HashMap(); private final Map tboxShorthandIndex = new HashMap(); private final Set doNotAnnotateSubset = new HashSet<>(); - - + + /** * Use start up time to create a unique prefix for id generation */ static String uniqueTop = Long.toHexString(Math.abs((System.currentTimeMillis()/1000))); static final AtomicLong instanceCounter = new AtomicLong(0L); - + /** * Generate a new id from the unique server prefix and a global counter * @@ -141,7 +144,7 @@ private static String localUnique(){ String unique = uniqueTop + String.format("%08d", counterValue); return unique; } - + /** * Check that the given string looks similar to a local unique id * @@ -162,7 +165,7 @@ static boolean isLocalUnique(String s) { } return result; } - + private static boolean isHex(char c) { // check that char is a digit or a-e boolean result = false; @@ -174,7 +177,7 @@ else if (c == 'a' || c == 'b' || c == 'c' || c == 'd' || c == 'e' || c == 'f') { } return result; } - + /** * Generate an id and prepend the given prefixes. 
* @@ -197,7 +200,7 @@ static IRI generateId(CharSequence...prefixes) { * @throws OWLOntologyCreationException */ public CoreMolecularModelManager(OWLOntology tbox) throws OWLOntologyCreationException { - super(); + super(); this.tbox = tbox; tboxIRI = getTboxIRI(tbox); this.ruleEngine = initializeRuleEngine(); @@ -234,7 +237,7 @@ private static synchronized Set removeOBOParserFactories(OWLOn } return copied; } - + private static synchronized void resetOBOParserFactories(OWLOntologyManager m, Set factories) { m.setOntologyParsers(factories); } @@ -271,42 +274,36 @@ protected void init() throws OWLOntologyCreationException { public OWLOntology getOntology() { return tbox; } - + public Map getLegacyRelationShorthandIndex() { return Collections.unmodifiableMap(this.legacyRelationIndex); } - + public Map getTboxLabelIndex() { return Collections.unmodifiableMap(this.tboxLabelIndex); } - + public Map getTboxShorthandIndex() { return Collections.unmodifiableMap(this.tboxShorthandIndex); } - + public Set getDoNotAnnotateSubset() { return Collections.unmodifiableSet(this.doNotAnnotateSubset); } - + public RuleEngine getRuleEngine() { return ruleEngine; } - + private RuleEngine initializeRuleEngine() { Set rules = new HashSet<>(); rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.translate(getOntology(), Imports.INCLUDED, true, true, true, true)).asJava()); rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.indirectRules(getOntology())).asJava()); return new RuleEngine(Bridge.rulesFromJena(JavaConverters.asScalaSetConverter(rules).asScala()), true); } - - /** - * Return Arachne working memory representing LEGO model combined with inference rules. - * This model will not remain synchronized with changes to data. - * @param modelId - * @return Jena model - */ - public WorkingMemory createInferredModel(IRI modelId) { - Set statements = JavaConverters.setAsJavaSetConverter(SesameJena.ontologyAsTriples(getModelAbox(modelId))).asJava(); + + public WorkingMemory createInferredModel(OWLOntology abox, IRI modelId) { + Set statements = JavaConverters.setAsJavaSetConverter(SesameJena.ontologyAsTriples(abox)).asJava(); Set triples = statements.stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); try { // Using model's ontology IRI so that a spurious different ontology declaration triple isn't added @@ -317,8 +314,61 @@ public WorkingMemory createInferredModel(IRI modelId) { LOG.error("Couldn't add rbox statements to data model.", e); } return getRuleEngine().processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + } - + + /** + * Return Arachne working memory representing LEGO model combined with inference rules. + * This model will not remain synchronized with changes to data. 
+ * @param modelId + * @return Jena model + */ + public WorkingMemory createInferredModel(IRI modelId) { + return createInferredModel(getModelAbox(modelId), modelId); + } + + public WorkingMemory createCanonicalInferredModel(IRI modelId) { + //swap out any non-canonical types + OWLOntology source_abox = getModelAbox(modelId); + OWLOntologyManager aman = OWLManager.createOWLOntologyManager(); + OWLDataFactory df = aman.getOWLDataFactory(); + OWLAnnotationProperty canonical_record = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/canonical_record")); + OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); + try { + OWLOntology abox = aman.copyOntology(source_abox, OntologyCopy.DEEP); + OWLReasoner abox_reasoner = reasonerFactory.createReasoner(abox); + //convert to canonical wherever possible + abox.getIndividualsInSignature().forEach(i->{ + Set types = abox_reasoner.getTypes(i, true).getFlattened(); + for(OWLClass type : types) { + Collection canons = EntitySearcher.getAnnotationObjects(type, tbox, canonical_record); + //adding multiple types to an instance of a set object is + //probably not kosher.. but seems to work for now. + //more correct to create new instances for each + if(canons!=null&&canons.size()>0) { + for(OWLAnnotation canon : canons) { + if(canon.getValue().asIRI().isPresent()) { + OWLClass canonical = df.getOWLClass(canon.getValue().asIRI().get()); + //direct swap + //remove the old one + OWLClassAssertionAxiom original = df.getOWLClassAssertionAxiom(type, i); + aman.removeAxiom(abox, original); + //add the new one + OWLClassAssertionAxiom canonical_type = df.getOWLClassAssertionAxiom(canonical, i); + aman.addAxiom(abox, canonical_type); + } + } + } + } + }); + return createInferredModel(abox, modelId); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + return createInferredModel(source_abox, modelId); + } + } + private void initializeLegacyRelationIndex() { synchronized(legacyRelationIndex) { OWLAnnotationProperty rdfsLabel = OWLManager.getOWLDataFactory().getRDFSLabel(); @@ -337,7 +387,7 @@ private void initializeLegacyRelationIndex() { } } } - + private void initializeTboxLabelIndex() { synchronized(tboxLabelIndex) { OWLAnnotationProperty rdfsLabel = OWLManager.getOWLDataFactory().getRDFSLabel(); @@ -350,7 +400,7 @@ private void initializeTboxLabelIndex() { } } } - + private void initializeTboxShorthandIndex() { synchronized(tboxShorthandIndex) { for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { @@ -362,7 +412,7 @@ private void initializeTboxShorthandIndex() { } } } - + private void initializeDoNotAnnotateSubset() { synchronized(doNotAnnotateSubset) { for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { @@ -385,7 +435,7 @@ public void addImports(Iterable imports) { } } } - + public Collection getImports() { Set allImports = new HashSet(); allImports.add(tboxIRI); @@ -403,15 +453,15 @@ public Set getIndividuals(IRI modelId) { return mod.getAboxOntology().getIndividualsInSignature(); } - -// /** -// * @param mod -// * @param q -// * @return all individuals in the model that satisfy q -// */ -// public Set getIndividualsByQuery(ModelContainer mod, OWLClassExpression q) { -// return mod.getReasoner().getInstances(q, false).getFlattened(); -// } + + // /** + // * @param mod + // * @param q + // * @return all individuals in the model that 
satisfy q + // */ + // public Set getIndividualsByQuery(ModelContainer mod, OWLClassExpression q) { + // return mod.getReasoner().getInstances(q, false).getFlattened(); + // } /** * @param model @@ -423,29 +473,29 @@ public OWLNamedIndividual createIndividual(ModelContainer model, OWLClassExpress OWLNamedIndividual individual = createIndividual(model, ce, null, metadata); return individual; } - + OWLNamedIndividual createIndividual(ModelContainer model, OWLClassExpression ce, Set annotations, METADATA metadata) { Pair> pair = createIndividual(model.getModelId(), model.getAboxOntology(), ce, annotations); addAxioms(model, pair.getRight(), metadata); return pair.getLeft(); } - + OWLNamedIndividual createIndividualWithIRI(ModelContainer model, IRI individualIRI, Set annotations, METADATA metadata) { Pair> pair = createIndividualInternal(individualIRI, model.getAboxOntology(), null, annotations); addAxioms(model, pair.getRight(), metadata); return pair.getLeft(); } - + public static Pair> createIndividual(IRI modelId, OWLOntology abox, OWLClassExpression ce, Set annotations) { IRI iri = generateId(modelId, "/"); return createIndividualInternal(iri, abox, ce, annotations); } - + private static Pair> createIndividualInternal(IRI iri, OWLOntology abox, OWLClassExpression ce, Set annotations) { LOG.info("Generating individual for IRI: "+iri); OWLDataFactory f = abox.getOWLOntologyManager().getOWLDataFactory(); OWLNamedIndividual i = f.getOWLNamedIndividual(iri); - + // create axioms Set axioms = new HashSet(); // declaration @@ -456,23 +506,23 @@ private static Pair> createIndividualInternal( axioms.add(f.getOWLAnnotationAssertionAxiom(iri, annotation)); } } - + if (ce != null) { OWLClassAssertionAxiom typeAxiom = createType(f, i, ce); if (typeAxiom != null) { axioms.add(typeAxiom); } } - + return Pair.of(i, axioms); } - + public static class DeleteInformation { public final Set usedIRIs = new HashSet(); public final Set updated = new HashSet(); public final Set touched = new HashSet(); } - + /** * Deletes an individual and return all IRIs used as an annotation value. 
* Also tries to delete all annotations (OWLObjectPropertyAssertionAxiom @@ -487,19 +537,19 @@ public static class DeleteInformation { public DeleteInformation deleteIndividual(ModelContainer model, OWLNamedIndividual i, METADATA metadata) { Set toRemoveAxioms = new HashSet(); final DeleteInformation deleteInformation = new DeleteInformation(); - + final OWLOntology ont = model.getAboxOntology(); final OWLDataFactory f = model.getOWLDataFactory(); - + // Declaration axiom toRemoveAxioms.add(model.getOWLDataFactory().getOWLDeclarationAxiom(i)); - + // Logic axiom for (OWLAxiom ax : ont.getAxioms(i, Imports.EXCLUDED)) { extractEvidenceIRIValues(ax.getAnnotations(), deleteInformation.usedIRIs); toRemoveAxioms.add(ax); } - + // OWLObjectPropertyAssertionAxiom Set allAssertions = ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION); final IRI iIRI = i.getIRI(); @@ -536,7 +586,7 @@ public DeleteInformation deleteIndividual(ModelContainer model, OWLNamedIndividu extractEvidenceIRIValues(axiom.getAnnotation(), deleteInformation.usedIRIs); toRemoveAxioms.add(axiom); } - + // search for all annotations which use individual IRI as value Set axioms = ont.getAxioms(AxiomType.ANNOTATION_ASSERTION); for (OWLAnnotationAssertionAxiom ax : axioms) { @@ -545,12 +595,12 @@ public DeleteInformation deleteIndividual(ModelContainer model, OWLNamedIndividu toRemoveAxioms.add(ax); OWLAnnotationSubject subject = ax.getSubject(); subject.accept(new OWLAnnotationSubjectVisitor() { - + @Override public void visit(OWLAnonymousIndividual individual) { // do nothing } - + @Override public void visit(IRI iri) { // check if they subject is a declared named individual @@ -562,15 +612,15 @@ public void visit(IRI iri) { } } } - + removeAxioms(model, toRemoveAxioms, metadata); if (deleteInformation.updated.isEmpty() == false) { addAxioms(model, deleteInformation.updated, metadata); } - + return deleteInformation; } - + public static Set extractEvidenceIRIValues(Set annotations) { if (annotations == null || annotations.isEmpty()) { return Collections.emptySet(); @@ -579,7 +629,7 @@ public static Set extractEvidenceIRIValues(Set annotations) extractEvidenceIRIValues(annotations, iriSet); return iriSet; } - + private static void extractEvidenceIRIValues(Set annotations, final Set iriSet) { if (annotations != null) { for (OWLAnnotation annotation : annotations) { @@ -587,23 +637,23 @@ private static void extractEvidenceIRIValues(Set annotations, fin } } } - + private static void extractEvidenceIRIValues(OWLAnnotation annotation, final Set iriSet) { if (annotation != null) { OWLAnnotationProperty property = annotation.getProperty(); if (HAS_EVIDENCE_IRI.equals(property.getIRI()) || HAS_EVIDENCE_IRI_OLD.equals(property.getIRI())){ annotation.getValue().accept(new OWLAnnotationValueVisitor() { - + @Override public void visit(OWLLiteral literal) { // ignore } - + @Override public void visit(OWLAnonymousIndividual individual) { // ignore } - + @Override public void visit(IRI iri) { iriSet.add(iri); @@ -612,11 +662,11 @@ public void visit(IRI iri) { } } } - + public void addAnnotations(ModelContainer model, OWLNamedIndividual i, Collection annotations, METADATA metadata) { addAnnotations(model, i.getIRI(), annotations, metadata); } - + public void addAnnotations(ModelContainer model, IRI subject, Collection annotations, METADATA metadata) { Set axioms = new HashSet(); OWLDataFactory f = model.getOWLDataFactory(); @@ -625,7 +675,7 @@ public void addAnnotations(ModelContainer model, IRI subject, Collection removeAxioms = new HashSet(); 
OWLDataFactory f = model.getOWLDataFactory(); @@ -639,7 +689,7 @@ public void updateAnnotation(ModelContainer model, IRI subject, OWLAnnotation up removeAxioms(model, removeAxioms, metadata); addAxiom(model, f.getOWLAnnotationAssertionAxiom(subject, update), metadata); } - + public void addModelAnnotations(ModelContainer model, Collection annotations, METADATA metadata) { OWLOntology aBox = model.getAboxOntology(); List changes = new ArrayList(); @@ -648,7 +698,7 @@ public void addModelAnnotations(ModelContainer model, Collection } applyChanges(model, changes, metadata); } - + public void updateAnnotation(ModelContainer model, OWLAnnotation update, METADATA metadata) { OWLOntology aBox = model.getAboxOntology(); List changes = new ArrayList(); @@ -666,7 +716,7 @@ public void updateAnnotation(ModelContainer model, OWLAnnotation update, METADAT public void removeAnnotations(ModelContainer model, OWLNamedIndividual i, Collection annotations, METADATA metadata) { removeAnnotations(model, i.getIRI(), annotations, metadata); } - + void removeAnnotations(ModelContainer model, IRI subject, Collection annotations, METADATA metadata) { OWLOntology ont = model.getAboxOntology(); Set toRemove = new HashSet(); @@ -688,14 +738,14 @@ public void removeAnnotations(ModelContainer model, Collection an } applyChanges(model, changes, metadata); } - + public void addDataProperty(ModelContainer model, OWLNamedIndividual i, OWLDataProperty prop, OWLLiteral literal, METADATA metadata) { OWLAxiom axiom = model.getOWLDataFactory().getOWLDataPropertyAssertionAxiom(prop, i, literal); addAxiom(model, axiom, metadata); } - + public void removeDataProperty(ModelContainer model, OWLNamedIndividual i, OWLDataProperty prop, OWLLiteral literal, METADATA metadata) { @@ -707,12 +757,12 @@ public void removeDataProperty(ModelContainer model, break; } } - + if (toRemove != null) { removeAxiom(model, toRemove, metadata); } } - + /** * Fetches a model by its Id * @@ -732,7 +782,7 @@ public ModelContainer getModel(IRI id) { return modelMap.get(id); } } - + /** * Retrieve the abox ontology. May skip loading the imports. * This method is mostly intended to read metadata from a model. @@ -753,7 +803,7 @@ public OWLOntology getModelAbox(IRI id) { } return abox; } - + public boolean isModelModified(IRI modelId) { ModelContainer model = modelMap.get(modelId); if (model != null) { @@ -763,14 +813,14 @@ public boolean isModelModified(IRI modelId) { // non in-memory models are considered not modified. 
return false; } - + /** * @param modelId * @return ontology * @throws OWLOntologyCreationException */ protected abstract OWLOntology loadModelABox(IRI modelId) throws OWLOntologyCreationException; - + /** * @param id */ @@ -779,14 +829,14 @@ public void unlinkModel(IRI id) { model.dispose(); modelMap.remove(id); } - + /** * @return ids for all loaded models */ public Set getModelIds() { return modelMap.keySet(); } - + /** * internal method to cleanup this instance */ @@ -809,7 +859,7 @@ public void dispose() { public String exportModel(ModelContainer model, OWLDocumentFormat ontologyFormat) throws OWLOntologyStorageException { final OWLOntology aBox = model.getAboxOntology(); final OWLOntologyManager manager = aBox.getOWLOntologyManager(); - + // make sure the exported ontology has an ontologyId and that it maps to the modelId final IRI expectedABoxIRI = model.getModelId(); Optional currentABoxIRI = aBox.getOntologyID().getOntologyIRI(); @@ -831,12 +881,12 @@ public String exportModel(ModelContainer model, OWLDocumentFormat ontologyFormat else { manager.saveOntology(aBox, outputStream); } - + // extract the string from the buffer String modelString = outputStream.toString(); return modelString; } - + /** * Try to load (or replace) a model with the given ontology. It is expected * that the content is an A-Box ontology, which imports the T-BOX. Also the @@ -873,7 +923,7 @@ public ModelContainer importModel(String modelData) throws OWLOntologyCreationEx finally { resetOBOParserFactories(manager, originalFactories); } - + // try to extract modelId IRI modelId = null; Optional ontologyIRI = modelOntology.getOntologyID().getOntologyIRI(); @@ -888,16 +938,16 @@ public ModelContainer importModel(String modelData) throws OWLOntologyCreationEx if (existingModel != null) { unlinkModel(modelId); } - + // add to internal model ModelContainer newModel = addModel(modelId, modelOntology); - + // update imports updateImports(newModel); - + return newModel; } - + protected abstract void loadModel(IRI modelId, boolean isOverride) throws OWLOntologyCreationException; ModelContainer addModel(IRI modelId, OWLOntology abox) throws OWLOntologyCreationException { @@ -918,7 +968,7 @@ public void addType(IRI modelId, OWLNamedIndividual i, OWLClass c, METADATA meta ModelContainer model = getModel(modelId); addType(model, i, c, metadata); } - + /** * Adds ClassAssertion(c,i) to specified model * @@ -964,7 +1014,7 @@ public void addType(IRI modelId, ModelContainer model = getModel(modelId); addType(model, i, p, filler, metadata); } - + /** * Adds a ClassAssertion, where the class expression instantiated is an * ObjectSomeValuesFrom expression @@ -1008,9 +1058,9 @@ public void removeType(ModelContainer model, OWLIndividual i, removeAxiom(model, ax, metadata); } } - + } - + void removeType(ModelContainer model, OWLIndividual i, OWLObjectPropertyExpression p, @@ -1057,7 +1107,7 @@ public static OWLObjectPropertyAssertionAxiom createFact(OWLDataFactory f, public Set removeFact(ModelContainer model, OWLObjectPropertyExpression p, OWLIndividual i, OWLIndividual j, METADATA metadata) { OWLDataFactory f = model.getOWLDataFactory(); - + OWLOntology ont = model.getAboxOntology(); OWLAxiom toRemove = null; Set iriSet = new HashSet(); @@ -1076,7 +1126,7 @@ public Set removeFact(ModelContainer model, OWLObjectPropertyExpression p, removeAxiom(model, toRemove, metadata); return iriSet; } - + public void addAnnotations(ModelContainer model, OWLObjectPropertyExpression p, OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, 
METADATA metadata) { @@ -1091,7 +1141,7 @@ public void addAnnotations(ModelContainer model, OWLObjectPropertyExpression p, } addAnnotations(model, toModify, annotations, metadata); } - + void addAnnotations(ModelContainer model, OWLObjectPropertyAssertionAxiom toModify, Set annotations, METADATA metadata) { if (toModify != null) { @@ -1100,7 +1150,7 @@ void addAnnotations(ModelContainer model, OWLObjectPropertyAssertionAxiom toModi modifyAnnotations(toModify, combindedAnnotations, model, metadata); } } - + public void updateAnnotation(ModelContainer model, OWLObjectPropertyExpression p, OWLNamedIndividual i, OWLNamedIndividual j, OWLAnnotation update, METADATA metadata) { @@ -1115,7 +1165,7 @@ public void updateAnnotation(ModelContainer model, OWLObjectPropertyExpression p } updateAnnotation(model, toModify, update, metadata); } - + OWLObjectPropertyAssertionAxiom updateAnnotation(ModelContainer model, OWLObjectPropertyAssertionAxiom toModify, OWLAnnotation update, METADATA metadata) { @@ -1133,7 +1183,7 @@ OWLObjectPropertyAssertionAxiom updateAnnotation(ModelContainer model, } return newAxiom; } - + public OWLObjectPropertyAssertionAxiom removeAnnotations(ModelContainer model, OWLObjectPropertyExpression p, OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, METADATA metadata) { OWLOntology ont = model.getAboxOntology(); @@ -1153,7 +1203,7 @@ public OWLObjectPropertyAssertionAxiom removeAnnotations(ModelContainer model, O } return newAxiom; } - + private OWLObjectPropertyAssertionAxiom modifyAnnotations(OWLObjectPropertyAssertionAxiom axiom, Set replacement, ModelContainer model, METADATA metadata) { @@ -1167,7 +1217,7 @@ private OWLObjectPropertyAssertionAxiom modifyAnnotations(OWLObjectPropertyAsser applyChanges(model, changes, metadata); return newAxiom; } - + public void addAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { OWLOntology ont = model.getAboxOntology(); List changes = Collections.singletonList(new AddAxiom(ont, axiom)); @@ -1178,7 +1228,7 @@ public void addAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); } } - + void addAxioms(ModelContainer model, Set axioms, METADATA metadata) { OWLOntology ont = model.getAboxOntology(); List changes = new ArrayList(axioms.size()); @@ -1192,7 +1242,7 @@ void addAxioms(ModelContainer model, Set axioms, METADATA me applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); } } - + void removeAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { OWLOntology ont = model.getAboxOntology(); List changes = Collections.singletonList(new RemoveAxiom(ont, axiom)); @@ -1208,7 +1258,7 @@ void removeAxioms(IRI modelId, Set axioms, METADATA metadata) { ModelContainer model = getModel(modelId); removeAxioms(model, axioms, metadata); } - + void removeAxioms(ModelContainer model, Set axioms, METADATA metadata) { OWLOntology ont = model.getAboxOntology(); List changes = new ArrayList(axioms.size()); @@ -1232,13 +1282,13 @@ private void applyChanges(ModelContainer model, List changes, applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); } } - + private void applyChanges(ModelContainer model, OWLOntologyManager m, List changes, METADATA metadata) { List appliedChanges = model.applyChanges(changes); addToHistory(model, appliedChanges, metadata); } - + /** * Hook for implementing an undo and redo. 
* @@ -1300,7 +1350,7 @@ public IRI getDocumentIRI(IRI ontologyIRI) { resetOBOParserFactories(manager, originalFactories); } } - + private static OWLOntology loadOWLOntologyDocumentSource(final OWLOntologyDocumentSource source, final OWLOntologyManager manager) throws OWLOntologyCreationException { final OWLOntology ontology; if (source instanceof RioMemoryTripleSource) { @@ -1316,7 +1366,7 @@ private static OWLOntology loadOWLOntologyDocumentSource(final OWLOntologyDocume } return ontology; } - + /** * This method will check the given model and update the import declarations. * It will add missing IRIs and remove obsolete ones. @@ -1328,10 +1378,10 @@ private static OWLOntology loadOWLOntologyDocumentSource(final OWLOntologyDocume public void updateImports(ModelContainer model) { updateImports(model.getAboxOntology(), tboxIRI, additionalImports); } - + static void updateImports(final OWLOntology aboxOntology, IRI tboxIRI, Set additionalImports) { List changes = new ArrayList(); - + Set missingImports = new HashSet(); missingImports.add(tboxIRI); missingImports.addAll(additionalImports); @@ -1348,7 +1398,7 @@ static void updateImports(final OWLOntology aboxOntology, IRI tboxIRI, Set changes.add(new AddImport(aboxOntology, decl)); } } - + if (!changes.isEmpty()) { m.applyChanges(changes); } @@ -1361,5 +1411,5 @@ public OWLOntology getTbox() { public OWLReasoner getTbox_reasoner() { return tbox_reasoner; } - + } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java b/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java index 2462acca..051e0f1e 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java @@ -13,6 +13,7 @@ import org.geneontology.minerva.UndoAwareMolecularModelManager.UndoMetadata; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.util.ReverseChangeGenerator; +import org.geneontology.rules.engine.WorkingMemory; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChange; @@ -314,5 +315,6 @@ public void clearUndoHistory(IRI modelId) { protected void applyChanges(ModelContainer model, List changes) { model.applyChanges(changes); } + } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java b/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java index 43394eaa..0b81816a 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java @@ -25,6 +25,8 @@ import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; import org.semanticweb.owlapi.model.*; +import org.semanticweb.owlapi.search.EntitySearcher; +import org.semanticweb.owlapi.search.Searcher; import owltools.cli.Opts; import owltools.gaf.eco.EcoMapperFactory; @@ -35,6 +37,7 @@ import java.io.File; import java.net.URL; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Map; @@ -43,7 +46,7 @@ public class StartUpTool { - + private static final Logger LOGGER = Logger.getLogger(StartUpTool.class); public static class MinervaStartUpConfig { @@ -54,9 +57,9 @@ public static class MinervaStartUpConfig { public String exportFolder = null; public 
String modelIdPrefix = "http://model.geneontology.org/"; public String modelIdcurie = "gomodel"; - + public String defaultModelState = "development"; - + public String golrUrl = null; public String monarchUrl = null; public String golrSeedUrl = null; @@ -67,7 +70,7 @@ public static class MinervaStartUpConfig { public boolean checkLiteralIds = true; public String reasonerOpt = null; - + public CurieHandler curieHandler; // The subset of highly relevant relations is configured using super property @@ -79,25 +82,25 @@ public static class MinervaStartUpConfig { public int port = 6800; public String contextPrefix = null; // root context by default public String contextString = null; - + // increase default size to deal with large HTTP GET requests public int requestHeaderSize = 64*1024; public int requestBufferSize = 128*1024; - + public boolean useRequestLogging = false; - + public boolean useGolrUrlLogging = false; - + public String prefixesFile = null; public int sparqlEndpointTimeout = 100; - + public String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; public String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; public ShexValidator shex; - + } - + public static void main(String[] args) throws Exception { Opts opts = new Opts(args); MinervaStartUpConfig conf = new MinervaStartUpConfig(); @@ -109,7 +112,7 @@ public static void main(String[] args) throws Exception { File shex_map_file = new File("./target/go-cam-shapes.shapeMap"); org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); conf.shex = new ShexValidator(shex_schema_file, shex_map_file); - + while (opts.hasArgs()) { if (opts.nextEq("-g|--graph")) { conf.ontology = opts.nextOpt(); @@ -225,7 +228,7 @@ else if (opts.nextEq("--sparql-endpoint-timeout")) { if (conf.contextPrefix != null) { conf.contextString = "/"+conf.contextPrefix; } - + // set curie handler final CurieMappings mappings; if (conf.prefixesFile != null) { @@ -251,7 +254,7 @@ else if (opts.nextEq("--sparql-endpoint-timeout")) { " use url logging: "+conf.useGolrUrlLogging); conf.lookupService = new CachingExternalLookupService(conf.lookupService, conf.golrCacheSize, conf.golrCacheDuration, conf.golrCacheDurationUnit); } - + Server server = startUp(conf); try { server.join(); @@ -261,7 +264,7 @@ else if (opts.nextEq("--sparql-endpoint-timeout")) { server.destroy(); } } - + /** * Try to resolve the given string into an {@link OWLObjectProperty}. * @@ -288,7 +291,7 @@ public static OWLObjectProperty getRelation(String rel, OWLGraphWrapper g) { } return p; } - + /** * Find all asserted direct sub properties of the parent property. * @@ -321,8 +324,24 @@ public static Server startUp(final MinervaStartUpConfig conf) pw.addIRIMapper(new CatalogXmlIRIMapper(conf.catalog)); } OWLGraphWrapper graph = pw.parseToOWLGraph(conf.ontology); - - // try to get important relations + //In some cases, go-lego is not pre-merged and parseToOWLgraph keeps the imports separate + //most OWL API methods have an include-imports option that makes this work + //but EntitySearcher methods that deal with annotation assertions do not. + //The current pattern for mapping external ontologies to local ones (e.g. reactome to uniprot) + //involves the use of an annotation property.. To get that to work, + //need to pre-merge the ontologies. 
+ OWLOntology full_tbox = graph.getSourceOntology(); + Set import_set = graph.getAllOntologies(); + if(import_set!=null) { + for(OWLOntology ont : import_set) { + if(!ont.equals(full_tbox)) { + full_tbox.getOWLOntologyManager().addAxioms(full_tbox, ont.getAxioms()); + full_tbox.getOWLOntologyManager().removeOntology(ont); + } + } + } + graph.setSourceOntology(full_tbox); + if (conf.importantRelationParent != null) { // try to find parent property OWLObjectProperty parentProperty = getRelation(conf.importantRelationParent, graph); @@ -339,8 +358,8 @@ public static Server startUp(final MinervaStartUpConfig conf) } // set folder to models - LOGGER.info("Model path: "+conf.journalFile); - + LOGGER.info("Model path: "+conf.journalFile); + // create model manager LOGGER.info("Start initializing Minerva"); UndoAwareMolecularModelManager models = new UndoAwareMolecularModelManager(graph.getSourceOntology(), @@ -353,7 +372,7 @@ public static Server startUp(final MinervaStartUpConfig conf) Server server = startUp(models, conf); return server; } - + public static InferenceProviderCreator createInferenceProviderCreator(String reasonerOpt, UndoAwareMolecularModelManager models, ShexValidator shex) { switch(reasonerOpt) { case ("slme-hermit"): return CachingInferenceProviderCreatorImpl.createHermiT(shex); @@ -363,7 +382,7 @@ public static InferenceProviderCreator createInferenceProviderCreator(String rea default: return null; } } - + public static Server startUp(UndoAwareMolecularModelManager models, MinervaStartUpConfig conf) throws Exception { LOGGER.info("Setup Jetty config."); @@ -378,7 +397,7 @@ public static Server startUp(UndoAwareMolecularModelManager models, MinervaStart resourceConfig.register(LoggingApplicationEventListener.class); } //resourceConfig.register(AuthorizationRequestFilter.class); - + LOGGER.info("BatchHandler config inference provider: "+conf.reasonerOpt); LOGGER.info("BatchHandler config importantRelations: "+conf.importantRelations); LOGGER.info("BatchHandler config lookupService: "+conf.lookupService); @@ -389,13 +408,13 @@ public static Server startUp(UndoAwareMolecularModelManager models, MinervaStart conf.golrSeedUrl = conf.golrUrl; } LOGGER.info("SeedHandler config golrUrl: "+conf.golrSeedUrl); - + InferenceProviderCreator ipc = createInferenceProviderCreator(conf.reasonerOpt, models, conf.shex); - + JsonOrJsonpBatchHandler batchHandler = new JsonOrJsonpBatchHandler(models, conf.defaultModelState, ipc, conf.importantRelations, conf.lookupService); batchHandler.CHECK_LITERAL_IDENTIFIERS = conf.checkLiteralIds; - + SimpleEcoMapper ecoMapper = EcoMapperFactory.createSimple(); JsonOrJsonpSeedHandler seedHandler = new JsonOrJsonpSeedHandler(models, conf.defaultModelState, conf.golrSeedUrl, ecoMapper ); SPARQLHandler sparqlHandler = new SPARQLHandler(models, conf.sparqlEndpointTimeout); @@ -413,11 +432,11 @@ public static Server startUp(UndoAwareMolecularModelManager models, MinervaStart //new jetty - does not have setRequestBufferSize at all //seems to push defaults harder here. 
//to change request header size need to create a new connector and manipulate httpconfiguration - HttpConfiguration http_config = new HttpConfiguration(); - http_config.setRequestHeaderSize(conf.requestHeaderSize); + HttpConfiguration http_config = new HttpConfiguration(); + http_config.setRequestHeaderSize(conf.requestHeaderSize); ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); connector.setPort(conf.port); - + server.addConnector(connector); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java index fd84e291..efa95d0b 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java @@ -676,7 +676,7 @@ private void exportLegacy(M3BatchResponse response, ModelContainer model, String if ("gpad".equals(format)) { initMetaResponse(response); try { - response.data.exportModel = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getDoNotAnnotateSubset()).exportGPAD(m3.createInferredModel(model.getModelId())); + response.data.exportModel = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getDoNotAnnotateSubset()).exportGPAD(m3.createCanonicalInferredModel(model.getModelId())); } catch (InconsistentOntologyException e) { response.messageType = MinervaResponse.MESSAGE_TYPE_ERROR; response.message = "The model is inconsistent; a GPAD cannot be created.";
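
The following sketch (not part of the commit) illustrates the canonical_record
swap that createCanonicalInferredModel performs. Only the annotation property
IRI http://geneontology.org/lego/canonical_record comes from this patch; the
class, individual, and ontology IRIs are hypothetical examples, and the loop is
a simplified stand-in for the patched method rather than the code above.

import java.util.Collection;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;
import org.semanticweb.owlapi.search.EntitySearcher;

public class CanonicalSwapSketch {
	public static void main(String[] args) throws OWLOntologyCreationException {
		OWLOntologyManager man = OWLManager.createOWLOntologyManager();
		OWLDataFactory df = man.getOWLDataFactory();
		OWLAnnotationProperty canonicalRecord = df.getOWLAnnotationProperty(
				IRI.create("http://geneontology.org/lego/canonical_record"));

		// Hypothetical tbox: a Reactome-derived entity class annotated with its canonical UniProt record.
		OWLClass reactoClass = df.getOWLClass(IRI.create("http://example.org/reacto/R-HSA-0000001"));
		IRI canonicalIRI = IRI.create("http://identifiers.org/uniprot/P00000");
		OWLOntology tbox = man.createOntology(IRI.create("http://example.org/tbox"));
		man.addAxiom(tbox, df.getOWLAnnotationAssertionAxiom(canonicalRecord, reactoClass.getIRI(), canonicalIRI));

		// Hypothetical abox: a gene product individual typed with the non-canonical class.
		OWLOntology abox = man.createOntology(IRI.create("http://example.org/abox"));
		OWLNamedIndividual gp = df.getOWLNamedIndividual(IRI.create("http://example.org/abox/gp1"));
		man.addAxiom(abox, df.getOWLClassAssertionAxiom(reactoClass, gp));

		// The swap: for each named type, follow canonical_record in the tbox and retype the individual.
		for (OWLClassAssertionAxiom ca : abox.getClassAssertionAxioms(gp)) {
			OWLClassExpression type = ca.getClassExpression();
			if (type.isAnonymous()) {
				continue;
			}
			Collection<OWLAnnotation> canons = EntitySearcher.getAnnotationObjects(type.asOWLClass(), tbox, canonicalRecord);
			for (OWLAnnotation canon : canons) {
				if (canon.getValue().asIRI().isPresent()) {
					man.removeAxiom(abox, ca);
					man.addAxiom(abox, df.getOWLClassAssertionAxiom(df.getOWLClass(canon.getValue().asIRI().get()), gp));
				}
			}
		}
		// gp is now typed with the UniProt class, so the GPAD export reports the canonical identifier.
	}
}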
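
A second sketch (also not part of the commit) shows the tbox import pre-merge
added to StartUpTool.startUp in isolation: EntitySearcher annotation lookups do
not follow imports, so the axioms of every imported ontology are copied into
the root ontology before canonical_record annotations are searched. The
loadOntology IRI is only an example; the real code operates on the
OWLGraphWrapper returned by parseToOWLGraph.

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class PreMergeImportsSketch {

	// Copy every imported ontology's axioms into the root ontology and drop the
	// now-redundant imported ontologies, so that EntitySearcher can see
	// annotation assertions such as canonical_record without following imports.
	public static OWLOntology mergeImports(OWLOntology root) {
		OWLOntologyManager man = root.getOWLOntologyManager();
		for (OWLOntology imported : root.getImportsClosure()) {
			if (!imported.equals(root)) {
				man.addAxioms(root, imported.getAxioms());
				man.removeOntology(imported);
			}
		}
		return root;
	}

	public static void main(String[] args) throws OWLOntologyCreationException {
		OWLOntologyManager man = OWLManager.createOWLOntologyManager();
		// Example IRI for the go-lego tbox; any ontology with an imports closure works here.
		OWLOntology tbox = man.loadOntology(IRI.create("http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"));
		mergeImports(tbox);
	}
}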