Reasoner realization does not work on larger ontologies. How can I solve this?

0 votes / 05 May 2020

I am new to the semantic web field, and I am trying to compare several reasoners. This is my code:

public static void main(String[] args) throws Exception {
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
File file = new File(args[0]);
OWLOntology ontology = manager.loadOntologyFromOntologyDocument(file);
Set<OWLClass> classes = ontology.getClassesInSignature();


String inferredFile = args[1];
//test for correctly uploading ontology
OWLDataFactory df = manager.getOWLDataFactory();     
Reasoner jfact = Reasoner.JFACT;
System.out.println(RunReasoner(jfact, df, ontology, manager, inferredFile));



}

//CREATE AN ENUM REASONER
public enum Reasoner{
    HERMIT, 
    PELLET, 
    KONCLUDE,
    JFACT,
    FACT,
    ELK

}   
public static String RunReasoner(Reasoner reasoner, OWLDataFactory df, OWLOntology ontology,
        OWLOntologyManager manager, String inferredFile)
        throws OWLOntologyCreationException, FileNotFoundException, IOException, OWLOntologyStorageException {
String esito = "";
OWLReasoner reasoner_object = null;
if(reasoner == Reasoner.HERMIT) {
    /****************HERMIT****************************************************************************************/

    OWLReasonerFactory rf = new ReasonerFactory();
    TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    Configuration configuration = new Configuration();
    configuration.reasonerProgressMonitor = progressMonitor;
    configuration.ignoreUnsupportedDatatypes = true;
    reasoner_object = rf.createReasoner(ontology, configuration);


}
else if(reasoner == Reasoner.KONCLUDE) {

    // configure the server end-point
    URL url = new URL("http://localhost:8080");
    OWLlinkHTTPXMLReasonerFactory factory = new OWLlinkHTTPXMLReasonerFactory();
    TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    //OWLlinkReasonerConfiguration conf = (OWLlinkReasonerConfiguration) new SimpleConfiguration(progressMonitor);
    reasoner_object = factory.createNonBufferingReasoner(ontology);

}
else if(reasoner == Reasoner.JFACT) {
    TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
    JFactFactory factory = new JFactFactory();          
    reasoner_object = factory.createNonBufferingReasoner(ontology,conf);
}
//      else if(reasoner == Reasoner.FACT) {
//          TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
//          OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
//          FaCTPlusPlusReasonerFactory factory = new FaCTPlusPlusReasonerFactory();
//          reasoner_object = factory.createNonBufferingReasoner(ontology,conf);
//      }
else if(reasoner == Reasoner.ELK) {
    TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
    ElkReasonerFactory factory = new ElkReasonerFactory();
    reasoner_object = factory.createNonBufferingReasoner(ontology,conf);
}
else if(reasoner == Reasoner.PELLET) {
    TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
    reasoner_object = OpenlletReasonerFactory.getInstance().createReasoner(ontology,conf);          
}
else{
    // no valid reasoner selected: return early instead of calling isConsistent() on a null reasoner_object
    esito = "Reasoner non valido";
    return esito;
}
boolean consistencyCheck = reasoner_object.isConsistent();
        if (consistencyCheck) {
            reasoner_object.precomputeInferences(InferenceType.CLASS_HIERARCHY,
                InferenceType.CLASS_ASSERTIONS, InferenceType.OBJECT_PROPERTY_HIERARCHY,
                InferenceType.DATA_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS);
            List<InferredAxiomGenerator<? extends OWLAxiom>> generators = new ArrayList<>();
            generators.add(new InferredSubClassAxiomGenerator());
            generators.add(new InferredClassAssertionAxiomGenerator());
            generators.add(new InferredDataPropertyCharacteristicAxiomGenerator());
            generators.add(new InferredEquivalentClassAxiomGenerator());
            generators.add(new InferredEquivalentDataPropertiesAxiomGenerator());
            generators.add(new InferredEquivalentObjectPropertyAxiomGenerator());
            generators.add(new InferredInverseObjectPropertiesAxiomGenerator());
            generators.add(new InferredObjectPropertyCharacteristicAxiomGenerator());

            // NOTE: InferredPropertyAssertionGenerator significantly slows down
            // inference computation
            generators.add(new org.semanticweb.owlapi.util.InferredPropertyAssertionGenerator());

            generators.add(new InferredSubDataPropertyAxiomGenerator());
            generators.add(new InferredSubObjectPropertyAxiomGenerator());
            List<InferredIndividualAxiomGenerator<? extends OWLIndividualAxiom>> individualAxioms =
                new ArrayList<>();
            generators.addAll(individualAxioms);

            generators.add(new InferredDisjointClassesAxiomGenerator());
            InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner_object, generators); //Generates an ontology based on inferred axioms which are essentially supplied by a reasoner

            OWLOntology inferredAxiomsOntology = manager.createOntology();
            iog.fillOntology(df, inferredAxiomsOntology);
            System.out.println(inferredAxiomsOntology.getAxiomCount());
//                  for(InferredAxiomGenerator<?> i : iog.getAxiomGenerators()) {
//                      System.out.println(i);}
            File inferredOntologyFile = new File(inferredFile);
            // Now we create a stream since the ontology manager can then write to that stream.
            try (OutputStream outputStream = new FileOutputStream(inferredOntologyFile)) {
                // We use the same format as for the input ontology.
                manager.saveOntology(inferredAxiomsOntology, outputStream);
            }
            esito = "done "+ reasoner.toString();
            reasoner_object.dispose();
        } // End if consistencyCheck
        else {
            esito = reasoner.toString() +" -- Inconsistent input Ontology, Please check the OWL File";
        }
return esito;
}     

When I try to run it on a small ontology (40 axioms), my code works perfectly. If instead I try to run it on a larger ontology (750 axioms), the code keeps running for hours and never gets to the actual realization: the file of inferred axioms stays empty. I think this is due to memory overload or some hidden buffer. I hope you can help me solve this problem. Thanks, Rita
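A diagnostic sketch that may help narrow this down (not a fix): it times each inferred-axiom generator separately against an already initialized reasoner, so you can see whether InferredPropertyAssertionGenerator or InferredDisjointClassesAxiomGenerator (typically the most expensive generators on ontologies with many individuals) is where the hours go. GeneratorTimer and timeGenerators are hypothetical names introduced only for illustration, and the sketch assumes the OWL API 4/5 signature InferredAxiomGenerator.createAxioms(OWLDataFactory, OWLReasoner), which the fillOntology(df, ...) call above already implies.

import java.util.ArrayList;
import java.util.List;

import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.util.InferredAxiomGenerator;
import org.semanticweb.owlapi.util.InferredClassAssertionAxiomGenerator;
import org.semanticweb.owlapi.util.InferredDisjointClassesAxiomGenerator;
import org.semanticweb.owlapi.util.InferredEquivalentClassAxiomGenerator;
import org.semanticweb.owlapi.util.InferredPropertyAssertionGenerator;
import org.semanticweb.owlapi.util.InferredSubClassAxiomGenerator;

// Hypothetical helper, not part of the question's code: runs each generator on its own and reports timings.
public class GeneratorTimer {

    public static void timeGenerators(OWLDataFactory df, OWLReasoner reasoner) {
        List<InferredAxiomGenerator<? extends OWLAxiom>> generators = new ArrayList<>();
        generators.add(new InferredSubClassAxiomGenerator());
        generators.add(new InferredEquivalentClassAxiomGenerator());
        generators.add(new InferredClassAssertionAxiomGenerator());
        generators.add(new InferredPropertyAssertionGenerator());      // often the slowest step
        generators.add(new InferredDisjointClassesAxiomGenerator());   // can also be very costly

        for (InferredAxiomGenerator<? extends OWLAxiom> gen : generators) {
            long start = System.currentTimeMillis();
            // createAxioms() asks the reasoner for exactly the axioms this generator produces
            int count = gen.createAxioms(df, reasoner).size();
            long elapsed = System.currentTimeMillis() - start;
            System.out.println(gen.getLabel() + ": " + count + " axioms in " + elapsed + " ms");
        }
    }
}

If the bottleneck turns out to be memory rather than a single generator, rerunning the same program with a larger heap (for example, java -Xmx4g) and watching where the progress-monitor output stalls (precomputeInferences vs. fillOntology) is another low-effort check.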

...