
I am new to the Semantic Web field and I am trying to compare several reasoners. This is my code:

    public static void main(String[] args) throws OWLOntologyCreationException, FileNotFoundException, IOException, OWLOntologyStorageException {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        File file = new File(args[0]);
        OWLOntology ontology = manager.loadOntologyFromOntologyDocument(file);
        Set<OWLClass> classes = ontology.getClassesInSignature();

        String inferredFile = args[1];
        // check that the ontology has been loaded correctly
        OWLDataFactory df = manager.getOWLDataFactory();
        Reasoner jfact = Reasoner.JFACT;
        System.out.println(RunReasoner(jfact, df, ontology, manager, inferredFile));
    }

//CREATE AN ENUM REASONER
public enum Reasoner{
    HERMIT, 
    PELLET, 
    KONCLUDE,
    JFACT,
    FACT,
    ELK

}   
public static String RunReasoner(Reasoner reasoner, OWLDataFactory df, OWLOntology ontology,
        OWLOntologyManager manager, String inferredFile)
        throws OWLOntologyCreationException, FileNotFoundException, IOException, OWLOntologyStorageException {
    String esito = "";
    OWLReasoner reasoner_object = null;
    if(reasoner == Reasoner.HERMIT) {
        /****************HERMIT****************************************************************************************/

        OWLReasonerFactory rf = new ReasonerFactory();
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        Configuration configuration = new Configuration();
        configuration.reasonerProgressMonitor = progressMonitor;
        configuration.ignoreUnsupportedDatatypes = true;
        reasoner_object = rf.createReasoner(ontology, configuration);


    }
    else if(reasoner == Reasoner.KONCLUDE) {

        // configure the server end-point
        URL url = new URL("http://localhost:8080");
        OWLlinkHTTPXMLReasonerFactory factory = new OWLlinkHTTPXMLReasonerFactory();
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        //OWLlinkReasonerConfiguration conf = (OWLlinkReasonerConfiguration) new SimpleConfiguration(progressMonitor);
        reasoner_object = factory.createNonBufferingReasoner(ontology);

    }
    else if(reasoner == Reasoner.JFACT) {
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        JFactFactory factory = new JFactFactory();          
        reasoner_object = factory.createNonBufferingReasoner(ontology,conf);
    }
    //      else if(reasoner == Reasoner.FACT) {
    //          TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    //          OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
    //          FaCTPlusPlusReasonerFactory factory = new FaCTPlusPlusReasonerFactory();
    //          reasoner_object = factory.createNonBufferingReasoner(ontology,conf);
    //      }
    else if(reasoner == Reasoner.ELK) {
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        ElkReasonerFactory factory = new ElkReasonerFactory();
        reasoner_object = factory.createNonBufferingReasoner(ontology,conf);
    }
    else if(reasoner == Reasoner.PELLET) {
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        reasoner_object = OpenlletReasonerFactory.getInstance().createReasoner(ontology,conf);          
    }
    else {
        esito = "Reasoner non valido";
        // no reasoner was created, so return here to avoid a NullPointerException below
        return esito;
    }
    boolean consistencyCheck = reasoner_object.isConsistent();
    if (consistencyCheck) {
        reasoner_object.precomputeInferences(InferenceType.CLASS_HIERARCHY,
            InferenceType.CLASS_ASSERTIONS, InferenceType.OBJECT_PROPERTY_HIERARCHY,
            InferenceType.DATA_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS);
        List<InferredAxiomGenerator<? extends OWLAxiom>> generators = new ArrayList<>();
        generators.add(new InferredSubClassAxiomGenerator());
        generators.add(new InferredClassAssertionAxiomGenerator());
        generators.add(new InferredDataPropertyCharacteristicAxiomGenerator());
        generators.add(new InferredEquivalentClassAxiomGenerator());
        generators.add(new InferredEquivalentDataPropertiesAxiomGenerator());
        generators.add(new InferredEquivalentObjectPropertyAxiomGenerator());
        generators.add(new InferredInverseObjectPropertiesAxiomGenerator());
        generators.add(new InferredObjectPropertyCharacteristicAxiomGenerator());

        // NOTE: InferredPropertyAssertionGenerator significantly slows down
        // inference computation
        generators.add(new org.semanticweb.owlapi.util.InferredPropertyAssertionGenerator());

        generators.add(new InferredSubDataPropertyAxiomGenerator());
        generators.add(new InferredSubObjectPropertyAxiomGenerator());
        List<InferredIndividualAxiomGenerator<? extends OWLIndividualAxiom>> individualAxioms =
            new ArrayList<>();
        generators.addAll(individualAxioms);

        generators.add(new InferredDisjointClassesAxiomGenerator());
        // Generates an ontology containing the inferred axioms supplied by the reasoner
        InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner_object, generators);

        OWLOntology inferredAxiomsOntology = manager.createOntology();
        iog.fillOntology(df, inferredAxiomsOntology);
        System.out.println(inferredAxiomsOntology.getAxiomCount());
        // for (InferredAxiomGenerator<?> i : iog.getAxiomGenerators()) {
        //     System.out.println(i);
        // }
        File inferredOntologyFile = new File(inferredFile);
        // Create an output stream so the ontology manager can write to it.
        try (OutputStream outputStream = new FileOutputStream(inferredOntologyFile)) {
            // We use the same format as for the input ontology.
            manager.saveOntology(inferredAxiomsOntology, outputStream);
        }
        esito = "done " + reasoner.toString();
        reasoner_object.dispose();
    } // End if consistencyCheck
    else {
        esito = reasoner.toString() + " -- Inconsistent input ontology, please check the OWL file";
    }
    return esito;
}     

My output looks like this:

Loading ...
    busy ...
    ... finished in 3484.5453
Classifying ...
    1%  73
    2%  56...

Can someone explain to me what this means? Is there any documentation on the progress monitor output? Second question: how can I get the number of inferred classes/axioms? Thanks for your help, Rita


1 Answer


The Javadoc for the class is available online and as part of the class's source code: http://owlcs.github.io/owlapi/apidocs_5/org/semanticweb/owlapi/reasoner/TimedConsoleProgressMonitor.html

The percentage increments are up to the reasoner implementation (and may be unreliable, since the reasoner can only make an educated guess about how much work has been done and how much is still left to do). The number next to each percentage is the milliseconds elapsed since the previous percentage increment.

Not all inferred axioms are computed up front; they are evaluated lazily, on demand, so they cannot easily be counted. If you want to materialize the inferences, look at InferredAxiomGenerator for the available options: http://owlcs.github.io/owlapi/apidocs_5/index.html?org/semanticweb/owlapi/reasoner/package-summary.html
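As a concrete illustration, here is a minimal sketch of how the inferences could be materialized and then counted, reusing the reasoner_object, manager and df variables from the RunReasoner method in the question (the choice of generators and the use of AxiomType.SUBCLASS_OF below are just one possible selection, not the only one):

    // Sketch: materialize a chosen subset of inferences into a fresh ontology and count them.
    // Assumes an already-created OWLReasoner (reasoner_object), OWLOntologyManager (manager)
    // and OWLDataFactory (df), as in RunReasoner above.
    List<InferredAxiomGenerator<? extends OWLAxiom>> gens = new ArrayList<>();
    gens.add(new InferredSubClassAxiomGenerator());        // inferred class hierarchy
    gens.add(new InferredClassAssertionAxiomGenerator());  // inferred class memberships

    InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner_object, gens);
    OWLOntology inferred = manager.createOntology();
    iog.fillOntology(df, inferred);

    // Total number of materialized (inferred) axioms
    System.out.println("Inferred axioms: " + inferred.getAxiomCount());
    // Count of one specific axiom type, e.g. SubClassOf
    System.out.println("Inferred SubClassOf axioms: " + inferred.getAxiomCount(AxiomType.SUBCLASS_OF));

Note that the counts cover everything the generators produce, which may include axioms that are already asserted in the input ontology; if only the "new" inferences are of interest, the asserted axioms would have to be subtracted.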
