⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 d_normalisation_rdf2xmlimpl.java

📁 NormKit is a set of tools supporting ontology learning, covering transformations from XML and XSD to RDFS.
💻 JAVA
字号:
package it.itc.ectrl.normkit.dnorm_rdf2xml.impl;

import java.util.*;
import java.io.File;
import java.io.FileWriter;
import java.net.URI;

import edu.unika.aifb.kaon.api.KAONException;
import edu.unika.aifb.kaon.api.change.AddIncludedOIModel;
import edu.unika.aifb.kaon.api.oimodel.OIModel;
import edu.unika.aifb.kaon.api.oimodel.Instance;

import edu.unika.aifb.kaon.apionrdf.OIModelImpl;
import edu.unika.aifb.kaon.apionrdf.InstanceImpl;
import edu.unika.aifb.kaon.apionrdf.KAONConnectionImpl;

import org.w3c.dom.Document;

import oracle.xml.parser.schema.*;
import oracle.xml.parser.v2.*;

import org.xml.sax.InputSource;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.AttributesImpl;


import it.itc.ectrl.normkit.common.*;
import it.itc.ectrl.normkit.common.NormMapModel.*;

import it.itc.ectrl.normkit.dnorm_rdf2xml.api.D_Normalisation_RDF2XML;

import com.megginson.sax.DataWriter;

/**
 * Implements the D-Normalisation process for the RDF->XML direction:
 * reads RDF instance data together with its source ontology (via KAON),
 * then serialises the translated instances as an XML document governed
 * by a target XML Schema and a Normalisation Map.
 *
 * @author Oliver Fodor (fodor@itc.it)
 */
public class D_Normalisation_RDF2XMLImpl implements D_Normalisation_RDF2XML  {

    /** Physical location of the source ontology. */
    String m_strSourceOntologyPhysicalURI;

    /** Physical location of the source RDF instance data. */
    String m_strSourceInstancePhysicalURI;

    /**
     * Physical location of the target document's governing XML Schema.
     * NOTE(review): never assigned in this class -- the schema location is
     * held in {@link #m_strURITargetSchema} instead; kept for compatibility
     * with package peers that may read it.
     */
    String m_strTargetXSDPhysicalURI;

    /** Physical location of the target XML document. */
    String m_strTargetXMLPhysicalURI;

    // logical URIs

    /** Logical URI of the source ontology (read from the opened OI-model). */
    String m_strURISourceOntology;

    /**
     * URI of the target governing XML Schema.
     * NOTE(review): this is assigned the {@code targetSchemaPath} constructor
     * argument and passed straight to {@code TransformationUtil.readXSD}, so
     * it appears to hold the physical rather than the logical location --
     * confirm against callers.
     */
    String m_strURITargetSchema;

    /** XML document holding the translated instances. */
    Document m_documentTargetXMLDocument;

    /** Model of the source ontology. */
    OIModel m_oimodelSourceOntology;

    /** RDF model of the instance data to be translated. */
    OIModel m_oimodelSourceInstances;

    /** Combined OI-model including both the source ontology and instances. */
    OIModel m_oimodelSourceModel;

    /** The target XML Schema, parsed into the toolkit's node representation. */
    protected XMLSchemaNode m_xmlSchema;

    /** KAON connection used to open/create the source OI-models. */
    KAONConnectionImpl m_KAONConnection;

    /** The Normalisation Map governing this transformation. */
    NormMap m_normmap;

    /** SAX writer producing the target XML document. */
    DataWriter m_targetDataWriter;

    int m_intRootNodeDTM;

    /**
     * Creates an instance and associates it with the source, target and
     * governing map documents. Opens the KAON connection and reads all
     * source models immediately.
     *
     * @param sourceInstancePath physical location of the source RDF instance file
     * @param targetXMLPath      physical location for the output XML document
     * @param sourceOntologyPath physical location of the source ontology
     * @param targetSchemaPath   location of the target governing XML Schema
     * @param normMapPath        physical location of the Normalisation Map governing this process
     * @throws NormalisationException if the KAON connection cannot be created
     *                                or any of the source models cannot be read
     */
    public D_Normalisation_RDF2XMLImpl(String sourceInstancePath, String targetXMLPath, String sourceOntologyPath,  String targetSchemaPath, String normMapPath) throws NormalisationException {
        m_strTargetXMLPhysicalURI = targetXMLPath;
        m_strSourceOntologyPhysicalURI = sourceOntologyPath;
        m_strSourceInstancePhysicalURI = sourceInstancePath;
        m_strURITargetSchema = targetSchemaPath;

        m_normmap = new NormMap(normMapPath);

        initialize();
        readModels();
    }

    /** Opens the KAON connection used for all source OI-models. */
    private void initialize()  throws NormalisationException {
        try {
            m_KAONConnection = new KAONConnectionImpl();
        } catch (KAONException e) {
            throw new NormalisationException("Exception while creating KAON connection.", e);
        }
    }

    /**
     * Opens the source ontology and instance models, merges them into a single
     * temporary source model, and parses the target XML Schema (re-ordering
     * the Normalisation Map to schema-defined element order).
     */
    private void readModels() throws NormalisationException {
        try {
            // the source ontology
            m_oimodelSourceOntology = m_KAONConnection.openOIModelPhysical( m_strSourceOntologyPhysicalURI );
            m_strURISourceOntology = m_oimodelSourceOntology.getLogicalURI();

            // the source instances
            m_oimodelSourceInstances = m_KAONConnection.openOIModelPhysical( m_strSourceInstancePhysicalURI );

            // combined source model: include both ontology and instances
            m_oimodelSourceModel = m_KAONConnection.createOIModel("file:tempSourceModel", "SourceModel");

            List listChanges = new LinkedList();
            listChanges.add( new AddIncludedOIModel(m_oimodelSourceOntology) );
            listChanges.add( new AddIncludedOIModel(m_oimodelSourceInstances) );
            m_oimodelSourceModel.applyChanges( listChanges );

            try {
                m_xmlSchema = TransformationUtil.readXSD(m_strURITargetSchema);
                // the map must be traversed in the element order the schema defines
                m_normmap.reparseToXMLSchemaDefinedOrder(m_xmlSchema);
            } catch (Exception e) {
                throw new NormalisationException("Exception while reading target XSD.", e);
            }

        } catch (KAONException e) {
            throw new NormalisationException("Exception while reading source models.", e);
        }
    }

/*
----------------------------------- procedures for execution of d-normalisation process --------------------------------------------
*/

    /**
     * Executes the D-Normalisation process on the associated data:
     * prepares the output document, then translates the RDF instances
     * and writes them out as XML.
     */
    public void run() throws NormalisationException {
        prepareTargetDocument();
        translateRDFInstanceAndWriteXML();
    }

    /**
     * Creates the output file (if necessary) and wraps it in the SAX
     * {@link DataWriter} used for serialisation.
     */
    private void prepareTargetDocument() throws NormalisationException {
        try {
            File targetFile = new File(new URI(m_strTargetXMLPhysicalURI));
            // FileWriter below would create the file anyway; this just makes
            // the intent explicit (the boolean result is deliberately ignored)
            targetFile.createNewFile();
            m_targetDataWriter = new DataWriter(new FileWriter(targetFile));
        } catch (Exception e) {
            throw new NormalisationException("Exception while preparing the output XML document.", e);
        }
    }

    /**
     * Recursively writes the XPath segments {@code segments[i..]} as nested
     * elements; the special segment {@code "text()"} emits {@code textvalue}
     * as character data instead of opening another element.
     *
     * @param segments  path segments to flush, outermost first
     * @param i         index of the segment handled at this recursion level
     * @param textvalue character data written when a {@code text()} segment is reached
     */
    void recursivelyFlushSegments(String [] segments, int i, String textvalue) throws Exception {
        if (i < segments.length) {
            if (segments[i].equals("text()")) {
                m_targetDataWriter.characters(textvalue);
            } else {
                m_targetDataWriter.startElement(segments[i]);
                recursivelyFlushSegments(segments, i + 1, textvalue);
                m_targetDataWriter.endElement(segments[i]);
            }
        }
    }

    /**
     * Resolves, for every path-bridge URI in {@code pathBridges}, the XPath
     * related to it in the Normalisation Map.
     *
     * @param pathBridges set of path-bridge URI strings
     * @return map from path-bridge URI to its related XPath string
     * @throws NormalisationException if a bridge lookup fails
     */
    private Map resolveHasBridgesXpaths(Set pathBridges) throws NormalisationException {
        // an optimisation would be possible here: temporarily store the maps
        // on the respective node bridge for reuse
        Map pathBridgesWithXpaths = new HashMap();
        try {
            Iterator i = pathBridges.iterator();
            while (i.hasNext()) {
                String pathBridgeURI = (String) i.next();
                String pathBridgeXpath = m_normmap.getPathBridge(pathBridgeURI).getRelatedXPath();
                pathBridgesWithXpaths.put(pathBridgeURI, pathBridgeXpath);
            }
            return pathBridgesWithXpaths;
        } catch (Exception e) {
            // cause is preserved in the wrapper; no need to also dump the trace
            throw new NormalisationException("Exception while building the output XML document.", e);
        }
    }

    /**
     * Finds the path-bridge whose related XPath is rooted at element
     * {@code elmName} (either equal to it, or descending from it).
     *
     * @param elmName     element name to match against the bridges' XPaths
     * @param pathBridges map from path-bridge URI to XPath, as produced by
     *                    {@link #resolveHasBridgesXpaths}
     * @return the matching path-bridge URI, or {@code null} if none matches
     */
    private String getPathBridgeURI(String elmName, Map pathBridges) {
        // could be replaced by a KAON query execution, or optimised by
        // reversing the map
        Iterator i = pathBridges.keySet().iterator();
        while (i.hasNext()) {
            String pathBridgeURI = (String) i.next();
            String pathBridgeXPath = (String) pathBridges.get(pathBridgeURI);
            if (pathBridgeXPath.equals(elmName) || pathBridgeXPath.startsWith(elmName + "/")) {
                return pathBridgeURI;
            }
        }
        return null;    // no bridge rooted at elmName
    }

    /**
     * Drives the actual translation: resolves the root node bridge, opens the
     * SAX document, recursively processes the bridge hierarchy starting from
     * the unique translated root node, and closes the document.
     */
    private void translateRDFInstanceAndWriteXML () throws NormalisationException{
        try {
            NodeBridge rootNodeBridge = m_normmap.getRootNodeBridge();

            NodeBridgeExec_RDF2XML rootNBexec = new NodeBridgeExec_RDF2XML(this, rootNodeBridge, null);

            // exactly one root element instance is expected in the RDF input
            String translatedRootXMLNode = rootNBexec.getTranslatedUniqueNode();

            m_targetDataWriter.startDocument();
            m_targetDataWriter.setIndentStep(2);

            processNodeBridgeWithHasBridges(rootNodeBridge, translatedRootXMLNode);

            m_targetDataWriter.endDocument();

        } catch (Exception e) {
            throw new NormalisationException("Exception while building the output XML document.", e);
        }
    }

    /**
     * Translates {@code currentRDFInstance} according to
     * {@code currentNodeBridge} and recursively processes all of the bridge's
     * has-bridges, writing the result through the target data writer.
     */
    protected void processNodeBridgeWithHasBridges(NodeBridge currentNodeBridge , String currentRDFInstance) throws NormalisationException {
        try {
            NodeBridgeExec_RDF2XML nb_exec = new NodeBridgeExec_RDF2XML(this, currentNodeBridge, currentRDFInstance);
            nb_exec.processNodeBridgeWithHasBridges();
        } catch (Exception e) {
            throw new NormalisationException("Exception while building the output XML document.", e);
        }
    }

    /** Writes a start tag (with attributes) through the target data writer. */
    protected void flushStartElement(String uri, String localName, String qName, Attributes atts) throws Exception {
        m_targetDataWriter.startElement(uri, localName, qName, atts);
    }

    /** Writes an end tag through the target data writer. */
    protected void flushEndElement(String name) throws Exception {
        m_targetDataWriter.endElement(name);
    }

/*
----------------------------------- setters and getters --------------------------------------------
*/

    /**
     * Returns the combined source model encompassing ontology and instances.
     */
    public OIModel getSourceModel () {
        return m_oimodelSourceModel;
    }

    /**
     * Returns the governing Normalisation Map.
     */
    public NormMap getNormMap () {
        return m_normmap;
    }
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -