⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 c_normalisationimpl.java

📁 NormKit is a set of tools supporting ontology learning: from XML/XSD to RDFS.
💻 JAVA
📖 第 1 页 / 共 2 页
字号:
package it.itc.ectrl.normkit.cnorm.impl;

import oracle.xml.parser.schema.*;
import oracle.xml.parser.v2.*;

import java.net.*;
import java.io.*;
import org.w3c.dom.*;
import java.util.*;

import edu.unika.aifb.kaon.api.*;
import edu.unika.aifb.kaon.api.change.*;
import edu.unika.aifb.kaon.api.oimodel.*;
import edu.unika.aifb.kaon.api.vocabulary.*;

import edu.unika.aifb.kaon.apionrdf.OIModelImpl;
import edu.unika.aifb.kaon.apionrdf.InstanceImpl;
import edu.unika.aifb.kaon.apionrdf.KAONConnectionImpl;

import edu.unika.aifb.rdf.api.util.*;

import it.itc.ectrl.normkit.cnorm.api.C_Normalisation;
import it.itc.ectrl.normkit.common.NormalisationException;
import it.itc.ectrl.normkit.common.NormMapModel.*;
import it.itc.ectrl.normkit.common.TransformationUtil;
import it.itc.ectrl.normkit.common.CNOVocabulary;
import it.itc.ectrl.normkit.cnorm.XPathModel.*;
import it.itc.ectrl.normkit.cnorm.XSModel.*;

/**
 * Implementation of the C-Normalisation process for extraction of an RDFS conceptual model out of an XML Schema. Additionally, a Normalisation Map to support D-Normalisation is produced. The extraction process is based on a set of Normalisation Heuristics. The Normalisation Map is based on the XPath model defined by the XML Schema.
 * @author Oliver Fodor (fodor@itc.it)
 */
public class C_NormalisationImpl implements C_Normalisation {
    // physical URIs: actual storage locations of the documents
    /**
     * Physical location of the source XML Schema document (file).
     */
    protected String m_strSourceXSDPhysicalURI;

    /**
     * Physical location of the target conceptual model.
     */
    protected String m_strTargetOntologyPhysicalURI;

    /**
     * Physical location of the produced Normalisation Map.
     */
    protected String m_strNormMapPhysicalURI;

    // logical URIs: identifiers used inside the produced models
    // (the constructor currently just copies the physical URIs here)
    /**
     * Logical URI of the source XML Schema document.
     */
    protected String m_strURISourceXSDocument;

    /**
     * Logical URI for the target conceptual model.
     */
    protected String m_strURITargetOntology;

    /**
     * Logical URI for the Normalisation Map.
     */
    protected String m_strURINormMap;

    // models
    /**
     * Root node of the parsed source XML Schema (Oracle XSD object model).
     */
    protected XMLSchemaNode m_xmlSchema;

    /**
     * The target RDFS conceptual model (KAON OI-model) produced by this process.
     */
    protected OIModel m_oimodelTargetOntology;

    /**
     * The Normalisation Map produced within this C-Normalisation process.
     */
    protected NormMap m_normmap;

    /**
     * KAON connection for creating the target and map OI-models.
     */
    KAONConnectionImpl m_KAONConnection;

    /**
     * Root concept of the target OI-model (obtained via getRootConcept()).
     */
    Concept m_KAON_RootConcept;

    /**
     * Normalisation table carrying information about just processed components as (XS component <-> RDFS entity) pairs.
     */
    protected Hashtable m_normalisationTable = new Hashtable();

    /**
     * Top level complex type definitions (ComplexTypeDefinition) of the processed XML Schema.
     */
    protected Set m_complexTypeDefinitions = new HashSet();

    /**
     * Top level element declarations (ElementDeclaration) of the processed XML Schema.
     */
    protected Set m_elementDeclarations = new HashSet();

/*
    public C_NormalisationImpl(String strSourceXSDPhysicalURI, String strTargetModelLogicalURI, String strNormMapLogicalURI) throws Exception {

    }

    public C_NormalisationImpl(InputStream strSourceXSDPhysicalURI, OutputStream strTargetModelPhysicalURI, OutputStream strNormMapPhysicalURI) throws Exception {

    }

*/

    /**
     * Creates an instance of this class and associates it with source and target documents.
     * The logical URIs of all three models are currently taken over verbatim from their
     * physical locations (to be revisited once a map-repository strategy is in place).
     *
     * @param strSourceXSDPhysicalURI physical location of the source XML Schema
     * @param strTargetModelPhysicalURI physical location of the target RDFS conceptual model
     * @param strNormMapPhysicalURI physical location of the target Normalisation Map
     * @throws NormalisationException if the KAON connection cannot be created or the XSD cannot be read
     */
    public C_NormalisationImpl(String strSourceXSDPhysicalURI, String strTargetModelPhysicalURI, String strNormMapPhysicalURI) throws NormalisationException {

        // remember where the documents physically live
        m_strSourceXSDPhysicalURI = strSourceXSDPhysicalURI;
        m_strTargetOntologyPhysicalURI = strTargetModelPhysicalURI;
        m_strNormMapPhysicalURI = strNormMapPhysicalURI;

        // for now the logical URIs are identical to the physical ones
        m_strURISourceXSDocument = strSourceXSDPhysicalURI;
        m_strURITargetOntology = strTargetModelPhysicalURI;
        m_strURINormMap = strNormMapPhysicalURI;

        initialize();
        readXSD();
    }

    /**
     * Opens the KAON connection used later to create the target and map OI-models.
     *
     * @throws NormalisationException if the KAON connection cannot be established
     */
    private void initialize() throws NormalisationException {
        try {
            m_KAONConnection = new KAONConnectionImpl();
        } catch (KAONException kaonException) {
            // wrap and rethrow with the original cause preserved
            throw new NormalisationException("KAON Exception while creating KAON Connection", kaonException);
        }
    }

    /**
     * Parses the source XML Schema document into {@link #m_xmlSchema}
     * via {@code TransformationUtil.readXSD}.
     *
     * @throws NormalisationException if the schema document cannot be read or parsed
     */
    private void readXSD() throws NormalisationException {
        try {
            m_xmlSchema = TransformationUtil.readXSD(m_strSourceXSDPhysicalURI);
        } catch (Exception cause) {
            throw new NormalisationException("Exception while reading the XSD at input", cause);
        }
    }
/*
    private void readXSD() throws NormalisationException {

        try {

            XSDBuilder builder = new XSDBuilder();

            XMLSchema schemadoc = (XMLSchema) builder.build(TransformationUtil.createURL (m_strSourceXSDPhysicalURI));

            Hashtable schemanodes = schemadoc.getXMLSchemaNodeTable();

            m_xmlSchema = chooseSchemaNodeToProceed(schemanodes);

        } catch (Exception e) {

            throw new NormalisationException("Exception while reading the XSD at input", e);

        }
    }

    private XMLSchemaNode chooseSchemaNodeToProceed(Hashtable schemanodes) throws Exception {
        // process all in the input file included schemata
        for(Enumeration enumeration = schemanodes.elements(); enumeration.hasMoreElements();) {

          XMLSchemaNode xmlschemanode = (XMLSchemaNode)enumeration.nextElement();

          if(xmlschemanode.getTargetNS() != "http://www.w3.org/1999/XMLSchema" && xmlschemanode.getTargetNS() != "http://www.w3.org/2000/10/XMLSchema" && xmlschemanode.getTargetNS() != "http://www.w3.org/2001/XMLSchema")
            return xmlschemanode;
        }
        return null;
   }
*/
    /**
     * Creates the empty target OI-model for the extracted RDFS conceptual model
     * and caches its root concept in {@link #m_KAON_RootConcept}.
     *
     * @throws NormalisationException if the OI-model cannot be created
     */
    private void prepareTargetModel() throws NormalisationException {
        try {
            m_oimodelTargetOntology = m_KAONConnection.createOIModel(m_strTargetOntologyPhysicalURI, m_strURITargetOntology);
            m_KAON_RootConcept = m_oimodelTargetOntology.getRootConcept();
        } catch (KAONException cause) {
            throw new NormalisationException("KAON Exception while creating new OIModel for target RDFS", cause);
        }
    }

    /**
     * Creates the Normalisation Map model and loads its underlying models.
     *
     * @throws NormalisationException if the map or its models cannot be created/read
     */
    private void prepareNormMap() throws NormalisationException {
        try {
            // NOTE(review): the physical URI is also passed as the map's logical URI
            // (m_strURINormMap is unused here) -- confirm this is intended
            m_normmap = new NormMap(m_strNormMapPhysicalURI, m_strNormMapPhysicalURI, m_strURISourceXSDocument, m_strURITargetOntology);
            m_normmap.readModels();
        } catch (Exception cause) {
            throw new NormalisationException("Exception while creating new Normalisation Map", cause);
        }
    }

    /**
     * Executes the C-Normalisation process on the associated XML Schema.
     * First prepares the output models, then extracts RDFS entities from the
     * global schema components in two passes (declarations first, content
     * models second), and finally traverses the element tree starting at the
     * given root element (presumably to build the XPath-based Normalisation
     * Map -- the traversal method is defined elsewhere in this class).
     *
     * @param rootElement name of the schema element the final traversal starts from
     * @throws NormalisationException if any stage of the process fails
     */
    public void run(String rootElement) throws NormalisationException {

        // set up the output models
        prepareTargetModel();
        prepareNormMap();

        // start from a clean (XS component <-> RDFS entity) table
        m_normalisationTable.clear();

        // pass 1: process the global component declarations
        processTopLevelComplexTypes();

        XSDNode[] globalElements = m_xmlSchema.getElementSet();
        processGlobalElements(globalElements);
        processGlobalAttributes();

        // pass 2: descend into the content of the global components
        processContentOfTopLevelComplexTypes();
        processContentOfGlobalElements();

        traverseAndProcessElements(globalElements, rootElement);
    }

/*
----------------------------------- procedures for RDFS conceptual model extraction --------------------------------------------
*/

    private void processTopLevelComplexTypes () throws NormalisationException {

        try {

            Hashtable complexTypes = m_xmlSchema.getComplexTypeTable();

            for(Enumeration enum = complexTypes.elements(); enum.hasMoreElements();) {

                XSDComplexType ct = (XSDComplexType)enum.nextElement();

                ComplexTypeDefinition ct_def = new ComplexTypeDefinition(ct, this, null);

                m_complexTypeDefinitions.add(ct_def);

                ct_def.runNormalisationHeuristics();

            }
        } catch (Exception e) {

            e.printStackTrace();
            throw new NormalisationException("Exception while processing XML Schema components", e);

        }
    }

    /**
     * Runs the normalisation heuristics on every global (top-level) element
     * declaration, collecting the created {@code ElementDeclaration} wrappers
     * in {@link #m_elementDeclarations} for the later content-processing pass.
     *
     * @param elms the global element declarations of the schema
     * @throws NormalisationException wrapping any failure while processing a component
     */
    private void processGlobalElements(XSDNode[] elms) throws NormalisationException {

        try {
            for (int index = 0; index < elms.length; index++) {

                XSDElement element = (XSDElement) elms[index];
                ElementDeclaration declaration = new ElementDeclaration(element, this);

                m_elementDeclarations.add(declaration);
                declaration.runNormalisationHeuristics();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new NormalisationException("Exception while processing XML Schema components", e);
        }
    }

    /**
     * Runs the normalisation heuristics on every global (top-level) attribute
     * declaration of the source XML Schema. Unlike elements and complex types,
     * the created wrappers are not retained for a second pass.
     *
     * @throws NormalisationException wrapping any failure while processing a component
     */
    private void processGlobalAttributes() throws NormalisationException {

        try {
            XSDAttribute[] attributes = m_xmlSchema.getAttributeDeclarations();

            for (int index = 0; index < attributes.length; index++) {

                AttributeDeclaration declaration = new AttributeDeclaration(attributes[index], this);
                declaration.runNormalisationHeuristics();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new NormalisationException("Exception while processing XML Schema components", e);
        }
    }

    /**
     * Second pass over the top-level complex types collected by
     * {@code processTopLevelComplexTypes()}: processes the elements and
     * attributes nested inside each complex type definition.
     *
     * @throws NormalisationException wrapping any failure while processing a component
     */
    private void processContentOfTopLevelComplexTypes() throws NormalisationException {

        try {
            Iterator definitions = m_complexTypeDefinitions.iterator();

            while (definitions.hasNext()) {
                ComplexTypeDefinition definition = (ComplexTypeDefinition) definitions.next();
                definition.processNestedElements();
                definition.processNestedAttributes();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new NormalisationException("Exception while processing XML Schema components", e);
        }
    }

    /**
     * Second pass over the global element declarations collected by
     * {@code processGlobalElements()}: processes the context of each one.
     *
     * @throws NormalisationException wrapping any failure while processing a component
     */
    private void processContentOfGlobalElements() throws NormalisationException {

        try {
            Iterator declarations = m_elementDeclarations.iterator();

            while (declarations.hasNext()) {
                ElementDeclaration declaration = (ElementDeclaration) declarations.next();
                declaration.processContext();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new NormalisationException("Exception while processing XML Schema components", e);
        }
    }


/*
----------------------------------- procedures for normalisation map definition --------------------------------------------

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -