Merge pull request #25 from rte-france/24_IDUniqueness
See #24: new class is introduced to treat cases for which the xmi is …
picaultj authored Jun 9, 2020
2 parents 3d28930 + 51f30c7 commit 0bbe52f
Showing 4 changed files with 210 additions and 19 deletions.
14 changes: 8 additions & 6 deletions src/main/java/ocl/OCLEvaluator.java
@@ -80,7 +80,7 @@ private Map<String, List<EvaluationResult>> assessRules(File where) throws IOExc
XMITransformation my_transf = new XMITransformation();

HashMap<String, Document> xmi_list = new HashMap<>();

cacheDir.mkdirs();
try {
my_prep.readZip(where);
LOGGER.info("Reordering done!");
@@ -101,7 +101,7 @@ private Map<String, List<EvaluationResult>> assessRules(File where) throws IOExc


List<String> files = new ArrayList<>();
cacheDir.mkdirs();


LOGGER.info("Validator ready");

@@ -313,7 +313,7 @@ private static void write(Document doc, OutputStream out) {
* @return
* @throws IOException
*/
static HashMap<String,String> getConfig() throws IOException, URISyntaxException {
public static HashMap<String,String> getConfig() throws IOException, URISyntaxException {
HashMap<String,String> configs = new HashMap<>();
InputStream config = new FileInputStream(System.getenv("VALIDATOR_CONFIG")+File.separator+"config.properties");
Properties properties = new Properties();
@@ -350,9 +350,10 @@ static HashMap<String,String> getConfig() throws IOException, URISyntaxException

if(cacheDir_!=null ){
try{
configs.put("cacheDir", IOUtils.resolveEnvVars(cacheDir_)+File.separator+"cache");
cacheDir = new File(IOUtils.resolveEnvVars(cacheDir_)+File.separator+"cache");
}catch (Exception e){

configs.put("cacheDir",where.getAbsolutePath()+File.separator+"/cache" );
cacheDir= new File(where.getAbsolutePath()+File.separator+"/cache");
}

@@ -418,8 +419,9 @@ public static void main(String[] args) {
// Read rule details
RuleDescriptionParser parser = new RuleDescriptionParser();
HashMap<String, RuleDescription> rules = parser.parseRules("config/UMLRestrictionRules.xml");

getConfig();
OCLEvaluator evaluator = new OCLEvaluator();

if(debugMode)
LOGGER.info("Validator running in debug mode");
Map<String, List<EvaluationResult>> synthesis = evaluator.assessRules(where);
@@ -432,7 +434,7 @@ public static void main(String[] args) {

evaluator.cleanCache();

} catch (ParserConfigurationException | IOException | SAXException e){
} catch (ParserConfigurationException | IOException | SAXException | URISyntaxException e){
e.printStackTrace();
System.exit(-1);
}
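
For context, a minimal sketch of what the wider getConfig() visibility allows from outside the OCLEvaluator class. The class name CacheDirLookup is illustrative only; the sketch assumes, as the existing code does, that VALIDATOR_CONFIG points at a directory containing config.properties.

import ocl.OCLEvaluator;

import java.io.File;

public class CacheDirLookup {
    public static void main(String[] args) throws Exception {
        // getConfig() reads config.properties from the directory named by the
        // VALIDATOR_CONFIG environment variable and resolves the "cacheDir" entry
        // (falling back to a cache/ folder next to the input when it is not set).
        File cacheDir = new File(OCLEvaluator.getConfig().get("cacheDir"));
        cacheDir.mkdirs();
        System.out.println("Result archives are cached under: " + cacheDir.getAbsolutePath());
    }
}
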
18 changes: 9 additions & 9 deletions src/main/java/ocl/Profile.java
@@ -4,18 +4,18 @@
import java.util.ArrayList;
import java.util.List;

class Profile{
enum Type{
public class Profile{
public enum Type{
EQ, TP, SSH, SV, other
}

Type type;
String id;
List<String> depOn= new ArrayList<>();
File file;
String xml_name;
List<String> DepToBeReplaced= new ArrayList<>();
List<String> modelProfile = new ArrayList<>();
public Type type;
public String id;
public List<String> depOn= new ArrayList<>();
public File file;
public String xml_name;
public List<String> DepToBeReplaced= new ArrayList<>();
public List<String> modelProfile = new ArrayList<>();

Profile(Type type, String id, List<String> deps, File file, String xmlName, List<String> modelProfile){
this.type = type;
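
A small, hypothetical illustration of what the widened visibility permits: code outside the ocl package, such as the new ocl.util.CheckXMLConsistency below, can now read the profile metadata directly. ProfileInfo is not part of the commit and only reads the now-public fields.

import ocl.Profile;

public final class ProfileInfo {

    private ProfileInfo() {
    }

    // Reads only the now-public fields; Profile instances still have to be created
    // inside the ocl package because the constructor stays package-private.
    public static String describe(Profile profile) {
        return profile.type + " profile " + profile.id + " (" + profile.xml_name + ", "
                + profile.depOn.size() + " dependencies)";
    }
}
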
20 changes: 16 additions & 4 deletions src/main/java/ocl/XMITransformation.java
Expand Up @@ -14,6 +14,7 @@
**/
package ocl;

import ocl.util.CheckXMLConsistency;
import org.apache.commons.lang3.StringUtils;
import org.w3c.dom.*;

@@ -150,11 +151,20 @@ HashMap<String, Document> convertData(HashMap<Profile,List<Profile>> IGM_CGM, L
break;
}
}
Document merged_xml = createMerge(EQBD,TPBD, getBusinessProcess(key.xml_name), key, EQ, SSH, TP,defaultBDIds);
resulting_xmi = createXmi(merged_xml);
LOGGER.info("Transformed:"+key.xml_name);
CheckXMLConsistency xmlConsistency = new CheckXMLConsistency(EQ,TP,SSH,key, sv_sn.get(0));

if(!xmlConsistency.isExcluded()){
Document merged_xml = createMerge(EQBD,TPBD, getBusinessProcess(key.xml_name), key, EQ, SSH, TP,defaultBDIds);
resulting_xmi = createXmi(merged_xml);
LOGGER.info("Transformed:"+key.xml_name);

xmi_map.put(sv_sn.get(0),resulting_xmi);
}
else{
LOGGER.info("Xmi not created for "+key.xml_name+". Probably basic xml/rdf consistency is broken.");
}


xmi_map.put(sv_sn.get(0),resulting_xmi);


} catch (Exception e){
@@ -221,6 +231,8 @@ private NodeList getNodeList(File file) throws ParserConfigurationException, IOE
return nodeList;
}



/**
*
* @param eqbd
177 changes: 177 additions & 0 deletions src/main/java/ocl/util/CheckXMLConsistency.java
@@ -0,0 +1,177 @@
package ocl.util;

import com.google.gson.Gson;
import ocl.OCLEvaluator;
import ocl.Profile;

import org.apache.commons.lang3.StringUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

public class CheckXMLConsistency {

RuleDescriptionParser parser = new RuleDescriptionParser();
HashMap<String, RuleDescription> rules = parser.parseRules("config/UMLRestrictionRules.xml");

private boolean isExcluded = false;
private String caseName;

public boolean isExcluded() {
return isExcluded;
}

class IDUniqueness{
Profile.Type type ;
String Object;
String id;
IDUniqueness(Profile.Type type, String Object, String id){
this.type = type;
this.Object = Object;
this.id=id;
}
}

List<IDUniqueness> idUniquenessList = new ArrayList<>();

/**
* @param EQ
* @param TP
* @param SSH
* @param SV
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
public CheckXMLConsistency(Profile EQ, Profile TP, Profile SSH, Profile SV, String caseName) throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
this.caseName = caseName;
IDUniqueness(EQ,TP, SSH, SV);
}

/**
* @param EQ
* @param TP
* @param SSH
* @param SV
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
public void IDUniqueness(Profile EQ, Profile TP, Profile SSH, Profile SV) throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
checkIDUniqueness(EQ);
checkIDUniqueness(TP);
checkIDUniqueness(SSH);
checkIDUniqueness(SV);
List<EvaluationResult> results = new ArrayList<>();
String ruleName = "IDuniqueness";
String severity = rules.get(ruleName) == null ? "UNKNOWN" : rules.get(ruleName).getSeverity();
int level = rules.get(ruleName) == null ? 0 : rules.get(ruleName).getLevel();
String specificMessage= rules.get(ruleName) == null? "mRID (rdf:ID or rdf:about) not unique within model":null;
for(IDUniqueness idUniqueness:idUniquenessList){
EvaluationResult evaluationResult = new EvaluationResult(severity,
ruleName,
level,
idUniqueness.id,
idUniqueness.type.toString()+"."+idUniqueness.Object,
null, specificMessage
);
results.add(evaluationResult);
}
if(results.size()!=0)
writeJsonResults(results);
}

/**
* @param profile
* @throws ParserConfigurationException
* @throws SAXException
* @throws IOException
*/
public void checkIDUniqueness(Profile profile) throws ParserConfigurationException, SAXException, IOException {
Set ids = new HashSet();
for (Node node : convertToArray(getNodeList(profile))) {
if(!StringUtils.isEmpty(node.getLocalName())){
String id = node.getAttributes().item(0).getNodeValue().replace("#","");
if (ids.contains(id)){
idUniquenessList.add(new IDUniqueness(profile.type,node.getNodeName(), id ));
isExcluded = true;
}
ids.add(id);
}
}
ids = null;
}


public void writeJsonResults(List<EvaluationResult> results) throws IOException, URISyntaxException {

File cachedir = new File(OCLEvaluator.getConfig().get("cacheDir"));
OutputStream zipout = Files.newOutputStream(Paths.get(cachedir.getAbsolutePath()+File.separator+caseName+".json.zip"));
ZipOutputStream zipOutputStream = new ZipOutputStream(zipout);
String json = new Gson().toJson(results);
ZipEntry entry_ = new ZipEntry(caseName + ".xmi.json"); // The name
zipOutputStream.putNextEntry(entry_);
zipOutputStream.write(json.getBytes());
zipOutputStream.closeEntry();
zipOutputStream.close();
}

/**
*
* @param profile
* @return
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
private NodeList getNodeList(Profile profile) throws IOException, SAXException, ParserConfigurationException {
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
builderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder = builderFactory.newDocumentBuilder();
Document document = null;
ZipFile zip = new ZipFile(new File(profile.file.getAbsolutePath()));
Enumeration<? extends ZipEntry> entries = zip.entries();
while (entries.hasMoreElements()){
ZipEntry entry = entries.nextElement();
InputStream xmlStream = zip.getInputStream(entry);
document = documentBuilder.parse(xmlStream);
xmlStream.close();
}
NodeList nodeList = document.getDocumentElement().getChildNodes();
return nodeList;

}

/**
*
* @param list
* @return
*/
private Node[] convertToArray(NodeList list)
{
int length = list.getLength();
Node[] copy = new Node[length];

for (int n = 0; n < length; ++n)
copy[n] = list.item(n);

return copy;
}

}
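
A hedged usage sketch of the new class, mirroring how XMITransformation.convertData drives it. ConsistencyGateExample and passesBasicConsistency are illustrative names, and the Profile arguments are assumed to come from the existing preparation step, since Profile cannot be constructed outside the ocl package.

import ocl.Profile;
import ocl.util.CheckXMLConsistency;

public final class ConsistencyGateExample {

    private ConsistencyGateExample() {
    }

    // Returns true when the case may be merged into an XMI, mirroring the guard
    // added in XMITransformation.convertData. The constructor itself runs the
    // rdf:ID / rdf:about uniqueness check on all four profiles and, when duplicates
    // are found, writes a <caseName>.json.zip report into the cache directory.
    public static boolean passesBasicConsistency(Profile eq, Profile tp, Profile ssh,
                                                 Profile sv, String caseName) throws Exception {
        CheckXMLConsistency check = new CheckXMLConsistency(eq, tp, ssh, sv, caseName);
        return !check.isExcluded();
    }
}

convertData merges and serializes an XMI only for cases where this gate passes; excluded cases are reported solely through the JSON archive written to the cache directory.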
