diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..74affb4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+.idea
+*.iml
+.idea_modules
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..525088a
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,5 @@
+FROM tomcat
+
+COPY target/SCOPES-ENDPOINT-1.0-SNAPSHOT.war /usr/local/tomcat/webapps/scopes.war
+
+EXPOSE 8080
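
As a rough usage sketch (not taken from this commit): the image simply drops the packaged war into Tomcat's webapps directory, so after building the project with Maven it can be built and started with the standard Docker commands below. The image tag is an arbitrary choice; the context path follows from the scopes.war name.

    mvn package
    docker build -t scopes-endpoint .
    docker run -p 8080:8080 scopes-endpoint
    # the service is then served by Tomcat under the /scopes context path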
diff --git a/Dockerfile~ b/Dockerfile~
new file mode 100644
index 0000000..db6d098
--- /dev/null
+++ b/Dockerfile~
@@ -0,0 +1,3 @@
+FROM tomcat
+
+COPY target/SCOPES-ENDPOINT-1.0-SNAPSHOT.war /usr/local/tomcat/webapps/scopes.war
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..8e7cd61
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Jonathan Langens
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0cc190b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Query Pre Processor
+
+The query pre processor is a microservice that calculates the effective difference (the triples that will actually be inserted or deleted) that a query will have on the database
\ No newline at end of file
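
As a rough sketch of what that difference calculation looks like with the classes added in this commit (assuming a running Virtuoso endpoint at the default localhost settings; the update query and the example class are made up for illustration):

    // DifferenceExample.java - illustrative only, not part of this commit
    import com.tenforce.semtech.SPARQLParser.SPARQL.SPARQLQuery;
    import scopes_service.query_pre_processor.query.DifferenceTriples;
    import scopes_service.query_pre_processor.query.QueryService;

    public class DifferenceExample {
        public static void main(String[] args) throws Exception {
            String update = "INSERT DATA { <http://example.org/s> <http://example.org/p> \"o\" . }";
            QueryService queryService = new QueryService(); // connects to the default local Virtuoso
            DifferenceTriples diff = queryService.getDifferenceTriples(new SPARQLQuery(update));
            System.out.println(diff.getEffectiveChangesAsJSON());
        }
    }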
diff --git a/README.md~ b/README.md~
new file mode 100644
index 0000000..4695813
--- /dev/null
+++ b/README.md~
@@ -0,0 +1,25 @@
+# JAVA TEMPLATE
+A Java template that has Spring MVC and Sesame set up and working out of the box. It also has some support functionality to make querying the RDF store easier.
+
+## Renaming the microservice
+
+Steps needed to rename your microservice:
+1. in the pom.xml replace the artifactId with the name your service will have.
+2. in src/main/webapp/WEB-INF/web.xml replace the display name.
+3. in src/main/webapp/WEB-INF/web.xml replace the servlet-name (2 locations!)
+4. rename the java_template package
+
+## Example
+
+For the example scenario to work, read the README under example/.
+
+## TODOs
+
+### Logger
+
+There should be a default logger inserted in the template.
+
+### Query builder
+
+A query builder would be a great help for this template. The main objectives would be:
+
+- Facilitate query construction
+- Protect against SPARQL injection
\ No newline at end of file
diff --git a/insert-this-into-your-store-or-none-of-this-shit-will-work.txt b/insert-this-into-your-store-or-none-of-this-shit-will-work.txt
new file mode 100644
index 0000000..3ec2d63
--- /dev/null
+++ b/insert-this-into-your-store-or-none-of-this-shit-will-work.txt
@@ -0,0 +1,59 @@
+prefix mu: <http://mu.semte.ch/vocabularies/>
+prefix foaf: <http://xmlns.com/foaf/0.1/>
+
+insert
+{
+ "PERSONALGRAPH1";
+ "Personal Graph 1";
+ "personal";
+a .
+ "PERSONALGRAPH2";
+ "Personal Graph 2";
+ "personal";
+a .
+ "PERSONALGRAPH3";
+ "Personal Graph 3";
+ "personal";
+a .
+
+ "GRAPH1";
+ "Graph 1";
+ "hive";
+a .
+ "GRAPH2";
+ "Graph 2";
+ "hive";
+a .
+
+ mu:hasAccessToGraph ,
+ ,
+ ;
+ mu:uuid "JONATHANUUID";
+ mu:hasNode , , .
+
+ mu:forGraph .
+ mu:forGraph ; mu:hasParent .
+ mu:forGraph .
+
+
+ mu:hasAccessToGraph ,
+ ,
+ ;
+ mu:uuid "AADUUID";
+ mu:hasNode , , .
+
+ mu:forGraph .
+ mu:forGraph ; mu:hasParent .
+ mu:forGraph .
+
+
+
+ mu:hasAccessToGraph ,
+ ,
+ ;
+ mu:uuid "ERIKAUUID";
+ mu:hasNode , .
+
+ mu:forGraph .
+ mu:forGraph .
+}
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..43da268
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,192 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>mu-semtech</groupId>
+  <artifactId>SCOPES-ENDPOINT</artifactId>
+  <version>1.0-SNAPSHOT</version>
+  <packaging>war</packaging>
+
+  <properties>
+    <fasterxml.jackson.version>2.6.3</fasterxml.jackson.version>
+    <guava.version>19.0</guava.version>
+    <inject.version>1</inject.version>
+    <openrdf.sesame.version>4.1.0</openrdf.sesame.version>
+    <springframework.version>4.2.4.RELEASE</springframework.version>
+    <virtuoso.jdbc-driver.version>4.1</virtuoso.jdbc-driver.version>
+    <virtuoso.sesame-provider.version>4.0</virtuoso.sesame-provider.version>
+    <slf4j.version>1.7.7</slf4j.version>
+    <logback.version>1.1.2</logback.version>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>com.tenforce.semtech</groupId>
+      <artifactId>SPARQL-parser</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-webmvc</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>javax.inject</groupId>
+      <artifactId>javax.inject</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.openrdf.sesame</groupId>
+      <artifactId>sesame-runtime</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>virtuoso</groupId>
+      <artifactId>jdbc-driver</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>virtuoso</groupId>
+      <artifactId>sesame-provider</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>3.1.0</version>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jcl-over-slf4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>log4j-over-slf4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-classic</artifactId>
+    </dependency>
+  </dependencies>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>com.tenforce.semtech</groupId>
+        <artifactId>SPARQL-parser</artifactId>
+        <version>0.0.3-SNAPSHOT</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.google.guava</groupId>
+        <artifactId>guava</artifactId>
+        <version>${guava.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.fasterxml.jackson.core</groupId>
+        <artifactId>jackson-core</artifactId>
+        <version>${fasterxml.jackson.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>com.fasterxml.jackson.core</groupId>
+        <artifactId>jackson-databind</artifactId>
+        <version>${fasterxml.jackson.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>com.fasterxml.jackson.core</groupId>
+        <artifactId>jackson-annotations</artifactId>
+        <version>${fasterxml.jackson.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>javax.inject</groupId>
+        <artifactId>javax.inject</artifactId>
+        <version>${inject.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-webmvc</artifactId>
+        <version>${springframework.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.openrdf.sesame</groupId>
+        <artifactId>sesame-runtime</artifactId>
+        <version>${openrdf.sesame.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>virtuoso</groupId>
+        <artifactId>jdbc-driver</artifactId>
+        <version>${virtuoso.jdbc-driver.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>virtuoso</groupId>
+        <artifactId>sesame-provider</artifactId>
+        <version>${virtuoso.sesame-provider.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-api</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jcl-over-slf4j</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jul-to-slf4j</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>log4j-over-slf4j</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>ch.qos.logback</groupId>
+        <artifactId>logback-core</artifactId>
+        <version>${logback.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>ch.qos.logback</groupId>
+        <artifactId>logback-classic</artifactId>
+        <version>${logback.version}</version>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+</project>
\ No newline at end of file
diff --git a/src/main/java/scopes_service/config/Configuration.java b/src/main/java/scopes_service/config/Configuration.java
new file mode 100644
index 0000000..7e724c7
--- /dev/null
+++ b/src/main/java/scopes_service/config/Configuration.java
@@ -0,0 +1,30 @@
+package scopes_service.config;
+
+/**
+ * Created by jonathan-langens on 3/7/16.
+ *
+ * This file contains configuration constants used in all microservices
+ */
+public class Configuration {
+ /**
+ * PREFIXES
+ */
+ public final static String prefixUsers = "http://users.com/";
+ public final static String prefixObjects = "http://objects.com/";
+ public final static String prefixMu = "http://mu.semte.ch/vocabularies/";
+ public final static String prefixAuth = "http://mu.semte.ch/vocabularies/authorization/";
+ public final static String prefixUUID = "http://mu.semte.ch/vocabularies/core/uuid";
+
+ /**
+ * PREDICATES
+ */
+ public final static String predicateAuthBelongsTo = "auth:belongsTo";
+ public final static String predicateCanUpdate = "auth:canUpdate";
+ public final static String predicateCanRead = "auth:canRead";
+
+ /**
+ * DEFINITIONS
+ */
+ public final static String definitionUser = "users:class";
+ public final static String definitionUserGroup = "auth:Group";
+}
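
The constants pair full namespace URIs with the prefixed names used inside queries. A minimal sketch of how they are meant to be combined when building a query string (the composition below is an assumption for illustration, not code from this commit):

    // ConfigurationUsageExample.java - illustrative only
    package scopes_service.config;

    public class ConfigurationUsageExample {
        // declare the prefix from the constant, then use the short predicate name
        public static String belongsToQuery() {
            return "PREFIX auth: <" + Configuration.prefixAuth + ">\n"
                 + "SELECT ?user WHERE { ?user " + Configuration.predicateAuthBelongsTo + " ?group . }";
        }
    }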
diff --git a/src/main/java/scopes_service/query_pre_processor/Scopes/Scope.java b/src/main/java/scopes_service/query_pre_processor/Scopes/Scope.java
new file mode 100644
index 0000000..e1eecea
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/Scopes/Scope.java
@@ -0,0 +1,119 @@
+package scopes_service.query_pre_processor.Scopes;
+
+import scopes_service.query_pre_processor.query.SPARQLService;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Created by langens-jonathan on 30.08.16.
+ */
+public class Scope {
+ private List<ScopeNode> nodes = new ArrayList<ScopeNode>();
+ private String name;
+
+ public Scope(String name)
+ {
+ this.name = name;
+ }
+
+ public String getName()
+ {
+ return this.name;
+ }
+
+ public void setName(String name)
+ {
+ this.name = name;
+ }
+
+ public List<ScopeNode> getNodes() {
+ return nodes;
+ }
+
+ public void setNodes(List<ScopeNode> nodes) {
+ this.nodes = nodes;
+ }
+
+ public String calculateGraphToQuery()
+ {
+ String instanceGraph = this.name + "/instance";
+
+ // clear the instance graph
+ String clearQuery = "WITH <" + instanceGraph + ">\nDELETE\n{\n ?s ?p ?o .\n}\nWHERE\n{\n ?s ?p ?o .\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", clearQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ List<ScopeNode> parentNodes = new ArrayList<ScopeNode>();
+
+ for(ScopeNode child : this.nodes)
+ {
+ if(child.getParent() == null)
+ parentNodes.add(child);
+ }
+
+ for(int level = 4; level > 0; --level) {
+ List<String> names = new ArrayList<String>();
+
+ // calculate the scopes of every parentless node at this level and collect its base name
+ for (ScopeNode levelnodes : this.nodes) {
+ if (levelnodes.getScopeNodeType() == level && levelnodes.getParent() == null) {
+ String instanceName = levelnodes.calculateScopes();
+ instanceName = instanceName.substring(0, instanceName.length() - 9);
+ names.add(instanceName);
+ }
+ }
+
+ // pull the inserts of each collected node into this scope's instance graph
+ for (String name : names) {
+ // adding my own inserts
+ String pullInInsertsQuery = "INSERT\n{\n GRAPH <" + instanceGraph + ">\n {\n ?s ?p ?o .\n }\n}";
+ pullInInsertsQuery += "WHERE\n{\n GRAPH <" + name + "/instance>\n {\n ?s ?p ?o.\n }\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", pullInInsertsQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ // then apply the deletes of each collected node to this scope's instance graph
+ for (String name : names) {
+ // remove deletes
+ String pullInDeletesQuery = "DELETE\n{\n GRAPH <" + instanceGraph + ">\n {\n ?s ?p ?o .\n }\n}";
+ pullInDeletesQuery += "WHERE\n{\n GRAPH <" + name + "/delete>\n {\n ?s ?p ?o.\n }\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", pullInDeletesQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ String clearMetaInfoQuery = "WITH \n DELETE\n{\n";
+ clearMetaInfoQuery += "?user ?uuid .\n";
+ clearMetaInfoQuery += "?uuid ?gname .\n}";
+ clearMetaInfoQuery += "WHERE\n{\n?user \"" + this.name + "\" .\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", clearMetaInfoQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ String UUID = java.util.UUID.randomUUID().toString();
+ String setMetaInfoQuery = "WITH \n INSERT\n{\n";
+ setMetaInfoQuery += "?user .\n";
+ setMetaInfoQuery += " \"" + instanceGraph + "\".\n}";
+ setMetaInfoQuery += "WHERE\n{\n?user \"" + this.name + "\" .\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", setMetaInfoQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return instanceGraph;
+ }
+}
diff --git a/src/main/java/scopes_service/query_pre_processor/Scopes/ScopeNode.java b/src/main/java/scopes_service/query_pre_processor/Scopes/ScopeNode.java
new file mode 100644
index 0000000..151b56f
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/Scopes/ScopeNode.java
@@ -0,0 +1,126 @@
+package scopes_service.query_pre_processor.Scopes;
+
+import scopes_service.query_pre_processor.query.SPARQLService;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Created by langens-jonathan on 30.08.16.
+ */
+public class ScopeNode {
+ private String name;
+ private String UUID = java.util.UUID.randomUUID().toString();
+ private String query = "select * from { ?s ?p ?o .}";
+ private ScopeNode parent;
+ private int scopeNodeType;
+ private List<ScopeNode> children = new ArrayList<ScopeNode>();
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getQuery() {
+ return query;
+ }
+
+ public void setQuery(String query) {
+ this.query = query;
+ }
+
+ public ScopeNode getParent() {
+ return parent;
+ }
+
+ public void setParent(ScopeNode parent) {
+ this.parent = parent;
+ parent.addChild(this);
+ }
+
+ public int getScopeNodeType() {
+ return scopeNodeType;
+ }
+
+ public void setScopeNodeType(int scopeNodeType) {
+ this.scopeNodeType = scopeNodeType;
+ }
+
+ public void addChild(ScopeNode child)
+ {
+ this.children.add(child);
+ }
+
+ public void setUUID(String uuid)
+ {
+ this.UUID = uuid;
+ }
+
+ public String getUUID()
+ {
+ return this.UUID;
+ }
+
+ private String getInsertName(String n)
+ {
+ return n + "";
+ }
+
+ private String getDeleteName(String n)
+ {
+ return n + "/delete";
+ }
+
+ public String calculateScopes()
+ {
+ // calculate the name of the instance graph
+ String instanceGraph = this.name + "/instance";
+
+ // clear the instance graph
+ String clearQuery = "WITH <" + instanceGraph + ">\nDELETE\n{\n ?s ?p ?o .\n}\nWHERE\n{\n ?s ?p ?o .\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", clearQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ // adding in everything from my children
+ for(ScopeNode child : this.children)
+ {
+ String childgraphname = child.calculateScopes();
+ String pullInChildGraphQuery = "INSERT\n{\n GRAPH <" + instanceGraph + ">\n {\n ?s ?p ?o .\n }\n}";
+ pullInChildGraphQuery += "WHERE\n{\n GRAPH <" + childgraphname + ">\n {\n ?s ?p ?o.\n }\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", pullInChildGraphQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ // adding my own inserts
+ String pullInInsertsQuery = "INSERT\n{\n GRAPH <" + instanceGraph + ">\n {\n ?s ?p ?o .\n }\n}";
+ pullInInsertsQuery += "WHERE\n{\n GRAPH <" + getInsertName(this.getUUID()) + ">\n {\n ?s ?p ?o.\n }\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", pullInInsertsQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ // remove deletes
+ String pullInDeletesQuery = "DELETE\n{\n GRAPH <" + instanceGraph + ">\n {\n ?s ?p ?o .\n }\n}";
+ pullInDeletesQuery += "WHERE\n{\n GRAPH <" + getDeleteName(this.getUUID()) + ">\n {\n ?s ?p ?o.\n }\n}";
+ try {
+ SPARQLService.getInstance().postSPARQLResponse("http://localhost:8890/sparql", pullInDeletesQuery);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ // then return my own instance name
+ return instanceGraph;
+ }
+}
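
As a rough sketch of how Scope and ScopeNode are wired together before the graph calculation is triggered (names, UUIDs and node types are placeholders, and a SPARQL endpoint is assumed to be running on localhost:8890):

    // ScopeUsageExample.java - illustrative only
    package scopes_service.query_pre_processor.Scopes;

    public class ScopeUsageExample {
        public static String buildAndCalculate() {
            Scope scope = new Scope("http://example.org/scopes/demo");

            ScopeNode parent = new ScopeNode();
            parent.setName("http://example.org/scopes/demo/parent");
            parent.setUUID("PARENT-UUID");
            parent.setScopeNodeType(4);

            ScopeNode child = new ScopeNode();
            child.setName("http://example.org/scopes/demo/child");
            child.setUUID("CHILD-UUID");
            child.setScopeNodeType(3);
            child.setParent(parent); // also registers the child on the parent

            scope.setNodes(java.util.Arrays.asList(parent, child));
            return scope.calculateGraphToQuery(); // "<scope name>/instance"
        }
    }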
diff --git a/src/main/java/scopes_service/query_pre_processor/callback/CallBack.java b/src/main/java/scopes_service/query_pre_processor/callback/CallBack.java
new file mode 100644
index 0000000..9a4f04a
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/callback/CallBack.java
@@ -0,0 +1,113 @@
+package scopes_service.query_pre_processor.callback;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URL;
+
+/**
+ * Created by langens-jonathan on 10.06.16.
+ *
+ * A callback has a url to which it will try to connect and on which
+ * it will then perform POST requests with a given body. The body is
+ * presented raw.
+ */
+public class CallBack
+{
+ // the location to which the call back needs to be made
+ private String url;
+
+ /**
+ * default getter for the url
+ * @return this.url
+ */
+ public String getUrl() {
+ return url;
+ }
+
+ /**
+ * default setter for the url
+ * @param url the url to which this call back needs to perform its requests
+ */
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+ /**
+ * notify will perform a POST request to this.url and send the passed body-parameter raw as
+ * the request body.
+ *
+ * @param body the raw form of the request body
+ */
+ public void notify(String body)
+ {
+ try {
+ URL u = new URL(this.url);
+ byte[] postData = body.getBytes( "UTF-8" );
+ int postDataLength = postData.length;
+ HttpURLConnection connection = (HttpURLConnection) u.openConnection();
+
+ connection.setRequestMethod("POST");
+ connection.setRequestProperty( "Content-Type", "application/x-www-form-urlencoded");
+ connection.setRequestProperty( "charset", "utf-8");
+ connection.setRequestProperty( "Content-Length", Integer.toString( postDataLength ));
+ connection.setInstanceFollowRedirects( false );
+ connection.setUseCaches( false );
+
+ connection.setDoOutput(true);
+
+ DataOutputStream wr = new DataOutputStream(connection.getOutputStream());
+ wr.write(postData);
+ wr.flush();
+ wr.close();
+
+ connection.getResponseCode();
+ }
+ catch (ProtocolException e)
+ {
+ System.out.println("[!] Caught protocol exception, stack trace:");
+ e.printStackTrace();
+ }
+ catch(MalformedURLException e)
+ {
+ System.out.println("[!] Malformed URL: " + this.url);
+ e.printStackTrace();
+ }
+ catch(IOException e)
+ {
+ System.out.println("[!] Could not connect...");
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Equality check. Returns true if the passed object is of type CallBack
+ * and if its url matches this url.
+ * @param o the object for which equality should be checked
+ * @return true if the passed object is logically the same as this object
+ */
+ @Override
+ public boolean equals(Object o)
+ {
+ if(o.getClass().equals(CallBack.class))
+ {
+ if(this.url.equals(((CallBack)o).getUrl()))
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * class hash code generator
+ * @return the hash code for the url string
+ */
+ @Override
+ public int hashCode()
+ {
+ return this.url.hashCode();
+ }
+}
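
A rough usage sketch for CallBack (the receiver URL and the body are placeholders):

    // CallBackUsageExample.java - illustrative only
    package scopes_service.query_pre_processor.callback;

    public class CallBackUsageExample {
        public static void fire() {
            CallBack callBack = new CallBack();
            callBack.setUrl("http://localhost:8888/notify"); // hypothetical receiver
            callBack.notify("{\"status\":\"changed\"}");     // POSTs the body as-is
        }
    }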
diff --git a/src/main/java/scopes_service/query_pre_processor/callback/CallBackService.java b/src/main/java/scopes_service/query_pre_processor/callback/CallBackService.java
new file mode 100644
index 0000000..27d1ab4
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/callback/CallBackService.java
@@ -0,0 +1,103 @@
+package scopes_service.query_pre_processor.callback;
+
+import org.springframework.stereotype.Service;
+
+import java.util.*;
+
+/**
+ * Created by langens-jonathan on 10.06.16.
+ *
+ * Callback Service
+ *
+ * Allows registering different callback sets, adding callbacks to them, calling
+ * notify for each callback registered in a set, and checking whether a set exists
+ * and whether it contains callbacks.
+ */
+@Service
+public class CallBackService
+{
+ // the map that contains the call back sets
+ private Map<String, Set<CallBack>> callBackSets;
+
+ /**
+ * default constructor
+ */
+ public CallBackService()
+ {
+ this.callBackSets = new HashMap<String, Set<CallBack>>();
+ }
+
+ /**
+ * Checks if the set is already present in the map of sets and if not it creates
+ * a new one.
+ * @param setName the name of the list to be added
+ */
+ public void addCallBackSet(String setName)
+ {
+ if(!this.containsCallBackList(setName))
+ {
+ this.callBackSets.put(setName, new HashSet<CallBack>());
+ }
+ }
+
+ /**
+ * checks if the map contains a set with that name
+ * @param setName the name of the set for which the inspection is required
+ * @return true if the map contains a set with the specified name
+ */
+ public boolean containsCallBackList(String setName)
+ {
+ return this.callBackSets.containsKey(setName);
+ }
+
+ /**
+ * Adds a call back to the set with the given name
+ * @param setName the name of the set to which the call back should be added
+ * @param callBack the callback that should be added to the set
+ * @throws CallBackSetNotFoundException if the set to which you want to add things isn't in the map of sets
+ */
+ public void addCallBack(String setName, CallBack callBack) throws CallBackSetNotFoundException
+ {
+ if(this.containsCallBackList(setName))
+ {
+ this.callBackSets.get(setName).add(callBack);
+ }
+ else
+ {
+ throw new CallBackSetNotFoundException(setName);
+ }
+ }
+
+ /**
+ * Calls the notify of each callback for the set with the given name. The call back will be made
+ * with the presented body.
+ * @param setName the name of the set for which all callbacks should be made
+ * @param body the body that needs to be posted to the call back location
+ * @throws CallBackSetNotFoundException if the set with the given name is not present in the map
+ */
+ public void notifyCallBacks(String setName, String body) throws CallBackSetNotFoundException
+ {
+ if(this.containsCallBackList(setName))
+ {
+ for(CallBack callBack : this.callBackSets.get(setName))
+ {
+ callBack.notify(body);
+ }
+ }
+ else
+ {
+ throw new CallBackSetNotFoundException(setName);
+ }
+ }
+
+ /**
+ * Checks whether the set with the given name is present in the map of sets and, if it is,
+ * whether that set contains any callbacks
+ * @param setName the name of the set
+ * @return true if the set is in the map and if that set is not empty
+ */
+ public boolean containsCallBacksForSet(String setName)
+ {
+ return ((this.containsCallBackList(setName)) && (!this.callBackSets.get(setName).isEmpty()));
+ }
+}
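
A rough usage sketch for CallBackService showing the intended call order (the set name, URL and body are placeholders):

    // CallBackServiceUsageExample.java - illustrative only
    package scopes_service.query_pre_processor.callback;

    public class CallBackServiceUsageExample {
        public static void notifySubscribers() throws CallBackSetNotFoundException {
            CallBackService service = new CallBackService();
            service.addCallBackSet("query");                 // create the set first

            CallBack callBack = new CallBack();
            callBack.setUrl("http://localhost:8888/notify"); // hypothetical receiver
            service.addCallBack("query", callBack);

            if (service.containsCallBacksForSet("query")) {
                service.notifyCallBacks("query", "{\"status\":\"changed\"}");
            }
        }
    }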
diff --git a/src/main/java/scopes_service/query_pre_processor/callback/CallBackSetNotFoundException.java b/src/main/java/scopes_service/query_pre_processor/callback/CallBackSetNotFoundException.java
new file mode 100644
index 0000000..35e5af9
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/callback/CallBackSetNotFoundException.java
@@ -0,0 +1,18 @@
+package scopes_service.query_pre_processor.callback;
+
+/**
+ * Created by langens-jonathan on 14.06.16.
+ *
+ * This exception indicates that a set with the given name was not found
+ */
+public class CallBackSetNotFoundException extends Exception {
+ public CallBackSetNotFoundException()
+ {
+ super("A Call Back Set was not found!");
+ }
+
+ public CallBackSetNotFoundException(String setName)
+ {
+ super("The Call Back Set \"" + setName + "\" was not found in the map of sets");
+ }
+}
diff --git a/src/main/java/scopes_service/query_pre_processor/config/RootConfiguration.java b/src/main/java/scopes_service/query_pre_processor/config/RootConfiguration.java
new file mode 100644
index 0000000..9725136
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/config/RootConfiguration.java
@@ -0,0 +1,11 @@
+package scopes_service.query_pre_processor.config;
+
+import scopes_service.query_pre_processor.callback.CallBackService;
+import scopes_service.query_pre_processor.query.QueryService;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+@ComponentScan(basePackageClasses = {QueryService.class, CallBackService.class})
+public class RootConfiguration {
+}
diff --git a/src/main/java/scopes_service/query_pre_processor/config/WebConfiguration.java b/src/main/java/scopes_service/query_pre_processor/config/WebConfiguration.java
new file mode 100644
index 0000000..7e9fe28
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/config/WebConfiguration.java
@@ -0,0 +1,15 @@
+package scopes_service.query_pre_processor.config;
+
+import scopes_service.query_pre_processor.web.RootController;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.servlet.config.annotation.EnableWebMvc;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
+
+@Configuration
+@EnableWebMvc
+@ComponentScan(basePackageClasses = RootController.class)
+public class WebConfiguration extends WebMvcConfigurationSupport {
+
+
+}
diff --git a/src/main/java/scopes_service/query_pre_processor/query/Binding.java b/src/main/java/scopes_service/query_pre_processor/query/Binding.java
new file mode 100644
index 0000000..34f1d37
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/query/Binding.java
@@ -0,0 +1,32 @@
+package scopes_service.query_pre_processor.query;
+
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * Created by langens-jonathan on 31.05.16.
+ */
+public class Binding
+{
+ private HashMap<String, String> bindings = new HashMap<String, String>();
+
+ public void addBinding(String k, String v)
+ {
+ this.bindings.put(k, v);
+ }
+
+ public String getBindingFor(String k)
+ {
+ return this.bindings.get(k);
+ }
+
+ public boolean canHandle(List<String> unknowns)
+ {
+ for(String k : unknowns)
+ {
+ if(!this.bindings.containsKey(k))
+ return false;
+ }
+ return true;
+ }
+}
diff --git a/src/main/java/scopes_service/query_pre_processor/query/DifferenceTriples.java b/src/main/java/scopes_service/query_pre_processor/query/DifferenceTriples.java
new file mode 100644
index 0000000..55f1d17
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/query/DifferenceTriples.java
@@ -0,0 +1,177 @@
+package scopes_service.query_pre_processor.query;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Created by langens-jonathan on 31.05.16.
+ *
+ * This is a data structure
+ *
+ * The DifferenceTriples class holds one set of all triples that will
+ * be inserted into a certain data set, one set of all triples that will
+ * be deleted from it, one set of all triples that will effectively be
+ * inserted and one set of all triples that will effectively be deleted.
+ */
+public class DifferenceTriples
+{
+ // a set with all triples that will be inserted in the store
+ private Set<Triple> allInsertTriples;
+
+ // a set with all triples that will be deleted in the store
+ private Set<Triple> allDeleteTriples;
+
+ // a set with all triples that will EFFECTIVELY be inserted in the graph
+ private Set<Triple> effectiveInsertTriples;
+
+ // a set with all triples that will EFFECTIVELY be deleted from the graph
+ private Set<Triple> effectiveDeleteTriples;
+
+ /**
+ * default constructor
+ */
+ public DifferenceTriples()
+ {
+ this.allInsertTriples = new HashSet<Triple>();
+ this.allDeleteTriples = new HashSet<Triple>();
+ this.effectiveInsertTriples = new HashSet<Triple>();
+ this.effectiveDeleteTriples = new HashSet<Triple>();
+ }
+
+ /**
+ * adds the given insert triple to the set of all insert triples
+ * @param triple
+ */
+ public void addAllInsertTriple(Triple triple)
+ {
+ this.allInsertTriples.add(triple);
+ }
+
+ /**
+ * adds the given delete triple to the set of delete triples
+ * @param triple
+ */
+ public void addAllDeleteTripel(Triple triple)
+ {
+ this.allDeleteTriples.add(triple);
+ }
+
+ /**
+ * adds the given insert triple to the set of effective insert triples
+ * @param triple
+ */
+ public void addEffectiveInsertTriple(Triple triple)
+ {
+ this.effectiveInsertTriples.add(triple);
+ }
+
+ /**
+ * adds the given delete triple to the set of delete triples
+ * @param triple
+ */
+ public void addEffectiveDeleteTripel(Triple triple)
+ {
+ this.effectiveDeleteTriples.add(triple);
+ }
+
+ public String getAllChangesAsJSON()
+ {
+ String jsonString = "";
+
+ jsonString += "{\n\"inserts\":\n[\n";
+
+ for(Triple t : this.getAllInsertTriples())
+ {
+ jsonString += "{\"s\":\"" + t.getSubject() + "\",\"p\":\"";
+ jsonString += t.getPredicate() + "\",\"o\":\"" + t.getObjectString() + "\"},";
+ }
+
+ if(!this.getAllInsertTriples().isEmpty())
+ {
+ jsonString = jsonString.substring(0, jsonString.length() - 1);
+ }
+
+ jsonString += "\n]\n\"deletes\":\n[\n";
+
+ for(Triple t : this.getAllDeleteTriples())
+ {
+ jsonString += "{\"s\":\"" + t.getSubject() + "\",\"p\":\"";
+ jsonString += t.getPredicate() + "\",\"o\":\"" + t.getObjectString() + "\"},";
+ }
+
+ if(!this.getAllDeleteTriples().isEmpty())
+ {
+ jsonString = jsonString.substring(0, jsonString.length() - 1);
+ }
+
+ jsonString += "\n]\n}";
+
+ return jsonString;
+ }
+
+ public String getEffectiveChangesAsJSON()
+ {
+ String jsonString = "";
+
+ jsonString += "{\n\"inserts\":\n[\n";
+
+ for(Triple t : this.getEffectiveInsertTriples())
+ {
+ jsonString += "{\"s\":\"" + t.getSubject() + "\",\"p\":\"";
+ jsonString += t.getPredicate() + "\",\"o\":\"" + t.getObjectString() + "\"},";
+ }
+
+ if(!this.getEffectiveInsertTriples().isEmpty())
+ {
+ jsonString = jsonString.substring(0, jsonString.length() - 1);
+ }
+
+ jsonString += "\n]\n\"deletes\":\n[\n";
+
+ for(Triple t : this.getEffectiveDeleteTriples())
+ {
+ jsonString += "{\"s\":\"" + t.getSubject() + "\",\"p\":\"";
+ jsonString += t.getPredicate() + "\",\"o\":\"" + t.getObjectString() + "\"},";
+ }
+
+ if(!this.getEffectiveDeleteTriples().isEmpty())
+ {
+ jsonString = jsonString.substring(0, jsonString.length() - 1);
+ }
+
+ jsonString += "\n]\n}";
+
+ return jsonString;
+ }
+ public Set<Triple> getAllInsertTriples() {
+ return allInsertTriples;
+ }
+
+ public void setAllInsertTriples(Set<Triple> allInsertTriples) {
+ this.allInsertTriples = allInsertTriples;
+ }
+
+ public Set<Triple> getAllDeleteTriples() {
+ return allDeleteTriples;
+ }
+
+ public void setAllDeleteTriples(Set<Triple> allDeleteTriples) {
+ this.allDeleteTriples = allDeleteTriples;
+ }
+
+ public Set<Triple> getEffectiveDeleteTriples() {
+ return effectiveDeleteTriples;
+ }
+
+ public void setEffectiveDeleteTriples(Set<Triple> effectiveDeleteTriples) {
+ this.effectiveDeleteTriples = effectiveDeleteTriples;
+ }
+
+ public Set<Triple> getEffectiveInsertTriples() {
+ return effectiveInsertTriples;
+ }
+
+ public void setEffectiveInsertTriples(Set<Triple> effectiveInsertTriples) {
+ this.effectiveInsertTriples = effectiveInsertTriples;
+ }
+}
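
A rough sketch of filling a DifferenceTriples by hand and serialising it (the Triple setters are assumed from how Triple is used elsewhere in this commit; the values are made up):

    // DifferenceTriplesUsageExample.java - illustrative only
    package scopes_service.query_pre_processor.query;

    public class DifferenceTriplesUsageExample {
        public static String buildJson() {
            Triple triple = new Triple();                  // Triple as used by QueryService
            triple.setSubject("http://example.org/s");
            triple.setPredicate("http://example.org/p");
            triple.setObject("o");

            DifferenceTriples diff = new DifferenceTriples();
            diff.addAllInsertTriple(triple);
            diff.addEffectiveInsertTriple(triple);
            return diff.getAllChangesAsJSON();             // {"inserts": [...], "deletes": []}
        }
    }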
diff --git a/src/main/java/scopes_service/query_pre_processor/query/QueryService.java b/src/main/java/scopes_service/query_pre_processor/query/QueryService.java
new file mode 100644
index 0000000..052d451
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/query/QueryService.java
@@ -0,0 +1,522 @@
+package scopes_service.query_pre_processor.query;
+
+import com.tenforce.semtech.SPARQLParser.SPARQL.InvalidSPARQLException;
+import com.tenforce.semtech.SPARQLParser.SPARQL.SPARQLQuery;
+import com.tenforce.semtech.SPARQLParser.SPARQLStatements.BlockStatement;
+import com.tenforce.semtech.SPARQLParser.SPARQLStatements.IStatement;
+import com.tenforce.semtech.SPARQLParser.SPARQLStatements.UpdateBlockStatement;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.TupleQueryResult;
+import org.springframework.stereotype.Service;
+import java.net.URLEncoder;
+import java.util.*;
+
+/**
+ * Created by langens-jonathan on 31.05.16.
+ */
+@Service
+public class QueryService
+{
+ public SPARQLService sparqlService;
+ public QueryService() {
+ this.sparqlService = new SPARQLService();
+ }
+ public QueryService(SPARQLService service){this.sparqlService = service;}
+
+ /**
+ * returns a construct that describes the triples that will EFFECTIVELY be inserted
+ * or deleted by running a certain query on the triple store.
+ *
+ * the triple store on which the query will be executed is assumed to be the
+ * default store for the sparql service.
+ *
+ * @param parsedQuery
+ * @return
+ */
+
+ public DifferenceTriples getDifferenceTriples(SPARQLQuery parsedQuery) throws InvalidSPARQLException
+ {
+ DifferenceTriples differenceTriples = new DifferenceTriples();
+
+ String queryPrefix = "";
+ for(String key : parsedQuery.getPrefixes().keySet())
+ {
+ queryPrefix += "PREFIX " + key + ": <" + parsedQuery.getPrefixes().get(key) + ">\n";
+ }
+
+ List<Triple> deleteTriples = new ArrayList<Triple>();
+ List<Triple> insertTriples = new ArrayList<Triple>();
+
+ SPARQLQuery clonedQuery = parsedQuery.clone();
+ parsedQuery.replaceGraphStatements("");
+ clonedQuery.replaceGraphStatements("");
+
+ for(IStatement statement : parsedQuery.getStatements()) {
+ if (statement.getType().equals(IStatement.StatementType.UPDATEBLOCK)) {
+ UpdateBlockStatement updateBlockStatement = (UpdateBlockStatement)statement;
+
+ String graph = this.sparqlService.getDefaultGraph();
+ if(!parsedQuery.getGraph().isEmpty())
+ {
+ graph = parsedQuery.getGraph();
+ }
+ if(!updateBlockStatement.getGraph().isEmpty())
+ {
+ graph = updateBlockStatement.getGraph();
+ }
+
+ String extractQuery = queryPrefix + "WITH <" + graph + ">\n";
+ extractQuery += "CONSTRUCT\n{\n";
+
+ for(IStatement innerStatement : updateBlockStatement.getStatements())
+ {
+ extractQuery += innerStatement.toString() + "\n";
+ }
+
+ extractQuery += "}\nWHERE\n{\n";
+
+ if(updateBlockStatement.getWhereBlock() != null) {
+ for (IStatement whereStatement : updateBlockStatement.getWhereBlock().getStatements()) {
+ extractQuery += whereStatement.toString() + "\n";
+ }
+ }
+
+ extractQuery += "}";
+
+ TupleQueryResult result = this.sparqlService.selectQuery(extractQuery);
+
+ while (result.hasNext()) {
+ Triple triple = new Triple();
+ BindingSet bs = result.next();
+ Iterator<org.openrdf.query.Binding> b = bs.iterator();
+ while (b.hasNext()) {
+ org.openrdf.query.Binding bind = b.next();
+ if (bind.getName().equals("P"))
+ triple.setPredicate(bind.getValue().stringValue());
+ if (bind.getName().equals("S"))
+ triple.setSubject(bind.getValue().stringValue());
+ if (bind.getName().equals("O"))
+ triple.setObject(bind.toString().substring(2, bind.toString().length()));
+ }
+ if (updateBlockStatement.getUpdateType().equals(BlockStatement.BLOCKTYPE.INSERT)) {
+ insertTriples.add(triple);
+ }
+ else
+ {
+ deleteTriples.add(triple);
+ }
+ }
+ }
+ }
+
+ // now insert the delete triples in a temporary graph
+ String deleteGraph = "";
+
+ // first clear the graph
+ this.sparqlService.deleteQuery("with " + deleteGraph + " delete {?s ?p ?o} where {?s ?p ?o.}");
+
+ String tmpDeleteInsert = queryPrefix + "\n with " + deleteGraph + "\ninsert data\n{\n";
+ for(Triple t : deleteTriples)
+// tmpDeleteInsert += "<" + t.getSubject() + "> <" + t.getPredicate() + "> " + t.getObjectAsString() + " .\n";
+ if(t.getObjectType().endsWith(".org/2001/XMLSchema#string>"))
+ tmpDeleteInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> " + t.getObjectAsString() + " .\n";
+ else
+ tmpDeleteInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> " + t.getObjectAsString() + " .\n";
+
+ tmpDeleteInsert += "}";
+
+ // then we store it
+ this.sparqlService.insertQuery(tmpDeleteInsert);
+
+ // then we insert the insert triples in a temporary graph
+ String insertGraph = "";
+
+ // again first clear it
+ this.sparqlService.deleteQuery("with " + insertGraph + " delete {?s ?p ?o} where {?s ?p ?o.}");
+
+ String tmpInsertInsert = queryPrefix + "\n with " + insertGraph + "\ninsert data\n{\n";
+ for(Triple t : insertTriples)
+// tmpInsertInsert += "<" + t.getSubject() + "> <" + t.getPredicate() + "> " + t.getObjectAsString() + " .\n";
+ if(t.getObjectType().endsWith("w3.org/2001/XMLSchema#string>"))
+ tmpInsertInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> " + t.getObjectAsString() + " .\n";
+ else
+ tmpInsertInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> " + t.getObjectAsString() + " .\n";
+
+ tmpInsertInsert += "}";
+
+ // then we store it
+ this.sparqlService.insertQuery(tmpInsertInsert);
+
+ differenceTriples.setAllDeleteTriples(new HashSet<Triple>(deleteTriples));
+ differenceTriples.setAllInsertTriples(new HashSet<Triple>(insertTriples));
+
+
+ // then we check what would be deleted by doing an intersection between the
+ // real graph and our tmp delete graph
+ String newQuery = "SELECT ?s ?p ?o WHERE { GRAPH <" + this.sparqlService.getDefaultGraph() + "> { ?s ?p ?o . } .\n GRAPH " + deleteGraph + " { ?s ?p ?o . } .\n}";
+
+ List<Triple> confirmedDeletes = new ArrayList<Triple>();
+
+ try
+ {
+ String url = "http://localhost:8890/sparql?query=" + URLEncoder.encode(newQuery, "UTF-8");
+ confirmedDeletes = this.sparqlService.getTriplesViaGet(url);
+ } catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+
+ differenceTriples.setEffectiveDeleteTriples(new HashSet<Triple>(confirmedDeletes));
+
+
+ // same thing for inserts
+ List<Triple> confirmedInserts = new ArrayList<Triple>();
+ newQuery = "SELECT ?s ?p ?o WHERE {graph " + insertGraph + " {?s ?p ?o.}.\nminus\n{\ngraph <" + this.sparqlService.getDefaultGraph() + "> {?s ?p ?o.}.}\n}";
+ try
+ {
+ String url = "http://localhost:8890/sparql?query=" + URLEncoder.encode(newQuery, "UTF-8");
+ confirmedInserts = this.sparqlService.getTriplesViaGet(url);
+ } catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+
+ differenceTriples.setEffectiveInsertTriples(new HashSet<Triple>(confirmedInserts));
+
+ return differenceTriples;
+
+ }
+/*
+ public DifferenceTriples getDifferenceTriples(String query) throws InvalidSPARQLException
+ {
+ SPARQLQuery pq = new SPARQLQuery(query);
+
+ DifferenceTriples differenceTriples = new DifferenceTriples();
+
+ if(!pq.getType().name().equals("UPDATE"))
+ {
+ return differenceTriples;
+ }
+
+ // extract all prefixes
+ Map prefixes = getPrefixes(query);
+
+ // extract all insert blocks
+ List insertBlocks = getTypedBlocks(query, "INSERT");
+
+ // all the delete blocks
+ List deleteBlocks = getTypedBlocks(query, "DELETE");
+
+ // then all the where blocks
+ List whereBlocks = getTypedBlocks(query, "WHERE");
+
+ // get a list of unknowns
+ Set unkowns = getUnknowns(insertBlocks);
+
+ // add the unknowns from the delete blocks
+ unkowns.addAll(getUnknowns(deleteBlocks));
+
+ // build the prefix for the queries
+ String queryPrefix = "";
+ for(String k : prefixes.keySet())
+ {
+ queryPrefix += "PREFIX " + k + ": <" + prefixes.get(k) + ">\n";
+ }
+
+ String whereBlock = "";
+
+ for(String block : whereBlocks)
+ {
+ if(!block.endsWith("."))block += ".";
+ whereBlock += "\n" + block;
+ }
+
+ List deleteTriples = new ArrayList();
+ for(String block : deleteBlocks)
+ {
+ String q = queryPrefix + "\nWITH <" + this.sparqlService.getDefaultGraph() + ">\nCONSTRUCT\n{\n";
+ q += block;
+ q += "\n}\nWHERE\n{\n" + whereBlock + "\n}";
+
+ TupleQueryResult result = this.sparqlService.selectQuery(q);
+
+ while (result.hasNext()) {
+ Triple triple = new Triple();
+ BindingSet bs = result.next();
+ Iterator b = bs.iterator();
+ while (b.hasNext()) {
+ org.openrdf.query.Binding bind = b.next();
+ if(bind.getName().equals("P"))
+ triple.setPredicate(bind.getValue().stringValue());
+ if(bind.getName().equals("S"))
+ triple.setSubject(bind.getValue().stringValue());
+ if(bind.getName().equals("O"))
+ triple.setObject(bind.getValue());
+ }
+ deleteTriples.add(triple);
+ }
+ }
+
+ List insertTriples = new ArrayList();
+ for(String block : insertBlocks)
+ {
+ String q = queryPrefix + "\nWITH <" + this.sparqlService.getDefaultGraph() + ">\nCONSTRUCT\n{\n";
+ q += block;
+ q += "\n}\nWHERE\n{\n" + whereBlock + "\n}";
+
+ TupleQueryResult result = this.sparqlService.selectQuery(q);
+
+ while (result.hasNext()) {
+ Triple triple = new Triple();
+ BindingSet bs = result.next();
+ Iterator b = bs.iterator();
+ while (b.hasNext()) {
+ org.openrdf.query.Binding bind = b.next();
+ if(bind.getName().equals("P"))
+ triple.setPredicate(bind.getValue().stringValue());
+ if(bind.getName().equals("S"))
+ triple.setSubject(bind.getValue().stringValue());
+ if(bind.getName().equals("O"))
+ triple.setObject(bind.getValue());
+ }
+ insertTriples.add(triple);
+ }
+ }
+
+ // now insert the delete triples in a temporary graph
+ String deleteGraph = "";
+
+ // first clear the graph
+ this.sparqlService.deleteQuery("with " + deleteGraph + " delete {?s ?p ?o} where {?s ?p ?o.}");
+
+ String tmpDeleteInsert = queryPrefix + "\n with " + deleteGraph + "\ninsert data\n{\n";
+ for(Triple t : deleteTriples)
+ if(t.getObject().toString().endsWith("^^"))
+ tmpDeleteInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> \"" + t.getObject().stringValue() + "\" .\n";
+ else
+ tmpDeleteInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> " + t.getObject().toString() + " .\n";
+
+ tmpDeleteInsert += "}";
+
+ // then we store it
+ this.sparqlService.insertQuery(tmpDeleteInsert);
+
+ // then we insert the insert triples in a temporary graph
+ String insertGraph = "";
+
+ // again first clear it
+ this.sparqlService.deleteQuery("with " + insertGraph + " delete {?s ?p ?o} where {?s ?p ?o.}");
+
+ String tmpInsertInsert = queryPrefix + "\n with " + insertGraph + "\ninsert data\n{\n";
+ for(Triple t : insertTriples)
+ if(t.getObject().toString().endsWith("^^"))
+ tmpInsertInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> \"" + t.getObject().stringValue() + "\" .\n";
+ else
+ tmpInsertInsert += "<" + t.getSubject().substring(1, t.getSubject().length()) + "> <" + t.getPredicate() + "> " + t.getObject().toString() + " .\n";
+
+ tmpInsertInsert += "}";
+
+ // then we store it
+ this.sparqlService.insertQuery(tmpInsertInsert);
+
+ differenceTriples.setAllDeleteTriples(new HashSet(deleteTriples));
+ differenceTriples.setAllInsertTriples(new HashSet(insertTriples));
+
+
+ // then we check what would be deleted by doing an intersection between the
+ // real graph and our tmp delete graph
+ String newQuery = "SELECT ?s ?p ?o WHERE { GRAPH <" + this.sparqlService.getDefaultGraph() + "> { ?s ?p ?o . } .\n GRAPH " + deleteGraph + " { ?s ?p ?o . } .\n}";
+
+ List confirmedDeletes = new ArrayList();
+
+ try
+ {
+ String url = "http://localhost:8890/sparql?query=" + URLEncoder.encode(newQuery, "UTF-8");
+ confirmedDeletes = this.sparqlService.getTriplesViaGet(url);
+ } catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+
+ differenceTriples.setEffectiveDeleteTriples(new HashSet(confirmedDeletes));
+
+
+ // same thing for inserts
+ List confirmedInserts = new ArrayList();
+ newQuery = "SELECT ?s ?p ?o WHERE {graph " + insertGraph + " {?s ?p ?o.}.\nminus\n{\ngraph <" + this.sparqlService.getDefaultGraph() + "> {?s ?p ?o.}.}\n}";
+ try
+ {
+ String url = "http://localhost:8890/sparql?query=" + URLEncoder.encode(newQuery, "UTF-8");
+ confirmedInserts = this.sparqlService.getTriplesViaGet(url);
+ } catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+
+ differenceTriples.setEffectiveInsertTriples(new HashSet(confirmedInserts));
+
+ return differenceTriples;
+ }*/
+
+ /**
+ * returns a set with all unknowns in the query blocks
+ * @param blocks
+ * @return a set with all unknowns
+ */
+ public Set<String> getUnknowns(List<String> blocks)
+ {
+ Set<String> unknowns = new HashSet<String>();
+
+ for(String block : blocks)
+ {
+ for(String s : block.split(" "))
+ {
+ s = s.trim();
+ if(s.startsWith("?"))
+ {
+ unknowns.add(s);
+ }
+ }
+ }
+
+ return unknowns;
+ }
+
+ /**
+ * Splits a query into an array of elements, splitting on any whitespace
+ * character instead of a single split character
+ * @param query
+ * @return an array of strings that are split based on whitespace characters
+ */
+ public String [] splitQuery(String query)
+ {
+ Vector<String> splitQuery = new Vector<String>();
+
+ String currentBuffer = "";
+
+ for(byte b : query.getBytes())
+ {
+ if(((char) b)==' ' || ((char) b) == '\n' || ((char) b) == '\t' || ((char) b) == '\r')
+ {
+ currentBuffer = currentBuffer.trim();
+ if(currentBuffer.length() > 0)splitQuery.add(currentBuffer);
+ currentBuffer = "";
+ continue;
+ }
+ currentBuffer += ((char) b);
+ }
+
+ String [] arr = new String[splitQuery.size()];
+
+ for(int i = 0; i < splitQuery.size(); ++i)
+ {
+ arr[i] = splitQuery.elementAt(i);
+ }
+
+ return arr;
+ }
+
+ /**
+ * extracts the prefixes from a query and returns them as a hash
+ * @param query
+ * @return a map of prefixes and replacements
+ */
+ public Map<String, String> getPrefixes(String query)
+ {
+ Map<String, String> prefixes = new HashMap<String, String>();
+
+ boolean inPrefix = false;
+ String currentPrefix = "";
+
+ for(String s: splitQuery(query))//query.split(" "))
+ {
+ s = s.trim();
+ if(s.length()==0)
+ continue;
+
+ if(s.toLowerCase().equals("prefix"))
+ {
+ inPrefix = true;
+ currentPrefix = "";
+ continue;
+ }
+
+ if(s.toLowerCase().equals(":"))continue;
+
+ if(inPrefix)
+ {
+ if(currentPrefix.isEmpty())
+ {
+ if(s.endsWith(":"))
+ s = s.substring(0, s.length()-1);
+ currentPrefix=s;
+ continue;
+ }
+ else
+ {
+ s = s.substring(1, s.length()-1);
+ prefixes.put(currentPrefix, s);
+ currentPrefix = "";
+ inPrefix = false;
+ continue;
+ }
+ }
+ }
+
+ return prefixes;
+ }
+
+ /**
+ * returns the first index of the searchString in the baseString or -1 if it was not found
+ * @param baseString
+ * @param searchString
+ * @return the index of the searchString
+ */
+ public int indexOfInsertCase(String baseString, String searchString)
+ {
+ return baseString.toLowerCase().indexOf(searchString.toLowerCase());
+ }
+
+
+ /**
+ * Returns a list of 'block' contents for blocks that followed
+ * the given block type.
+ *
+ * @param query the query from which the blocks need to be extracted
+ * @param type the key word that will precede the { block contents }
+ * @return a list with 1 string per block
+ */
+ public List<String> getTypedBlocks(String query, String type)
+ {
+ List<String> blocks = new ArrayList<String>();
+
+ int nextOccurance = indexOfInsertCase(query, type);
+
+ String subQuery = query.substring(0, query.length());
+
+ while(nextOccurance > -1)
+ {
+ // restrict the subquery to the first occurrence of the start block
+ subQuery = subQuery.substring(nextOccurance, subQuery.length());
+
+ // first find the '{' that follows the next occurrence
+ int startOfBlock = subQuery.indexOf('{') + 1;
+
+ // next find the first '}' of that block
+ int endOfBlock = subQuery.indexOf('}');
+
+ // add the block to the list
+ blocks.add(subQuery.substring(startOfBlock, endOfBlock));
+
+ // strip away the current block
+ subQuery = subQuery.substring(endOfBlock);
+
+ // update the next occurrence
+ nextOccurance = indexOfInsertCase(subQuery, type);
+ }
+
+ return blocks;
+ }
+}
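
A rough sketch exercising the string-level helper methods of QueryService on a small update query (the query text is made up; constructing the service connects to the default local Virtuoso, which is not actually needed for these helpers):

    // QueryServiceHelpersExample.java - illustrative only
    package scopes_service.query_pre_processor.query;

    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class QueryServiceHelpersExample {
        public static void inspect() {
            QueryService queryService = new QueryService();
            String query = "PREFIX foaf: <http://xmlns.com/foaf/0.1/>\n"
                         + "INSERT { ?s foaf:name \"Alice\" } WHERE { ?s ?p ?o }";

            Map<String, String> prefixes = queryService.getPrefixes(query);          // {foaf=http://xmlns.com/foaf/0.1/}
            List<String> insertBlocks = queryService.getTypedBlocks(query, "INSERT"); // contents of the INSERT block
            Set<String> unknowns = queryService.getUnknowns(insertBlocks);            // [?s]
        }
    }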
diff --git a/src/main/java/scopes_service/query_pre_processor/query/SPARQLService.java b/src/main/java/scopes_service/query_pre_processor/query/SPARQLService.java
new file mode 100644
index 0000000..715804c
--- /dev/null
+++ b/src/main/java/scopes_service/query_pre_processor/query/SPARQLService.java
@@ -0,0 +1,338 @@
+package scopes_service.query_pre_processor.query;
+
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResult;
+import org.openrdf.query.Update;
+import org.openrdf.repository.Repository;
+import org.openrdf.repository.RepositoryConnection;
+import virtuoso.sesame4.driver.VirtuosoRepository;
+
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.List;
+
+/**
+ * Created by jonathan-langens on 3/4/16.
+ */
+public class SPARQLService
+{
+ /**
+ * Data base connection constants
+ *
+ * These are private but they can be overridden by the child class by using the special constructor
+ */
+ private String url = "jdbc:virtuoso://localhost:1111";
+
+ private String user = "dba";
+
+ private String pwd = "dba";
+
+ private String defgraph = "http://mu.semte.ch/application";
+
+ /**
+ * Other class member variables
+ */
+ private Repository repository;
+
+ private RepositoryConnection connection;
+
+ private boolean initialized = false;
+
+
+ /**
+ * Special Constructor that allows the child class to set the database constants needed to
+ * query the RDF store.
+ *
+ * @param url the url where the RDF store resides
+ * @param user the username needed to connect
+ * @param pwd the password for that user
+ * @param defgraph the default graph
+ *
+ * @result the connection is set up for this object and the query functions will work as expected.
+ */
+ protected SPARQLService(String url, String user, String pwd, String defgraph)
+ {
+ this.url = url;
+ this.user = user;
+ this.pwd = pwd;
+ this.defgraph = defgraph;
+
+ initializeConnection();
+ }
+
+ public String getDefaultGraph()
+ {
+ return this.defgraph;
+ }
+
+ /**
+ * The default constructor will initialize the connection with the default settings
+ *
+ * @result the connection is set up for this object and the query functions will work as expected
+ */
+ protected SPARQLService()
+ {
+ initializeConnection();
+ }
+
+ /**
+ * Private method
+ *
+ * Goes through the trouble of setting up the connection.
+ */
+ private void initializeConnection()
+ {
+ System.out.println("loading jdbc driver");
+ try
+ {
+ Class.forName("virtuoso.jdbc4.Driver");
+ }
+ catch (ClassNotFoundException e)
+ {
+ e.printStackTrace();
+ }
+
+ System.out.println("loading repository connection");
+ repository = new VirtuosoRepository(url, user, pwd);//new VirtuosoRepository(url, user, pwd, defgraph);
+
+ try
+ {
+ repository.initialize();
+ connection = repository.getConnection();
+ this.initialized = true;
+ }catch(Exception e)
+ {
+ e.printStackTrace();
+ }
+
+ System.out.println("connection was successful");
+ }
+
+ /**
+ * Returns true if the initialization process was successful
+ *
+ * @return true if the connection was setup correctly
+ */
+ protected boolean isInitialized()
+ {
+ return this.initialized;
+ }
+
+ /**
+ * This function expects and executes a select query on the initialized connection.
+ *
+ * @param query a string representation of a SPARQL select query
+ * @return a tuple query result containing the result of your query
+ */
+ protected TupleQueryResult selectQuery(String query)
+ {
+ try {
+ TupleQuery tupleQuery = connection.prepareTupleQuery(org.openrdf.query.QueryLanguage.SPARQL, query);
+ return tupleQuery.evaluate();
+ } catch(Exception e)
+ {
+ e.printStackTrace();
+ }
+ return null;
+ }
+
+ /**
+ * Expects an insert query and executes it.
+ *
+ * @param query the insert query to be executed.
+ */
+ protected void insertQuery(String query)
+ {
+ try{
+ Update update = connection.prepareUpdate(org.openrdf.query.QueryLanguage.SPARQL, query);
+ update.execute();
+ connection.commit();
+ } catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Expects a delete query and executes it
+ *
+ * @param query the string representation of a SPARQL delete query
+ */
+ protected void deleteQuery(String query)
+ {
+ this.insertQuery(query);
+ }
+
+ /**
+ * this sends a GET request to the given URL and produces a list of triple objects
+ * that match the query.
+ *
+ * the url is supposed to be formatted as follows
+ * http://localhost:8890/sparql?query=URL_ENCODED_SELECT_QUERY
+ *
+ * TODO this method is still quite ugly, the main reason for implementing this is that
+ * TODO the sesame library does not allow you to post a query with 2 graphs
+ *
+ * TODO this method is also not generic and will only work for the specific use case for which it was implemented
+ *
+ * @param url a fully url-endpoint with query url that you would use to do a GET with postman
+ * @return a list of triples that were returned by the SPARQL endpoint
+ * @throws MalformedURLException if the URL cannot be passed to the constructor of a java.util.URL object
+ * @throws IOException if the connection to the SPARQL endpoint cannot be opened
+ */
+ @SuppressWarnings("unchecked")
+ public List<Triple> getTriplesViaGet(String url) throws MalformedURLException, IOException
+ {
+ URL u = new URL(url);
+ HttpURLConnection connection = (HttpURLConnection) u.openConnection();
+
+ // just want to do an HTTP GET here
+ connection.setRequestMethod("GET");
+ connection.setRequestProperty("Accept", "application/json");
+
+ // uncomment this if you want to write output to this url
+ //connection.setDoOutput(true);
+
+ // give it 15 seconds to respond
+ connection.setReadTimeout(15*1000);
+ connection.connect();
+
+ BufferedReader reader = null;
+ StringBuilder stringBuilder;
+
+ // read the output from the server
+ reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
+ stringBuilder = new StringBuilder();
+
+ String line = null;
+ while ((line = reader.readLine()) != null)
+ {
+ stringBuilder.append(line + "\n");
+ }
+ String jsonString = stringBuilder.toString();
+
+ ObjectMapper mapper = new ObjectMapper();
+ Map jsonMap = mapper.readValue(jsonString, Map.class);
+
+
+ List l =((List