This repository has been archived by the owner on Apr 13, 2024. It is now read-only.

Merge branch 'master' of https://github.com/wso2/product-das
# Conflicts:
#	pom.xml
ramindu90 committed Jul 6, 2017
2 parents 6c98150 + 96a61a2 commit e16382a
Showing 53 changed files with 485 additions and 178 deletions.
38 changes: 31 additions & 7 deletions modules/distribution/pom.xml
@@ -257,7 +257,6 @@
<version>${siddhi.execution.map.version}</version>
<type>jar</type>
</artifactItem>
- <!--IO Extensions-->
<artifactItem>
<groupId>org.wso2.extension.siddhi.execution.math</groupId>
<artifactId>siddhi-execution-math</artifactId>
@@ -276,6 +275,31 @@
<version>${siddhi.execution.extrema.version}</version>
<type>jar</type>
</artifactItem>
+ <artifactItem>
+ <groupId>org.wso2.extension.siddhi.execution.timeseries</groupId>
+ <artifactId>siddhi-execution-timeseries</artifactId>
+ <version>${siddhi.execution.timeseries.version}</version>
+ <type>jar</type>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.wso2.extension.siddhi.execution.geo</groupId>
+ <artifactId>siddhi-execution-geo</artifactId>
+ <version>${siddhi.execution.geo.version}</version>
+ <type>jar</type>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.wso2.extension.siddhi.execution.ml</groupId>
+ <artifactId>siddhi-execution-ml</artifactId>
+ <version>${siddhi.execution.ml.version}</version>
+ <type>jar</type>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.wso2.extension.siddhi.script.js</groupId>
+ <artifactId>siddhi-script-js</artifactId>
+ <version>${siddhi.script.js.version}</version>
+ <type>jar</type>
+ </artifactItem>
+ <!--IO Extensions-->
<artifactItem>
<groupId>org.wso2.extension.siddhi.io.jms</groupId>
<artifactId>siddhi-io-jms</artifactId>
@@ -326,6 +350,12 @@
<version>${siddhi.map.json.version}</version>
<type>jar</type>
</artifactItem>
+ <artifactItem>
+ <groupId>org.wso2.extension.siddhi.map.binary</groupId>
+ <artifactId>siddhi-map-binary</artifactId>
+ <version>${siddhi.map.binary.version}</version>
+ <type>jar</type>
+ </artifactItem>
<!--Store Extensions-->
<artifactItem>
<groupId>org.wso2.extension.siddhi.store.solr</groupId>
@@ -345,12 +375,6 @@
<version>${siddhi.store.rdbms.version}</version>
<type>jar</type>
</artifactItem>
- <artifactItem>
- <groupId>org.wso2.extension.siddhi.execution.timeseries</groupId>
- <artifactId>siddhi-execution-timeseries</artifactId>
- <version>${siddhi.execution.timeseries.version}</version>
- <type>jar</type>
- </artifactItem>
<!--TODO remove after fixing jms-->
<artifactItem>
<groupId>org.apache.geronimo.specs</groupId>
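Of the execution extensions bundled by the additions above, siddhi-script-js is worth a closer look: it lets a Siddhi app define inline JavaScript functions. A minimal sketch of what that enables once the jar is in the distribution (the app, stream, and function names here are illustrative, not part of this commit):

@App:name("JsScriptSketch")

define function toUpperFn[JavaScript] return string {
    return data[0].toUpperCase();
};

define stream DeviceStream (id string, value float);

from DeviceStream
select toUpperFn(id) as upperId, value
insert into OutputStream;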
4 changes: 2 additions & 2 deletions modules/samples/artifacts/0001/README.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0001/kafka-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0001/kafka-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files

Kafka libs to be added and converted to OSGI from {KafkaHome}/libs are as follows
* kafka_2.10-0.9.0.1.jar
@@ -9,7 +9,7 @@ Kafka libs to be added and converted to OSGI from {KafkaHome}/libs are as follows
* zookeeper-3.4.6.jar

2. Add the OSGI converted kafka libs to {WSO2DASHome}/lib
- 3. Add the kafka libs to {WSO2DASHome}/samples/sample-clients/lib
+ 3. Add the kafka libs to {WSO2DASHome}/samples/artifacts/sample-clients/lib

4. Navigate to {KafkaHome} and start zookeeper node using bin/zookeeper-server-start.sh config/zookeeper.properties
5. Navigate to {KafkaHome} and start kafka server node using bin/kafka-server-start.sh config/server.properties
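For orientation, the kafka-sample.siddhi app from step 1 presumably consumes through the siddhi-io-kafka extension bundled in the distribution pom above; a rough sketch of such a source follows (the parameter values and the xml mapping are assumptions, not contents of this commit):

@source(type='kafka', topic.list='kafka_topic', partition.no.list='0', group.id='test-group', threading.option='single.thread', bootstrap.servers='localhost:9092',
    @map(type='xml'))
define stream SweetProductionStream (name string, amount double);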
7 changes: 7 additions & 0 deletions modules/samples/artifacts/0002/README.txt
@@ -0,0 +1,7 @@
+ 1. Copy {WSO2DASHome}/samples/artifacts/0002/SmartHomePlan.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+
+ 2. Navigate to {WSO2DASHome}/samples/sample-clients/tcp-server and run ant command without arguments.
+
+ 3. Start the worker using ./{WSO2DASHome}/bin/worker.sh
+
+ 4. Navigate to {WSO2DASHome}/samples/sample-clients/tcp-client and run ant command without arguments.
5 changes: 2 additions & 3 deletions modules/samples/artifacts/0002/SmartHomePlan.siddhi
@@ -1,11 +1,10 @@
@App:name("SmartHomePlan")

@Source(type = 'tcp', context='SmartHomeData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream SmartHomeData (id string, value float, property bool, plugId int, householdId int, houseId int, currentTime string);

- @sink(type='tcp', context='UsageStream', port='9893',
- @map(type='passThrough'))
+ @sink(type='tcp', url='tcp://localhost:9893/UsageStream', sync='true', @map(type='binary'))
define stream UsageStream (houseId int, maxVal float, minVal float, avgVal double);

from SmartHomeData#window.timeBatch(10 sec)
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0003/LoginAnalysisPlan.siddhi
@@ -14,7 +14,7 @@ from UserLoginStream
update or insert into UserLoginTable
on userId == UserLoginTable.userId;

- from UserLoginCheckStream join UserLoginTable
+ from UserLoginCheckStream#window.length(1) join UserLoginTable
on UserLoginCheckStream.userId == UserLoginTable.userId
select UserLoginTable.userId, UserLoginTable.lastLoginTime
insert into OutputStream;
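The fix above swaps a bare stream-to-table join for one windowed with #window.length(1), so only the latest check event is held on the stream side when UserLoginTable is probed. The same pattern in isolation, as a sketch with illustrative names:

define table ProfileTable (userId string, lastLoginTime long);
define stream CheckStream (userId string);

from CheckStream#window.length(1) join ProfileTable
    on CheckStream.userId == ProfileTable.userId
select ProfileTable.userId, ProfileTable.lastLoginTime
insert into OutputStream;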
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0003/README.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0003/LoginAnalysisPlan.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0003/LoginAnalysisPlan.siddhi file to {WSO2DASHome}/deployment/siddhi-files

2. Start the worker using ./{WSO2DASHome}/bin/worker.sh

2 changes: 1 addition & 1 deletion modules/samples/artifacts/0004/READMe.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0003/RoundRobinPlan.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0004/RoundRobinPlan.siddhi file to {WSO2DASHome}/deployment/siddhi-files

2. Run two tcp server clients on two terminals with follow commands at {WSO2DASHome}/samples/sample-clients/tcp-server
To receive events in round robin manner
8 changes: 4 additions & 4 deletions modules/samples/artifacts/0004/RoundRobinPlan.siddhi
@@ -4,10 +4,10 @@

define stream UsageInputStream (houseId int, maxVal float, minVal float, avgVal double);

- @sink(type='tcp', context='UsageStream',
- @distribution(strategy='roundRobin',
- @destination(port = '8081'),
- @destination(port = '8082')))
+ @sink(type='tcp', url='tcp://localhost:9893/UsageStream',context='UsageStream', @map(type='binary'),
+ @distribution(strategy='roundRobin',
+ @destination(port = '8081'),
+ @destination(port = '8082')))
define stream UsageStream (houseId int, maxVal float, minVal float, avgVal double);

from UsageInputStream
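Because @distribution(strategy='roundRobin') fans events out across the destination ports, listeners on 8081 and 8082 each receive alternate events. A sketch of a matching receiver app for one port (schema copied from UsageStream above; the per-source port parameter is assumed to work as in the other samples in this commit):

@Source(type='tcp', context='UsageStream', port='8081', @map(type='binary'))
define stream ReceivedUsageStream (houseId int, maxVal float, minVal float, avgVal double);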
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0005/README.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0003/xml-default-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0005/xml-default-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files

Kafka libs to be added and converted to OSGI from {KafkaHome}/libs are as follows
* kafka_2.10-0.9.0.1.jar
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0006/README.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0004/xml-custom-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0006/xml-custom-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files

Kafka libs to be added and converted to OSGI from {KafkaHome}/libs are as follows
* kafka_2.10-0.9.0.1.jar
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0007/README.txt
@@ -1,4 +1,4 @@
- [1] Copy {WSO2DASHome}/samples/0007/store-test-plan.siddhi file to {WSO2DASHome}/deployment/siddhi-files.
+ [1] Copy {WSO2DASHome}/samples/artifacts/0007/store-test-plan.siddhi file to {WSO2DASHome}/deployment/siddhi-files.

[*] The OSGi-fied H2 JDBC client should be added to the DAS classpath. This can be downloaded from the WSO2 Nexus at:
http://maven.wso2.org/nexus/content/groups/wso2-public/org/wso2/orbit/com/h2database/h2/1.4.191.wso2v1/h2-1.4.191.wso2v1.jar
4 changes: 2 additions & 2 deletions modules/samples/artifacts/0007/store-test-plan.siddhi
@@ -1,11 +1,11 @@
@App:name("store-test-plan")

@Source(type = 'tcp', context='SmartHomeData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream SmartHomeData (id string, value float, property bool, plugId int, householdId int, houseId int, currentTime string);

@Source(type = 'tcp', context='TestData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream TestData (property bool);

@Store(type='rdbms', jdbc.url='jdbc:h2:./dasdb', username='root', password='root', jdbc.driver.name='org.h2.Driver', field.length='currentTime:100')
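With the @Store annotation in place, the RDBMS-backed table is read and written like any Siddhi table. A sketch of persisting the incoming readings (the table name SmartHomeTable is hypothetical, since the table definition is collapsed in this diff):

from SmartHomeData
select id, value, property, plugId, householdId, houseId, currentTime
insert into SmartHomeTable;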
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0008/README.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0008/json-mapper-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0008/json-mapper-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files

2. Start the worker using ./{WSO2DASHome}/bin/worker.sh

2 changes: 1 addition & 1 deletion modules/samples/artifacts/0008/json-mapper-sample.siddhi
@@ -1,7 +1,7 @@
@App:name("json-mapper-sample")

@source(type='tcp', context='SmartHomeData', port='9892',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream SmartHomeData (houseId int, maxVal float, minVal float, avgVal double);

@sink(type='inMemory', topic='home', @map(type='json', @payload("""{"portfolio":{
4 changes: 2 additions & 2 deletions modules/samples/artifacts/0009/MongoDBStoreTestPlan.siddhi
@@ -1,11 +1,11 @@
@App:name("MongoDBStoreTestPlan")

@Source(type = 'tcp', context='SmartHomeData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream SmartHomeData (id string, value float, property bool, plugId int, householdId int, houseId int, currentTime string);

@Source(type = 'tcp', context='TestData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream TestData (property bool);

@Store(type='mongodb', mongodb.uri='mongodb://admin:[email protected]/Foo')
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0010/README.txt
@@ -1,4 +1,4 @@
- [1] Copy {WSO2DASHome}/samples/0010/http-io-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files.
+ [1] Copy {WSO2DASHome}/samples/artifacts/0010/http-io-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files.
[2] Navigate to {WSO2DASHome}/bin and start the server using ./worker.sh
[3] Navigate to {WSO2DASHome}/samples/sample-clients/http-server and run the "ant" command without arguments to start the HTTP Server
[4] Navigate to {WSO2DASHome}/samples/sample-clients/http-client and run the "ant" command without arguments to publish events to HTTP/HTTPS endpoint of the DAS server.
4 changes: 2 additions & 2 deletions modules/samples/artifacts/0011/store-solr-test-plan.siddhi
@@ -1,11 +1,11 @@
@App:name("store-solr-test-plan")

@Source(type = 'tcp', context='SmartHomeData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream SmartHomeData (id string, value float, property bool, plugId int, householdId int, houseId int, currentTime string);

@Source(type = 'tcp', context='TestData',
- @map(type='passThrough'))
+ @map(type='binary'))
define stream TestData (property bool);

@PrimaryKey('recordId')
2 changes: 1 addition & 1 deletion modules/samples/artifacts/0012/README.txt
@@ -1,4 +1,4 @@
- 1. Copy {WSO2DASHome}/samples/0012/jms-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files
+ 1. Copy {WSO2DASHome}/samples/artifacts/0012/jms-sample.siddhi file to {WSO2DASHome}/deployment/siddhi-files
2. Copy following libs in {ActiveMQHome}/lib to {WSO2DASHome}/lib
* hawtbuf-1.9.jar
* geronimo-j2ee-management_1.1_spec-1.0.1.jar
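The jms-sample.siddhi app from step 1 presumably receives events through the siddhi-io-jms extension bundled in the distribution pom above, which is why the ActiveMQ client jars must be on the classpath. A rough sketch of such a source (parameter names follow siddhi-io-jms conventions as best recalled; all values are assumptions):

@source(type='jms', destination='jms_sample_topic', connection.factory.type='topic',
    factory.initial='org.apache.activemq.jndi.ActiveMQInitialContextFactory', provider.url='tcp://localhost:61616',
    @map(type='json'))
define stream JmsInputStream (houseId int, avgVal double);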
94 changes: 93 additions & 1 deletion modules/samples/artifacts/0013/README
@@ -2,4 +2,96 @@

2. Start the worker using ./{WSO2DASHome}/bin/worker.sh

- 3. Navigate to {WSO2DASHome}/samples/sample-clients/tcp-client and run ant command without arguments
+ 3. Run the following curl commands to send sample events

+ curl -X POST \
+ http://localhost:9090/simulation/single \
+ -H 'content-type: text/plain' \
+ -d '{
+ "siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
+ "streamName": "SmartHomeData",
+ "timestamp": null,
+ "data": [
+ "10",
+ "55.6",
+ "55.6",
+ "10.5"
+ ]
+ }'
+
+ curl -X POST \
+ http://localhost:9090/simulation/single \
+ -H 'content-type: text/plain' \
+ -d '{
+ "siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
+ "streamName": "SmartHomeData",
+ "timestamp": null,
+ "data": [
+ "10",
+ "30.0",
+ "55.6",
+ "10.5"
+ ]
+ }'
+
+ curl -X POST \
+ http://localhost:9090/simulation/single \
+ -H 'content-type: text/plain' \
+ -d '{
+ "siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
+ "streamName": "SmartHomeData",
+ "timestamp": null,
+ "data": [
+ "10",
+ "40.0",
+ "55.6",
+ "10.5"
+ ]
+ }'
+
+ curl -X POST \
+ http://localhost:9090/simulation/single \
+ -H 'content-type: text/plain' \
+ -d '{
+ "siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
+ "streamName": "SmartHomeData",
+ "timestamp": null,
+ "data": [
+ "10",
+ "53.0",
+ "55.6",
+ "10.5"
+ ]
+ }'
+
+ curl -X POST \
+ http://localhost:9090/simulation/single \
+ -H 'content-type: text/plain' \
+ -d '{
+ "siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
+ "streamName": "SmartHomeData",
+ "timestamp": null,
+ "data": [
+ "10",
+ "90.0",
+ "55.6",
+ "10.5"
+ ]
+ }'
+
+ curl -X POST \
+ http://localhost:9090/simulation/single \
+ -H 'content-type: text/plain' \
+ -d '{
+ "siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
+ "streamName": "SmartHomeData",
+ "timestamp": null,
+ "data": [
+ "10",
+ "75.0",
+ "55.6",
+ "10.5"
+ ]
+ }'
+
+ 4. See the output in the WSO2DAS terminal
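Each curl above carries a distinct maxVal, which matters because the app batches on #window.unique:lengthBatch(maxVal,3) (see the Siddhi file below): three events with unique maxVal values complete a batch. Assuming the unique extension's usual replace-on-duplicate semantics, re-sending an already-seen maxVal (55.6, as in the first event) should replace its predecessor in the current batch rather than extend it; a quick way to check:

curl -X POST \
http://localhost:9090/simulation/single \
-H 'content-type: text/plain' \
-d '{
"siddhiAppName": "UniqueLengthBatchWindowSiddhiAppTest",
"streamName": "SmartHomeData",
"timestamp": null,
"data": [
"10",
"55.6",
"55.6",
"10.5"
]
}'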
7 changes: 0 additions & 7 deletions modules/samples/artifacts/0013/README.txt

This file was deleted.

@@ -1,12 +1,10 @@
@App:name("UniqueLengthBatchWindowSiddhiAppTest")

- @source(type='tcp', context='SmartHomeData', port='9892',
- @map(type='passThrough'))
define stream SmartHomeData(houseId int, maxVal float, minVal float, avgVal double);

define stream OutputStream(houseId int, maxVal float, minVal float, avgVal double);

- from SmartHomeData#window.unique:lengthBatch(maxVal,4)
+ from SmartHomeData#window.unique:lengthBatch(maxVal,3)
select houseId , maxVal , minVal , avgVal
insert expired events into OutputStream;

12 changes: 0 additions & 12 deletions modules/samples/artifacts/0013/time-execution-sample.siddhi

This file was deleted.
