From 3080d49141e36fa423a4685a06167a37d57efff5 Mon Sep 17 00:00:00 2001 From: Jason Fox Date: Tue, 28 May 2024 17:44:42 +0200 Subject: [PATCH] Remove tutorial from master --- .env | 36 - .gitpod.yml | 41 - ...Processing (Spark).postman_collection.json | 307 -------- README.ja.md | 721 ------------------ README.md | 668 +--------------- cosmos-examples/.java-version | 1 - cosmos-examples/dependency-reduced-pom.xml | 369 --------- cosmos-examples/pom.xml | 293 ------- .../src/main/resources/log4j.properties | 25 - .../org/fiware/cosmos/tutorial/Feedback.scala | 48 -- .../org/fiware/cosmos/tutorial/Logger.scala | 33 - .../org/fiware/cosmos/tutorial/LoggerLD.scala | 33 - docker-compose.yml | 152 ---- import-data | 525 ------------- log4j.properties | 40 - provision-devices | 332 -------- services | 156 +--- subscription | 21 - 18 files changed, 7 insertions(+), 3794 deletions(-) delete mode 100644 .env delete mode 100644 .gitpod.yml delete mode 100644 FIWARE Real-time Processing (Spark).postman_collection.json delete mode 100644 README.ja.md delete mode 100644 cosmos-examples/.java-version delete mode 100644 cosmos-examples/dependency-reduced-pom.xml delete mode 100644 cosmos-examples/pom.xml delete mode 100644 cosmos-examples/src/main/resources/log4j.properties delete mode 100644 cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala delete mode 100644 cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala delete mode 100644 cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/LoggerLD.scala delete mode 100644 docker-compose.yml delete mode 100755 import-data delete mode 100644 log4j.properties delete mode 100755 provision-devices delete mode 100755 subscription diff --git a/.env b/.env deleted file mode 100644 index d43cd8b..0000000 --- a/.env +++ /dev/null @@ -1,36 +0,0 @@ -# Project name -COMPOSE_PROJECT_NAME=fiware - -# Orion variables -ORION_PORT=1026 -ORION_VERSION=3.11.0 - -# MongoDB variables -MONGO_DB_PORT=27017 -MONGO_DB_VERSION=6.0 - -# IoT Agent Ultralight Variables -ULTRALIGHT_VERSION=3.1.0-distroless -IOTA_NORTH_PORT=4041 -IOTA_SOUTH_PORT=7896 - -# Tutorial variables -TUTORIAL_APP_PORT=3000 -TUTORIAL_DUMMY_DEVICE_PORT=3001 - -# Spark variables -SPARK_VERSION=2.4.5-hadoop2.7 -SPARK_WEB_APP_PORT=8080 -SPARK_SERVICE_PORT=9001 -SPARK_MASTER_PORT=7077 -SPARK_WORKER_1_PORT=8081 -# -# The SPARK_VERSION - 2.4.5 must match the values in the pom.xml -# -# -# UTF-8 -# 2.4.5 -# 2.11 -# 2.11.2 -# 2.4.6 -# \ No newline at end of file diff --git a/.gitpod.yml b/.gitpod.yml deleted file mode 100644 index 6fb63c9..0000000 --- a/.gitpod.yml +++ /dev/null @@ -1,41 +0,0 @@ -tasks: - - name: Pull Images - init: ./services create - -ports: - - name: Orion - description: Context Broker - port: 1026 - onOpen: notify - - name: Wilma - description: PEP Proxy - port: 1027 - onOpen: ignore - - name: Tutorial App - description: Web app displaying context data - port: 3000 - onOpen: open-preview - - name: Tutorial Devices - description: Dummy IoT Sensors over HTTP - port: 3001 - onOpen: ignore - - name: Keyrock - description: Identity Manager - port: 3005 - onOpen: open-preview - - name: MySQL - description: Database for Keyrock - port: 3306 - onOpen: ignore - - name: IoT Agent (North Port) - description: NGSI data and device provisioning - port: 4041 - onOpen: ignore - - name: IoT Agent (South Port) - description: Ultralight HTTP measures - port: 7896 - onOpen: ignore - - name: MongoDB - description: Database for Orion + IoT Agent - port: 27017 - onOpen: ignore \ No 
newline at end of file diff --git a/FIWARE Real-time Processing (Spark).postman_collection.json b/FIWARE Real-time Processing (Spark).postman_collection.json deleted file mode 100644 index 67a8bdb..0000000 --- a/FIWARE Real-time Processing (Spark).postman_collection.json +++ /dev/null @@ -1,307 +0,0 @@ -{ - "info": { - "_postman_id": "c5e9ec44-1e49-4561-8b40-9c8636838f08", - "name": "FIWARE Real-time Processing (Spark)", - "description": "[![FIWARE Core Context Management](https://nexus.lab.fiware.org/static/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/processing/README.md)\n[![NGSI v2](https://img.shields.io/badge/NGSI-v2-5dc0cf.svg)](https://fiware-ges.github.io/orion/api/v2/stable/)\n\nThis tutorial is an introduction to the [FIWARE Cosmos Orion Spark Connector](http://fiware-cosmos-spark.rtfd.io), which\nenables easier Big Data analysis over context, integrated with one of the most popular BigData platforms:\n[Apache Spark](https://spark.apache.org/). Apache Spark is a framework and distributed processing engine for stateful\ncomputations over unbounded and bounded data streams. Spark has been designed to run in all common cluster environments,\nperform computations at in-memory speed and at any scale.\n\nThe `docker-compose` files for this tutorial can be found on GitHub: \n\n![GitHub](https://fiware.github.io/tutorials.Big-Data-Spark/icon/GitHub-Mark-32px.png) [https://github.com/FIWARE/tutorials.Big-Data-Spark](https://github.com/FIWARE/tutorials.Big-Data-Spark)\n\n# Real-time Processing and Big Data Analysis\n\n> \"You have to find what sparks a light in you so that you in your own way can illuminate the world.\"\n>\n> — Oprah Winfrey\n\nSmart solutions based on FIWARE are architecturally designed around microservices. They are therefore are designed to\nscale-up from simple applications (such as the Supermarket tutorial) through to city-wide installations base on a large\narray of IoT sensors and other context data providers.\n\nThe massive amount of data involved eventually becomes too much for a single machine to analyse, process and store, and\ntherefore the work must be delegated to additional distributed services. These distributed systems form the basis of\nso-called **Big Data Analysis**. The distribution of tasks allows developers to be able to extract insights from huge\ndata sets which would be too complex to be dealt with using traditional methods. and uncover hidden patterns and\ncorrelations.\n\nAs we have seen, context data is core to any Smart Solution, and the Context Broker is able to monitor changes of state\nand raise [subscription events](https://github.com/Fiware/tutorials.Subscriptions) as the context changes. For smaller\ninstallations, each subscription event can be processed one-by-one by a single receiving endpoint, however as the system\ngrows, another technique will be required to avoid overwhelming the listener, potentially blocking resources and missing\nupdates.\n\n**Apache Spark** is an open-source distributed general-purpose cluster-computing framework. It provides an interface for\nprogramming entire clusters with implicit data parallelism and fault tolerance. The **Cosmos Spark** connector allows\ndevelopers write custom business logic to listen for context data subscription events and then process the flow of the\ncontext data. Spark is able to delegate these actions to other workers where they will be acted upon either in\nsequentially or in parallel as required. 
The data flow processing itself can be arbitrarily complex.\n\nObviously, in reality, our existing Supermarket scenario is far too small to require the use of a Big Data solution, but\nwill serve as a basis for demonstrating the type of real-time processing which may be required in a larger solution\nwhich is processing a continuous stream of context-data events.\n\n# Architecture\n\nThis application builds on the components and dummy IoT devices created in\n[previous tutorials](https://github.com/FIWARE/tutorials.IoT-Agent/). It will make use of three FIWARE components - the\n[Orion Context Broker](https://fiware-orion.readthedocs.io/en/latest/), the\n[IoT Agent for Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/), and the\n[Cosmos Orion Spark Connector](https://fiware-cosmos-spark.readthedocs.io/en/latest/) for connecting Orion to an\n[Apache Spark cluster](https://spark.apache.org/docs/latest/cluster-overview.html). The Spark cluster itself will\nconsist of a single **Cluster Manager** _master_ to coordinate execution and some **Worker Nodes** _worker_ to execute\nthe tasks.\n\nBoth the Orion Context Broker and the IoT Agent rely on open source [MongoDB](https://www.mongodb.com/) technology to\nkeep persistence of the information they hold. We will also be using the dummy IoT devices created in the\n[previous tutorial](https://github.com/FIWARE/tutorials.IoT-Agent/).\n\nTherefore the overall architecture will consist of the following elements:\n\n- Two **FIWARE Generic Enablers** as independent microservices:\n - The FIWARE [Orion Context Broker](https://fiware-orion.readthedocs.io/en/latest/) which will receive requests\n using [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2)\n - The FIWARE [IoT Agent for Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/) which will\n receive northbound measurements from the dummy IoT devices in\n [Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/usermanual/index.html#user-programmers-manual)\n format and convert them to [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2) requests for the\n context broker to alter the state of the context entities\n- An [Apache Spark cluster](https://spark.apache.org/docs/latest/cluster-overview.html) consisting of a single\n **ClusterManager** and **Worker Nodes**\n - The FIWARE [Cosmos Orion Spark Connector](https://fiware-cosmos-spark.readthedocs.io/en/latest/) will be\n deployed as part of the dataflow which will subscribe to context changes and make operations on them in\n real-time\n- One [MongoDB](https://www.mongodb.com/) **database** :\n - Used by the **Orion Context Broker** to hold context data information such as data entities, subscriptions and\n registrations\n - Used by the **IoT Agent** to hold device information such as device URLs and Keys\n- Three **Context Providers**:\n - A webserver acting as set of [dummy IoT devices](https://github.com/FIWARE/tutorials.IoT-Sensors) using the\n [Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/usermanual/index.html#user-programmers-manual)\n protocol running over HTTP.\n - The **Stock Management Frontend** is not used in this tutorial. It does the following:\n - Display store information and allow users to interact with the dummy IoT devices\n - Show which products can be bought at each store\n - Allow users to \"buy\" products and reduce the stock count.\n - The **Context Provider NGSI** proxy is not used in this tutorial. 
It does the following:\n - receive requests using [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2)\n - makes requests to publicly available data sources using their own APIs in a proprietary format\n - returns context data back to the Orion Context Broker in\n [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2) format.\n\nThe overall architecture can be seen below:\n\n![](https://fiware.github.io/tutorials.Big-Data-Spark/img/Tutorial%20FIWARE%20Spark.png)\n\n## Spark Cluster Configuration\n\n```yaml\nspark-master:\n image: bde2020/spark-master:2.4.5-hadoop2.7\n container_name: spark-master\n expose:\n - \"8080\"\n - \"9001\"\n ports:\n - \"8080:8080\"\n - \"7077:7077\"\n - \"9001:9001\"\n environment:\n - INIT_DAEMON_STEP=setup_spark\n - \"constraint:node==spark-master\"\n```\n\n```yaml\nspark-worker-1:\n image: bde2020/spark-worker:2.4.5-hadoop2.7\n container_name: spark-worker-1\n depends_on:\n - spark-master\n ports:\n - \"8081:8081\"\n environment:\n - \"SPARK_MASTER=spark://spark-master:7077\"\n - \"constraint:node==spark-master\"\n```\n\nThe `spark-master` container is listening on three ports:\n\n- Port `8080` is exposed so we can see the web frontend of the Apache Spark-Master Dashboard.\n- Port `7070` is used for internal communications.\n\nThe `spark-worker-1` container is listening on one port:\n\n- Port `9001` is exposed so that the installation can receive context data subscriptions.\n- Ports `8081` is exposed so we can see the web frontend of the Apache Spark-Worker-1 Dashboard.\n\n# Prerequisites\n\n## Docker and Docker Compose\n\nTo keep things simple, all components will be run using [Docker](https://www.docker.com). **Docker** is a container\ntechnology which allows to different components isolated into their respective environments.\n\n- To install Docker on Windows follow the instructions [here](https://docs.docker.com/docker-for-windows/)\n- To install Docker on Mac follow the instructions [here](https://docs.docker.com/docker-for-mac/)\n- To install Docker on Linux follow the instructions [here](https://docs.docker.com/install/)\n\n**Docker Compose** is a tool for defining and running multi-container Docker applications. A series of\n[YAML files](https://github.com/FIWARE/tutorials.Big-Data-Spark/tree/master/docker-compose) are used to configure the\nrequired services for the application. This means all container services can be brought up in a single command. Docker\nCompose is installed by default as part of Docker for Windows and Docker for Mac, however Linux users will need to\nfollow the instructions found [here](https://docs.docker.com/compose/install/)\n\nYou can check your current **Docker** and **Docker Compose** versions using the following commands:\n\n```console\ndocker-compose -v\ndocker version\n```\n\nPlease ensure that you are using Docker version 20.10 or higher and Docker Compose 1.29 or higher and upgrade if\nnecessary.\n\n## Maven\n\n[Apache Maven](https://maven.apache.org/download.cgi) is a software project management and comprehension tool. Based on\nthe concept of a project object model (POM), Maven can manage a project's build, reporting and documentation from a\ncentral piece of information. We will use Maven to define and download our dependencies and to build and package our\ncode into a JAR file.\n\n## Cygwin for Windows\n\nWe will start up our services using a simple Bash script. 
Windows users should download [cygwin](http://www.cygwin.com/)\nto provide a command-line functionality similar to a Linux distribution on Windows.\n\n# Start Up\n\nBefore you start, you should ensure that you have obtained or built the necessary Docker images locally. Please clone\nthe repository and create the necessary images by running the commands shown below. Note that you might need to run some\nof the commands as a privileged user:\n\n```console\ngit clone https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial.git\ncd fiware-cosmos-orion-spark-connector-tutorial\n./services create\n```\n\nThis command will also import seed data from the previous tutorials and provision the dummy IoT sensors on startup.\n\nTo start the system, run the following command:\n\n```console\n./services start\n```\n\n> :information_source: **Note:** If you want to clean up and start over again you can do so with the following command:\n>\n> ```console\n> ./services stop\n> ```", - "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" - }, - "item": [ - { - "name": "Receiving context data and performing operations", - "item": [ - { - "name": "Orion - Subscribe to Context Changes", - "request": { - "method": "POST", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - }, - { - "key": "fiware-service", - "value": "openiot" - }, - { - "key": "fiware-servicepath", - "value": "/" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"description\": \"Notify Spark of all context changes\",\n \"subject\": {\n \"entities\": [\n {\n \"idPattern\": \".*\"\n }\n ]\n },\n \"notification\": {\n \"http\": {\n \"url\": \"http://spark-worker-1:9001\"\n }\n }\n}" - }, - "url": { - "raw": "http://{{orion}}/v2/subscriptions/", - "protocol": "http", - "host": [ - "{{orion}}" - ], - "path": [ - "v2", - "subscriptions", - "" - ] - }, - "description": "Once a dynamic context system is up and running (we have deployed the `Logger` job in the Spark cluster), we need to\ninform **Spark** of changes in context.\n\nThis is done by making a POST request to the `/v2/subscription` endpoint of the Orion Context Broker.\n\n- The `fiware-service` and `fiware-servicepath` headers are used to filter the subscription to only listen to\n measurements from the attached IoT Sensors, since they had been provisioned using these settings\n\n- The notification `url` must match the one our Spark program is listening to.\n\n- The `throttling` value defines the rate that changes are sampled.\n" - }, - "response": [] - }, - { - "name": "Orion - Check Subscription is working", - "request": { - "method": "GET", - "header": [ - { - "key": "fiware-service", - "value": "openiot" - }, - { - "key": "fiware-servicepath", - "value": "/" - } - ], - "url": { - "raw": "http://{{orion}}/v2/subscriptions/", - "protocol": "http", - "host": [ - "{{orion}}" - ], - "path": [ - "v2", - "subscriptions", - "" - ] - }, - "description": "If a subscription has been created, we can check to see if it is firing by making a GET request to the\n`/v2/subscriptions` endpoint.\n\nWithin the `notification` section of the response, you can see several additional `attributes` which describe the health\nof the subscription\n\nIf the criteria of the subscription have been met, `timesSent` should be greater than `0`. 
A zero value would indicate\nthat the `subject` of the subscription is incorrect or the subscription has created with the wrong `fiware-service-path`\nor `fiware-service` header\n\nThe `lastNotification` should be a recent timestamp - if this is not the case, then the devices are not regularly\nsending data. Remember to unlock the **Smart Door** and switch on the **Smart Lamp**\n\nThe `lastSuccess` should match the `lastNotification` date - if this is not the case then **Cosmos** is not receiving\nthe subscription properly. Check that the hostname and port are correct.\n\nFinally, check that the `status` of the subscription is `active` - an expired subscription will not fire." - }, - "response": [] - } - ], - "description": "According to the [Apache Spark documentation](https://spark.apache.org/documentation.html), Spark Streaming is an\nextension of the core Spark API that enables scalable, high-throughput, fault-tolerant stream processing of live data\nstreams. Data can be ingested from many sources like Kafka, Flume, Kinesis, or TCP sockets, and can be processed using\ncomplex algorithms expressed with high-level functions like map, reduce, join and window. Finally, processed data can be\npushed out to filesystems, databases, and live dashboards. In fact, you can apply Spark’s machine learning and graph\nprocessing algorithms on data streams.\n\n![](https://spark.apache.org/docs/latest/img/streaming-arch.png)\n\nInternally, it works as follows. Spark Streaming receives live input data streams and divides the data into batches,\nwhich are then processed by the Spark engine to generate the final stream of results in batches.\n\n![](https://spark.apache.org/docs/latest/img/streaming-flow.png)\n\nThis means that to create a streaming data flow we must supply the following:\n\n- A mechanism for reading Context data as a **Source Operator**\n- Business logic to define the transform operations\n- A mechanism for pushing Context data back to the context broker as a **Sink Operator**\n\nThe **Cosmos Spark** connector - `orion.spark.connector-1.2.1.jar` offers both **Source** and **Sink** operators. It\ntherefore only remains to write the necessary Scala code to connect the streaming dataflow pipeline operations together.\nThe processing code can be complied into a JAR file which can be uploaded to the spark cluster. 
Two examples will be\ndetailed below, all the source code for this tutorial can be found within the\n[cosmos-examples](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/tree/master/cosmos-examples)\ndirectory.\n\nFurther Spark processing examples can be found on\n[Spark Connector Examples](https://fiware-cosmos-spark-examples.readthedocs.io/).\n\n### Compiling a JAR file for Spark\n\nAn existing `pom.xml` file has been created which holds the necessary prerequisites to build the examples JAR file\n\nIn order to use the Orion Spark Connector we first need to manually install the connector JAR as an artifact using\nMaven:\n\n```console\ncd cosmos-examples\ncurl -LO https://github.com/ging/fiware-cosmos-orion-spark-connector/releases/download/FIWARE_7.9/orion.spark.connector-1.2.1.jar\nmvn install:install-file \\\n -Dfile=./orion.spark.connector-1.2.1.jar \\\n -DgroupId=org.fiware.cosmos \\\n -DartifactId=orion.spark.connector \\\n -Dversion=1.2.1 \\\n -Dpackaging=jar\n```\n\nThereafter the source code can be compiled by running the `mvn package` command within the same directory\n(`cosmos-examples`):\n\n```console\nmvn package\n```\n\nA new JAR file called `cosmos-examples-1.2.1.jar` will be created within the `cosmos-examples/target` directory.\n\n### Generating a stream of Context Data\n\nFor the purpose of this tutorial, we must be monitoring a system in which the context is periodically being updated. The\ndummy IoT Sensors can be used to do this. Open the device monitor page at `http://localhost:3000/device/monitor` and\nunlock a **Smart Door** and switch on a **Smart Lamp**. This can be done by selecting an appropriate the command from\nthe drop down list and pressing the `send` button. The stream of measurements coming from the devices can then be seen\non the same page:\n\n![](https://fiware.github.io/tutorials.Big-Data-Spark/img/door-open.gif)\n\n## Logger - Reading Context Data Streams\n\nThe first example makes use of the `OrionReceiver` operator in order to receive notifications from the Orion Context\nBroker. Specifically, the example counts the number notifications that each type of device sends in one minute. You can\nfind the source code of the example in\n[org/fiware/cosmos/tutorial/Logger.scala](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/blob/master/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala)\n\n### Logger - Installing the JAR\n\nRestart the containers if necessary, then access the worker container:\n\n```console\ndocker exec -it spark-worker-1 bin/bash\n```\n\nAnd run the following command to run the generated JAR package in the Spark cluster:\n\n```console\n/spark/bin/spark-submit \\\n--class org.fiware.cosmos.tutorial.Logger \\\n--master spark://spark-master:7077 \\\n--deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.1.jar \\\n--conf \"spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console\"\n```\n", - "event": [ - { - "listen": "prerequest", - "script": { - "id": "0477a1da-a1f5-42f3-a91e-fc90c4ed139a", - "type": "text/javascript", - "exec": [ - "" - ] - } - }, - { - "listen": "test", - "script": { - "id": "121ba677-9693-4482-89b3-2b30dc08fbf8", - "type": "text/javascript", - "exec": [ - "" - ] - } - } - ], - "protocolProfileBehavior": {} - }, - { - "name": "Logger - Checking the Output", - "item": [], - "description": "Leave the subscription running for **one minute**. 
Then, the output on the console on which you ran the Spark job will\nbe like the following:\n\n```text\nSensor(Bell,3)\nSensor(Door,4)\nSensor(Lamp,7)\nSensor(Motion,6)\n```\n\n### Logger - Analyzing the Code\n\n```scala\npackage org.fiware.cosmos.tutorial\n\nimport org.apache.spark.SparkConf\nimport org.apache.spark.streaming.{Seconds, StreamingContext}\nimport org.fiware.cosmos.orion.spark.connector.OrionReceiver\n\nobject Logger{\n\n def main(args: Array[String]): Unit = {\n\n val conf = new SparkConf().setAppName(\"Logger\")\n val ssc = new StreamingContext(conf, Seconds(60))\n // Create Orion Receiver. Receive notifications on port 9001\n val eventStream = ssc.receiverStream(new OrionReceiver(9001))\n\n // Process event stream\n val processedDataStream= eventStream\n .flatMap(event => event.entities)\n .map(ent => {\n new Sensor(ent.`type`)\n })\n .countByValue()\n .window(Seconds(60))\n\n processedDataStream.print()\n\n ssc.start()\n ssc.awaitTermination()\n }\n case class Sensor(device: String)\n}\n```\n\nThe first lines of the program are aimed at importing the necessary dependencies, including the connector. The next step\nis to create an instance of the `OrionReceiver` using the class provided by the connector and to add it to the\nenvironment provided by Spark.\n\nThe `OrionReceiver` constructor accepts a port number (`9001`) as a parameter. This port is used to listen to the\nsubscription notifications coming from Orion and converted to a `DataStream` of `NgsiEvent` objects. The definition of\nthese objects can be found within the\n[Orion-Spark Connector documentation](https://github.com/ging/fiware-cosmos-orion-spark-connector/blob/master/README.md#orionreceiver).\n\nThe stream processing consists of five separate steps. The first step (`flatMap()`) is performed in order to put\ntogether the entity objects of all the NGSI Events received in a period of time. Thereafter the code iterates over them\n(with the `map()` operation) and extracts the desired attributes. In this case, we are interested in the sensor `type`\n(`Door`, `Motion`, `Bell` or `Lamp`).\n\nWithin each iteration, we create a custom object with the property we need: the sensor `type`. For this purpose, we can\ndefine a case class as shown:\n\n```scala\ncase class Sensor(device: String)\n```\n\nThereafter can count the created objects by the type of device (`countByValue()`) and perform operations such as\n`window()` on them.\n\nAfter the processing, the results are output to the console:\n\n```scala\nprocessedDataStream.print()\n```\n", - "protocolProfileBehavior": {} - }, - { - "name": "Receiving context data, performing operations and persisting context data", - "item": [ - { - "name": "Orion - Subscribe to Context Changes", - "request": { - "method": "POST", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - }, - { - "key": "fiware-service", - "value": "openiot" - }, - { - "key": "fiware-servicepath", - "value": "/" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"description\": \"Notify Spark of all motion sensor\",\n \"subject\": {\n \"entities\": [\n {\n \"idPattern\": \"Motion.*\"\n }\n ]\n },\n \"notification\": {\n \"http\": {\n \"url\": \"http://spark-worker-1:9001\"\n }\n }\n}" - }, - "url": { - "raw": "http://{{orion}}/v2/subscriptions/", - "protocol": "http", - "host": [ - "{{orion}}" - ], - "path": [ - "v2", - "subscriptions", - "" - ] - }, - "description": "If the previous example has not been run, a new subscription will need to be set up. 
A narrower subscription can be set\nup to only trigger a notification when a motion sensor detects movement.\n\n> **Note:** If the previous subscription already exists, this step creating a second narrower Motion-only subscription\n> is unnecessary. There is a filter within the business logic of the scala task itself." - }, - "response": [] - }, - { - "name": "Orion - Check Subscription is working", - "request": { - "method": "GET", - "header": [ - { - "key": "fiware-service", - "value": "openiot" - }, - { - "key": "fiware-servicepath", - "value": "/" - } - ], - "url": { - "raw": "http://{{orion}}/v2/subscriptions/", - "protocol": "http", - "host": [ - "{{orion}}" - ], - "path": [ - "v2", - "subscriptions", - "" - ] - }, - "description": "If a subscription has been created, you can check to see if it is firing by making a GET \nrequest to the `/v2/subscriptions` endpoint.\n\nWithin the `notification` section of the response, you can see several additional `attributes` which describe the health of the subscription\n\nIf the criteria of the subscription have been met, `timesSent` should be greater than `0`.\nA zero value would indicate that the `subject` of the subscription is incorrect or the subscription \nhas created with the wrong `fiware-service-path` or `fiware-service` header\n\nThe `lastNotification` should be a recent timestamp - if this is not the case, then the devices\nare not regularly sending data. Remember to unlock the **Smart Door** and switch on the **Smart Lamp**\n\nThe `lastSuccess` should match the `lastNotification` date - if this is not the case \nthen **Draco** is not receiving the subscription properly. Check that the host name\nand port are correct. \n\nFinally, check that the `status` of the subscription is `active` - an expired subscription\nwill not fire." - }, - "response": [] - }, - { - "name": "Orion - Delete Subscription", - "request": { - "method": "DELETE", - "header": [ - { - "key": "fiware-service", - "value": "openiot" - }, - { - "key": "fiware-servicepath", - "value": "/" - } - ], - "body": { - "mode": "raw", - "raw": "" - }, - "url": { - "raw": "http://{{orion}}/v2/subscriptions/{{subscriptionId}}", - "protocol": "http", - "host": [ - "{{orion}}" - ], - "path": [ - "v2", - "subscriptions", - "{{subscriptionId}}" - ] - }, - "description": "If a subscription has been created, you can check to see if it is firing by making a GET \nrequest to the `/v2/subscriptions` endpoint.\n\nWithin the `notification` section of the response, you can see several additional `attributes` which describe the health of the subscription\n\nIf the criteria of the subscription have been met, `timesSent` should be greater than `0`.\nA zero value would indicate that the `subject` of the subscription is incorrect or the subscription \nhas created with the wrong `fiware-service-path` or `fiware-service` header\n\nThe `lastNotification` should be a recent timestamp - if this is not the case, then the devices\nare not regularly sending data. Remember to unlock the **Smart Door** and switch on the **Smart Lamp**\n\nThe `lastSuccess` should match the `lastNotification` date - if this is not the case \nthen **Draco** is not receiving the subscription properly. Check that the host name\nand port are correct. \n\nFinally, check that the `status` of the subscription is `active` - an expired subscription\nwill not fire." 
- }, - "response": [] - } - ], - "description": "The second example switches on a lamp when its motion sensor detects movement.\n\nThe dataflow stream uses the `OrionReceiver` operator in order to receive notifications and filters the input to only\nrespond to motion senseors and then uses the `OrionSink` to push processed context back to the Context Broker. You can\nfind the source code of the example in\n[org/fiware/cosmos/tutorial/Feedback.scala](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/blob/master/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala)\n\n### Feedback Loop - Installing the JAR\n\n```console\n/spark/bin/spark-submit \\\n--class org.fiware.cosmos.tutorial.Feedback \\\n--master spark://spark-master:7077 \\\n--deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.1.jar \\\n--conf \"spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console\"\n```", - "event": [ - { - "listen": "prerequest", - "script": { - "id": "304ca99a-e4d0-42b1-bb12-c622a653d9af", - "type": "text/javascript", - "exec": [ - "" - ] - } - }, - { - "listen": "test", - "script": { - "id": "cb7aca3f-0b0a-4afa-850b-af050c4240f7", - "type": "text/javascript", - "exec": [ - "" - ] - } - } - ], - "protocolProfileBehavior": {} - }, - { - "name": "Feedback Loop - Checking the Output", - "item": [], - "description": "Go to `http://localhost:3000/device/monitor`\n\nWithin any Store, unlock the door and wait. Once the door opens and the Motion sensor is triggered, the lamp will switch\non directly\n\n### Feedback Loop - Analyzing the Code\n\n```scala\npackage org.fiware.cosmos.tutorial\n\nimport org.apache.spark.SparkConf\nimport org.apache.spark.streaming.{Seconds, StreamingContext}\nimport org.fiware.cosmos.orion.spark.connector._\n\nobject Feedback {\n final val CONTENT_TYPE = ContentType.JSON\n final val METHOD = HTTPMethod.PATCH\n final val CONTENT = \"{\\n \\\"on\\\": {\\n \\\"type\\\" : \\\"command\\\",\\n \\\"value\\\" : \\\"\\\"\\n }\\n}\"\n final val HEADERS = Map(\"fiware-service\" -> \"openiot\",\"fiware-servicepath\" -> \"/\",\"Accept\" -> \"*/*\")\n\n def main(args: Array[String]): Unit = {\n\n val conf = new SparkConf().setAppName(\"Feedback\")\n val ssc = new StreamingContext(conf, Seconds(10))\n // Create Orion Receiver. Receive notifications on port 9001\n val eventStream = ssc.receiverStream(new OrionReceiver(9001))\n\n // Process event stream\n val processedDataStream = eventStream\n .flatMap(event => event.entities)\n .filter(entity=>(entity.attrs(\"count\").value == \"1\"))\n .map(entity=> new Sensor(entity.id))\n .window(Seconds(10))\n\n val sinkStream= processedDataStream.map(sensor => {\n val url=\"http://localhost:1026/v2/entities/Lamp:\"+sensor.id.takeRight(3)+\"/attrs\"\n OrionSinkObject(CONTENT,url,CONTENT_TYPE,METHOD,HEADERS)\n })\n // Add Orion Sink\n OrionSink.addSink( sinkStream )\n\n // print the results with a single thread, rather than in parallel\n processedDataStream.print()\n ssc.start()\n\n ssc.awaitTermination()\n }\n\n case class Sensor(id: String)\n}\n```\n\nAs you can see, it is similar to the previous example. The main difference is that it writes the processed data back in\nthe Context Broker through the **`OrionSink`**.\n\nThe arguments of the **`OrionSinkObject`** are:\n\n- **Message**: `\"{\\n \\\"on\\\": {\\n \\\"type\\\" : \\\"command\\\",\\n \\\"value\\\" : \\\"\\\"\\n }\\n}\"`. We send 'on' command\n- **URL**: `\"http://localhost:1026/v2/entities/Lamp:\"+node.id.takeRight(3)+\"/attrs\"`. 
TakeRight(3) gets the number of\n the room, for example '001')\n- **Content Type**: `ContentType.Plain`.\n- **HTTP Method**: `HTTPMethod.POST`.\n- **Headers**: `Map(\"fiware-service\" -> \"openiot\",\"fiware-servicepath\" -> \"/\",\"Accept\" -> \"*/*\")`. Optional parameter.\n We add the headers we need in the HTTP Request.", - "event": [ - { - "listen": "prerequest", - "script": { - "id": "c38a5871-639b-4f83-8326-2d0c6e247a87", - "type": "text/javascript", - "exec": [ - "" - ] - } - }, - { - "listen": "test", - "script": { - "id": "7c1b3690-4a40-419d-9e4f-dd8f3acde622", - "type": "text/javascript", - "exec": [ - "" - ] - } - } - ], - "protocolProfileBehavior": {} - } - ], - "event": [ - { - "listen": "prerequest", - "script": { - "id": "e3ee3d45-9b02-4548-b269-b1d13f810c9f", - "type": "text/javascript", - "exec": [ - "" - ] - } - }, - { - "listen": "test", - "script": { - "id": "b18ca78d-898a-4efc-97cd-95e8072bee01", - "type": "text/javascript", - "exec": [ - "" - ] - } - } - ], - "variable": [ - { - "id": "016189d7-34ff-4413-953e-15ea05c4842d", - "key": "orion", - "value": "localhost:1026" - }, - { - "id": "798ae722-6f27-4ccb-8ce4-eb184528e4ec", - "key": "subscriptionId", - "value": "5e134a0c924f6d7d27b63844" - } - ], - "protocolProfileBehavior": {} -} \ No newline at end of file diff --git a/README.ja.md b/README.ja.md deleted file mode 100644 index 7006b06..0000000 --- a/README.ja.md +++ /dev/null @@ -1,721 +0,0 @@ -[![FIWARE Banner](https://fiware.github.io/tutorials.Big-Data-Spark/img/fiware.png)](https://www.fiware.org/developers) -[![NGSI v2](https://img.shields.io/badge/NGSI-v2-5dc0cf.svg)](https://fiware-ges.github.io/orion/api/v2/stable/) - -[![FIWARE Core Context Management](https://nexus.lab.fiware.org/static/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/processing/README.md) -[![License: MIT](https://img.shields.io/github/license/fiware/tutorials.Big-Data-Spark.svg)](https://opensource.org/licenses/MIT) -[![Support badge](https://nexus.lab.fiware.org/repository/raw/public/badges/stackoverflow/fiware.svg)](https://stackoverflow.com/questions/tagged/fiware) -
[![Documentation](https://img.shields.io/readthedocs/fiware-tutorials.svg)](https://fiware-tutorials.rtfd.io) - -このチュートリアルは、[FIWARE Cosmos Orion Spark Connector](http://fiware-cosmos-spark.rtfd.io) の概要です。これにより、 -最も人気のある BigData プラットフォームの1つである [Apache Spark](https://spark.apache.org/) と統合され、 -コンテキスト全体でビッグデータ分析が容易になります。Apache Spark は、制限のないデータ・ストリームと制限のあるデータ・ -ストリームでステートフルな計算を行うためのフレームワークおよび分散処理エンジンです。Spark は、すべての一般的な -クラスタ環境で実行され、メモリ内の速度と任意のスケールで計算を実行するように設計されています。 - -チュートリアルでは全体で [cUrl](https://ec.haxx.se/) コマンドを使用しますが、Postman のドキュメントとしても利用できます: - -[![Run in Postman](https://run.pstmn.io/button.svg)](https://www.getpostman.com/collections/9e508e30f737e7db4fa9) -[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/FIWARE/tutorials.Big-Data-Spark/tree/NGSI-v2) - -## コンテンツ - -
-詳細 - -- [リアルタイム処理とビッグデータ分析](#real-time-processing-and-big-data-analysis) -- [アーキテクチャ](#architecture) - - [Spark Cluster の構成](#spark-cluster-configuration) -- [前提条件](#prerequisites) - - [Docker および Docker Compose](#docker-and-docker-compose) - - [Maven](#maven) - - [Cygwin for Windows](#cygwin-for-windows) -- [起動](#start-up) -- [リアルタイム処理操作](#real-time-processing-operations) - - [Spark 用の JAR ファイルのコンパイル](#compiling-a-jar-file-for-spark) - - [コンテキスト・データのストリームの生成](#generating-a-stream-of-context-data) - - [ロガー - コンテキスト・データのストリームの読み取り](#logger---reading-context-data-streams) - - [ロガー - JAR のインストール](#logger---installing-the-jar) - - [ロガー - コンテキスト変更のサブスクライブ](#logger---subscribing-to-context-changes) - - [ロガー - 出力の確認](#logger---checking-the-output) - - [ロガー - コードの分析](#logger---analyzing-the-code) - - [フィードバック・ループ - コンテキスト・データの永続化](#feedback-loop---persisting-context-data) - - [フィードバック・ループ - JAR のインストール](#feedback-loop---installing-the-jar) - - [フィードバック・ループ - コンテキスト変更のサブスクライブ](#feedback-loop---subscribing-to-context-changes) - - [フィードバック・ループ - 出力の確認](#feedback-loop---checking-the-output) - - [フィードバック・ループ - コードの分析](#feedback-loop---analyzing-the-code) -- [次のステップ](#next-steps) - -
- - - -# リアルタイム処理とビッグデータ分析 - -> "You have to find what sparks a light in you so that you in your own way can illuminate the world." -> -> — Oprah Winfrey - -FIWARE に基づくスマート・ソリューションは、マイクロサービスを中心に設計されています。したがって、シンプルな -アプリケーション (スーパーマーケット・チュートリアルなど) から、IoT センサやその他のコンテキスト・データ・プロバイダの -大規模な配列に基づく都市全体のインストールにスケールアップするように設計されています。 - -関与する膨大な量のデータは、1台のマシンで分析、処理、保存するには膨大な量になるため、追加の分散サービスに作業を委任する -必要があります。これらの分散システムは、いわゆる **ビッグデータ分析** の基礎を形成します。タスクの分散により、開発者は、 -従来の方法では処理するには複雑すぎる巨大なデータ・セットから洞察を抽出することができます。隠れたパターンと相関関係を -明らかにします。 - -これまで見てきたように、コンテキスト・データはすべてのスマート・ソリューションの中核であり、Context Broker は状態の変化を -監視し、コンテキストの変化に応じて、[サブスクリプション・イベント](https://github.com/Fiware/tutorials.Subscriptions) -を発生させることができます。小規模なインストールの場合、各サブスクリプション・イベントは単一の受信エンドポイントで1つずつ -処理できますが、システムが大きくなると、リスナーを圧倒し、潜在的にリソースをブロックし、更新が失われないようにするために -別の手法が必要になります。 - -**Apache Spark** は、オープンソースの分散型汎用クラスタ・コンピューティング・フレームワークです。 暗黙的なデータ並列性と -フォールト・トレランスを備えたクラスタ全体をプログラミングするためのインターフェイスを提供します。 **Cosmos Spark** -コネクタを使用すると、開発者はカスタム・ビジネス・ロジックを記述して、コンテキスト・データのサブスクリプション・イベント -をリッスンし、コンテキスト・データのフローを処理できます。Spark は、これらのアクションを他のワーカーに委任して、 -必要に応じて順次または並行してアクションを実行することができます。 データ・フロー処理自体は、任意に複雑になる可能性が -あります。 - -実際には、明らかに、既存のスーパーマーケットのシナリオは小さすぎて、ビッグデータ・ソリューションの使用を必要としません。 -しかし、コンテキスト・データ・イベントの連続ストリームを処理する大規模なソリューションで必要になる可能性のある -リアルタイム処理のタイプを示すための基礎として機能します。 - - - -# アーキテクチャ - -このアプリケーションは、[以前のチュートリアル](https://github.com/FIWARE/tutorials.IoT-Agent/)で作成されたコンポーネントと -ダミー IoT デバイス上に構築されます。 3つの FIWARE コンポーネントを使用します。 -[Orion Context Broker](https://fiware-orion.readthedocs.io/en/latest/), -[IoT Agent for Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/) および Orion を -[Apache Spark cluster](https://spark.apache.org/docs/latest/cluster-overview.html) に接続するための -[Cosmos Orion Spark Connector](https://fiware-cosmos-spark.readthedocs.io/en/latest/) です。Spark クラスタ自体は、 -実行を調整する単一の **Cluster Manager** _master_ と、タスクを実行する単一の **Worker Nodes** _worker_ で構成されます。 - -Orion Context Broker と IoT Agent はどちらも、オープンソースの [MongoDB](https://www.mongodb.com/) テクノロジーに依存して、 -保持している情報の永続性を維持しています。また、[以前のチュートリアル](https://github.com/FIWARE/tutorials.IoT-Agent/)で -作成したダミー IoT デバイスを使用します。 - -したがって、全体的なアーキテクチャは次の要素で構成されます : - -- 独立したマイクロサービスとしての2つの **FIWARE Generic Enablers** : - - FIWARE [Orion Context Broker](https://fiware-orion.readthedocs.io/en/latest/)は、 - [NGSI-v2](https://fiware.github.io/specifications/OpenAPI/ngsiv2) を使用してリクエストを受信します - - FIWARE [IoT Agent for Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/) は、ダミー IoT - デバイスから Ultralight 2.0 形式のノースバウンド測定値を受信し、Context Broker の - [NGSI-v2](https://fiware.github.io/specifications/OpenAPI/ngsiv2) リクエストに変換して、コンテキスト・ - エンティティの状態を変更します -- [Apache Spark cluster](https://spark.apache.org/docs/latest/cluster-overview.html) は、 - 単一の **ClusterManager** と **Worker Nodes** で構成されます - - FIWARE [Cosmos Orion Spark Connector](https://fiware-cosmos-spark.readthedocs.io/en/latest/) は、 - コンテキストの変更をサブスクライブし、リアルタイムで操作を行うデータフローの一部としてデプロイされます -- 1つの [MongoDB](https://www.mongodb.com/) **データベース** : - - **Orion Context Broker** がデータ・エンティティ、サブスクリプション、レジストレーションなどの - コンテキスト・データ情報を保持するために使用します - - **IoT Agent** がデバイスの URL やキーなどのデバイス情報を保持するために使用します -- 3つの**コンテキスト・プロバイダ** : - - HTTP 上で実行される - [Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/usermanual/index.html#user-programmers-manual) - を使用する、[ダミー IoT デバイス](https://github.com/FIWARE/tutorials.IoT-Sensors/tree/NGSI-v2) のセットとして - 機能する Webサーバ - - **在庫管理フロントエンド** は、このチュートリアルでは使用しません。次のことを行います : - - ストア情報を表示し、ユーザがダミー IoT デバイスと対話できるようにします - - 各ストアで購入できる製品を表示します - - 
ユーザが製品を "購入" して在庫数を減らすことを許可します - - **Context Provider NGSI** プロキシは、このチュートリアルでは使用しません。次のことを行います : - - [NGSI-v2](https://fiware.github.io/specifications/OpenAPI/ngsiv2) を使用してリクエストを受信します - - 独自形式の独自 API を使用して、公開されているデータソースへのリクエストを行います - - コンテキスト・データを[NGSI-v2](https://fiware.github.io/specifications/OpenAPI/ngsiv2) 形式で - Orion Context Broker に返します - -全体のアーキテクチャを以下に示します : - -![](https://fiware.github.io/tutorials.Big-Data-Spark/img/Tutorial%20FIWARE%20Spark.png) - - - -## Spark Cluster の構成 - -```yaml -spark-master: - image: bde2020/spark-master:2.4.5-hadoop2.7 - container_name: spark-master - expose: - - "8080" - - "9001" - ports: - - "8080:8080" - - "7077:7077" - - "9001:9001" - environment: - - INIT_DAEMON_STEP=setup_spark - - "constraint:node==spark-master" -``` - -```yaml -spark-worker-1: - image: bde2020/spark-worker:2.4.5-hadoop2.7 - container_name: spark-worker-1 - depends_on: - - spark-master - ports: - - "8081:8081" - environment: - - "SPARK_MASTER=spark://spark-master:7077" - - "constraint:node==spark-master" -``` - -`spark-master` コンテナは、3つのポートでリッスンしています: - -- ポート `8080` は、Apache Spark-Master ダッシュボードの Web フロントエンドを見ることができる - ように、公開されます -- ポート `7070` は内部通信に使用されます -- ポート `9001` は、インストレーションがコンテキスト・データのサブスクリプションを受信できるように、 - 公開されます - -`spark-worker-1` コンテナは、1つのポートで待機しています: - -- ポート `8081` は、Apache Spark-Worker-1 ダッシュボードの Web フロントエンドを見ることができる - ように、公開されます - - - -# 前提条件 - - - -## Docker および Docker Compose - -物事を単純にするために、すべてのコンポーネントは [Docker](https://www.docker.com) を使用して実行されます。 -**Docker** は、さまざまなコンポーネントをそれぞれの環境に分離できるようにするコンテナ・テクノロジーです。 - -- Windows に Docker をインストールするには、[こちら](https://docs.docker.com/docker-for-windows/)の指示に従って - ください -- Mac に Docker をインストールするには、[こちら](https://docs.docker.com/docker-for-mac/)の指示に従ってください -- Linux に Docker をインストールするには、[こちら](https://docs.docker.com/install/)の指示に従ってください - -**Docker Compose** は、マルチ・コンテナ Docker アプリケーションを定義および実行するためのツールです。一連の -[YAML files](https://github.com/FIWARE/tutorials.Big-Data-Spark/tree/master/docker-compose) は、アプリケーション -に必要なサービスを構成するために使用されます。これは、すべてのコンテナ・サービスを単一のコマンドで起動できることを -意味します。Docker Compose は、デフォルトで Docker for Windows および Docker for Mac の一部としてインストール -されますが、Linux ユーザは[こちら](https://docs.docker.com/compose/install/)にある指示に従う必要があります。 - -次のコマンドを使用して、現在の **Docker** および **Docker Compose** バージョンを確認できます : - -```console -docker-compose -v -docker version -``` - -Docker バージョン20.10 以降および Docker Compose 1.29 以降を使用していることを確認し、必要に応じてアップグレード -してください。 - - - -## Maven - -[Apache Maven](https://maven.apache.org/download.cgi) は、ソフトウェア・プロジェクト管理ツールです。プロジェクト・ -オブジェクト・モデル (POM) の概念に基づいて、Maven は情報の中心部分からプロジェクトのビルド、レポート、および -ドキュメントを管理できます。Maven を使用して、依存関係を定義およびダウンロードし、コードをビルドして JAR ファイルに -パッケージ化します。 - - - -## Cygwin for Windows - -簡単な Bash スクリプトを使用してサービスを開始します。Windows ユーザは、[cygwin](http://www.cygwin.com/) を -ダウンロードして、Windows 上の Linux ディストリビューションに類似したコマンドライン機能を提供する必要があります。 - - - -# 起動 - -開始する前に、必要な Docker イメージをローカルで取得または構築したことを確認する必要があります。以下に示すコマンド -を実行して、リポジトリを複製し、必要なイメージを作成してください。いくつかのコマンドを特権ユーザとして実行する -必要がある場合があることに注意してください : - -```console -git clone https://github.com/FIWARE/tutorials.Big-Data-Spark.git -cd tutorials.Big-Data-Spark -checkout NGSI-v2 -./services create -``` - -このコマンドは、以前のチュートリアルからシードデータをインポートし、起動時にダミー IoT センサをプロビジョニング -します。 - -システムを起動するには、次のコマンドを実行します: - -```console -./services start -``` - -> :information_source: **注 :** クリーンアップしてやり直す場合は、次のコマンドを使用します : -> -> ```console -> ./services stop -> ``` - - - -# リアルタイム処理操作 - -[Apache Spark のドキュメント](https://spark.apache.org/documentation.html) によると、Spark Streaming はコア Spark API 
-の拡張であり、ライブ・データ・ストリームのスケーラブルで高スループットのフォールト・トレラントなストリーム処理を -可能にします。データは、Kafka, Flume, Kinesis, TCP ソケットなどの多くのソースから取り込むことができ、map, reduce, join, -window などの高レベル関数で表現された複雑なアルゴリズムを使用して処理できます。最後に、処理されたデータを -ファイルシステム、データベース、およびライブ・ダッシュボードにプッシュできます。 -実際、Spark の機械学習とグラフ処理アルゴリズムをデータ・ストリームに適用できます。 - -![](https://spark.apache.org/docs/latest/img/streaming-arch.png) - -内部的には、次のように動作します。Spark Streaming は、ライブ入力データ・ストリームを受信し、データをバッチに分割します。 -バッチは、Spark エンジンによって処理され、結果の最終ストリームがバッチで生成されます。 - -![](https://spark.apache.org/docs/latest/img/streaming-flow.png) - -これは、ストリーミング・データフローを作成するには、以下を提供する必要があることを意味します: - -- **Source Operator** としてコンテキスト・データを読み取るためのメカニズム -- 変換操作 (transform operations) を定義するビジネスロジック -- **Sink Operator** としてコンテキスト・データを Context Broker にプッシュバックするメカニズム - -**Cosmos Spark** connector - `orion.spark.connector-1.2.2.jar` は、**Source** および **Sink** operators の両方を -提供します。したがって、ストリーミング・データフロー・パイプライン操作を相互に接続するために必要な Scala コードを -記述するだけです。処理コードは、Spark クラスタにアップロードできる JAR ファイルにコンパイルできます。 -以下に2つの例を詳しく説明します。このチュートリアルのすべてのソースコードは、 -[cosmos-examples](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/tree/master/cosmos-examples) -ディレクトリにあります。 - -その他の Spark 処理の例は、 -[Spark Connector の例](https://fiware-cosmos-spark-examples.readthedocs.io/) にあります。 - - - -### Spark 用の JAR ファイルのコンパイル - -サンプル JAR ファイルをビルドするために必要な前提条件を保持する既存の `pom.xml` ファイルが作成されました。 - -Orion Spark Connector を使用するには、最初に Maven を使用してアーティファクトとしてコネクタ JAR -を手動でインストールする必要があります: - -```console -cd cosmos-examples -curl -LO https://github.com/ging/fiware-cosmos-orion-spark-connector/releases/download/FIWARE_7.9.1/orion.spark.connector-1.2.2.jar -mvn install:install-file \ - -Dfile=./orion.spark.connector-1.2.2.jar \ - -DgroupId=org.fiware.cosmos \ - -DartifactId=orion.spark.connector \ - -Dversion=1.2.2 \ - -Dpackaging=jar -``` - -その後、同じディレクトリ (`cosmos-examples`) 内で `mvn package` コマンドを実行することにより、 -ソースコードをコンパイルできます: - -```console -mvn package -``` - -`cosmos-examples-1.2.2.jar` と呼ばれる新しい JAR ファイルが `cosmos-examples/target` ディレクトリ内に作成されます。 - - - -### コンテキスト・データのストリームの生成 - -このチュートリアルでは、コンテキストが定期的に更新されているシステムを監視する必要があります。 ダミー IoT センサを -使用してこれを行うことができます。`http://localhost:3000/device/monitor` でデバイス・モニタ。ページを開き、 -**Smart Door** のロックを解除して、**Smart Lamp** をオンにします。これは、ドロップ・ダウン。リストから適切なコマンドを -選択し、`send` ボタンを押すことで実行できます。デバイスからの測定の流れは、同じページで見ることができます: - -![](https://fiware.github.io/tutorials.Big-Data-Spark/img/door-open.gif) - - - -## ロガー - コンテキスト・データのストリームの読み取り - -最初の例では、Orion Context Broker からノーティフィケーションを受信するために `OrionReceiver` operator を使用します。 -具体的には、この例では、各タイプのデバイスが1分間に送信するノーティフィケーションの数をカウントします。 -この例のソースコードは、 -[org/fiware/cosmos/tutorial/Logger.scala](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/blob/master/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala) -にあります。 - - - -### ロガー - JAR のインストール - -必要に応じてコンテナを再起動し、ワーカー・コンテナにアクセスします: - -```console -docker exec -it spark-worker-1 bin/bash -``` - -そして、次のコマンドを実行して、生成された JAR パッケージを Spark クラスタで実行します: - -```console -/spark/bin/spark-submit \ ---class org.fiware.cosmos.tutorial.Logger \ ---master spark://spark-master:7077 \ ---deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.2.jar \ ---conf "spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console" -``` - - - -### ロガー - コンテキスト変更のサブスクライブ - -動的コンテキスト・システムが稼働し始めたら (Spark クラスタに `Logger` ジョブをデプロイしました)、コンテキストの変更を -**Spark** にノーティフィケーションする必要があります。 - -これは、Orion Context Broker の `/v2/subscription` エンドポイントに POST リクエストを行うことによって行われます。 - -- `fiware-service` および `fiware-servicepath` 
ヘッダは、これらの設定を使用してプロビジョニングされているため、 - 接続された IoT センサからの測定値のみをリッスンするようにサブスクリプションをフィルタリングするために使用されます - -- ノーティフィケーション url は、Spark プログラムがリッスンしているものと一致する必要があります - -- この `throttling` 値は、変更がサンプリングされるレートを定義します - -別のターミナルを開き、次のコマンドを実行します: - -#### 1️⃣ リクエスト: - -```console -curl -iX POST \ - 'http://localhost:1026/v2/subscriptions' \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "description": "Notify Spark of all context changes", - "subject": { - "entities": [ - { - "idPattern": ".*" - } - ] - }, - "notification": { - "http": { - "url": "http://spark-worker-1:9001" - } - } -}' -``` - -レスポンスは、**`201 - Created`** です。 - -サブスクリプションが作成されている場合は、`/v2/subscriptions` エンドポイントに対して GET リクエストを行うことで、 -サブスクリプションが起動しているかどうかを確認できます。 - -#### 2️⃣ リクエスト: - -```console -curl -X GET \ -'http://localhost:1026/v2/subscriptions/' \ --H 'fiware-service: openiot' \ --H 'fiware-servicepath: /' -``` - -#### レスポンス: - -```json -[ - { - "id": "5d76059d14eda92b0686f255", - "description": "Notify Spark of all context changes", - "status": "active", - "subject": { - "entities": [ - { - "idPattern": ".*" - } - ], - "condition": { - "attrs": [] - } - }, - "notification": { - "timesSent": 362, - "lastNotification": "2019-09-09T09:36:33.00Z", - "attrs": [], - "attrsFormat": "normalized", - "http": { - "url": "http://spark-worker-1:9001" - }, - "lastSuccess": "2019-09-09T09:36:33.00Z", - "lastSuccessCode": 200 - } - } -] -``` - -レスポンスの `notification` セクション内に、サブスクリプションの状態を説明するいくつかの追加 `attributes` が表示されます。 - -サブスクリプションの基準が満たされている場合は、`timesSent` は、`0` より大きい必要があります。ゼロの場合は、 -サブスクリプションの `subject` が正しくないか、サブスクリプションが間違った `fiware-service-path` または `fiware-service` -ヘッダで作成されたことを示します。 - -`lastNotification` は、最近のタイムスタンプでなければなりません。そうでない場合は、デバイスが定期的にデータを -送信していません。**Smart Door** のロックを解除し、**Smart Lamp** をオンにすることを忘れないでください。 - -`lastSuccess` は、`lastNotification` date に一致している必要があります。そうでない場合は、**Cosmos** は、 -適切にサブスクリプションを受信していません。ホスト名とポートが正しいことを確認してください。 - -最後に、サブスクリプションの `status` が `active` であるかどうかを確認します。期限切れのサブスクリプションは起動しません。 - - - -### ロガー - 出力の確認 - -サブスクリプションを**1分間**実行したままにします。次に、Spark ジョブを実行したコンソールでの出力は次のようになります: - -```text -Sensor(Bell,3) -Sensor(Door,4) -Sensor(Lamp,7) -Sensor(Motion,6) -``` - - - -### ロガー - コードの分析 - -```scala -package org.fiware.cosmos.tutorial - -import org.apache.spark.SparkConf -import org.apache.spark.streaming.{Seconds, StreamingContext} -import org.fiware.cosmos.orion.spark.connector.OrionReceiver - -object Logger{ - - def main(args: Array[String]): Unit = { - - val conf = new SparkConf().setAppName("Logger") - val ssc = new StreamingContext(conf, Seconds(60)) - // Create Orion Receiver. 
Receive notifications on port 9001 - val eventStream = ssc.receiverStream(new OrionReceiver(9001)) - - // Process event stream - val processedDataStream= eventStream - .flatMap(event => event.entities) - .map(ent => { - new Sensor(ent.`type`) - }) - .countByValue() - .window(Seconds(60)) - - processedDataStream.print() - - ssc.start() - ssc.awaitTermination() - } - case class Sensor(device: String) -} -``` - -プログラムの最初の行は、コネクタを含む必要な依存関係をインポートすることを目的としています。次のステップは、 -コネクタによって提供されるクラスを使用して `OrionReceiver` のインスタンスを作成し、それを Spark -によって提供される環境に追加することです。 - -`OrionReceiver` コンストラクタは、パラメータとしてポート番号 (`9001`) を受け入れます。このポートは、Orion から来る -サブスクリプションのノーティフィケーションをリッスンするために使用され、`NgsiEvent` オブジェクトの `DataStream` -に変換されます。これらのオブジェクトの定義は、 -[Orion-Spark Connector ドキュメント](https://github.com/ging/fiware-cosmos-orion-spark-connector/blob/master/README.md#orionreceiver) -に記載されています。 - -ストリーム処理は、5つの別々のステップで構成されています。最初のステップ (`flatMap()`) は、一定期間に受信したすべての -NGSI イベントのエンティティ・オブジェクトをまとめるために実行されます。その後、コードは (`map()` 操作を使用して) -それらを反復処理し、目的の属性を抽出します。この場合、我々は、センサ `type` (`Door`, `Motion`, `Bell` または `Lamp`) -に興味があります。 - -各反復内で、必要なプロパティであるセンサ `type` を使用してカスタム・オブジェクトを作成します。この目的のために、 -次のようにケース class を定義できます: - -```scala -case class Sensor(device: String) -``` - -その後、作成されたオブジェクトをデバイスのタイプ (`countByValue()`) でカウントし、それらに対して `window()` -などの操作を実行できます。 - -処理後、結果はコンソールに出力されます: - -```scala -processedDataStream.print() -``` - -#### Logger - NGSI-LD: - -同じ例が NGSI-LD 形式 (`LoggerLD.scala`) のデータに提供されています。この例では、NGSI-LD 形式でメッセージを -受信するために、Orion Spark Connector によって提供される NGSILDReceiver を利用します。変更されるコードの唯一の部分は、 -レシーバー (receiver) の宣言です: - -```scala -... -import org.fiware.cosmos.orion.spark.connector.NGSILDReceiver -... -val eventStream = env.addSource(new NGSILDReceiver(9001)) -... -``` - -このジョブを実行するには、spark-submit コマンドを再度使用して、次の`Logger` の代わりに `LoggerLD` -クラスを指定する必要があります: - -```console -/spark/bin/spark-submit \ ---class org.fiware.cosmos.tutorial.LoggerLD \ ---master spark://spark-master:7077 \ ---deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.2.jar \ ---conf "spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console" -``` - - - -## フィードバック・ループ - コンテキスト・データの永続化 - -2番目の例では、**Motion Sensor**が動きを検出すると**Smart Lamp** をオンにします。 - -データフロー・ストリームは、ノーティフィケーションを受信するために `OrionReceiver` operator を使用し、入力を -フィルタリングして **Motion Sensor** にのみレスポンスするように入力をフィルタ処理し、次に `OrionSink` -を使用して処理されたコンテキストを Context Broker にプッシュバックします。この例のソースコードは -[org/fiware/cosmos/tutorial/Feedback.scala](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/blob/master/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala) -にあります。 - - - -### フィードバック・ループ - JAR のインストール - -```console -/spark/bin/spark-submit \ ---class org.fiware.cosmos.tutorial.Feedback \ ---master spark://spark-master:7077 \ ---deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.2.jar \ ---conf "spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console" -``` - - - -### フィードバック・ループ - コンテキスト変更のサブスクライブ - -前の例が実行されていない場合は、新しいサブスクリプションを設定する必要があります。**Motion Sensor** -が動きを検出したときにのみノーティフィケーションをトリガーするように、より狭いサブスクリプションを設定できます。 - -> **注:** 前のサブスクリプションがすでに存在する場合、2番目のより狭いモーションのみのサブスクリプションを作成する、 -> このステップは不要です。Scala タスク自体のビジネス・ロジック内にフィルタがあります。 - -```console -curl -iX POST \ - 'http://localhost:1026/v2/subscriptions' \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "description": "Notify Spark of all context changes", - "subject": { - "entities": [ - { - "idPattern": "Motion.*" - } - ] - }, - 
"notification": { - "http": { - "url": "http://spark-worker-1:9001" - } - } -}' -``` - - - -### フィードバック・ループ - 出力の確認 - -`http://localhost:3000/device/monitor` に移動します。 - -ストア内で、**Smart Door** のロックを解除して待ちます。**Smart Door** が開いて **Motion Sensor** がトリガーされると、 -**Smart Lamp** が直接オンになります。 - - - -### フィードバック・ループ - コードの分析 - -```scala -package org.fiware.cosmos.tutorial - -import org.apache.spark.SparkConf -import org.apache.spark.streaming.{Seconds, StreamingContext} -import org.fiware.cosmos.orion.spark.connector._ - -object Feedback { - final val CONTENT_TYPE = ContentType.JSON - final val METHOD = HTTPMethod.PATCH - final val CONTENT = "{\n \"on\": {\n \"type\" : \"command\",\n \"value\" : \"\"\n }\n}" - final val HEADERS = Map("fiware-service" -> "openiot","fiware-servicepath" -> "/","Accept" -> "*/*") - - def main(args: Array[String]): Unit = { - - val conf = new SparkConf().setAppName("Feedback") - val ssc = new StreamingContext(conf, Seconds(10)) - // Create Orion Receiver. Receive notifications on port 9001 - val eventStream = ssc.receiverStream(new OrionReceiver(9001)) - - // Process event stream - val processedDataStream = eventStream - .flatMap(event => event.entities) - .filter(entity=>(entity.attrs("count").value == "1")) - .map(entity=> new Sensor(entity.id)) - .window(Seconds(10)) - - val sinkStream= processedDataStream.map(sensor => { - val url="http://localhost:1026/v2/entities/Lamp:"+sensor.id.takeRight(3)+"/attrs" - OrionSinkObject(CONTENT,url,CONTENT_TYPE,METHOD,HEADERS) - }) - // Add Orion Sink - OrionSink.addSink( sinkStream ) - - // print the results with a single thread, rather than in parallel - processedDataStream.print() - ssc.start() - - ssc.awaitTermination() - } - - case class Sensor(id: String) -} -``` - -ご覧のとおり、前の例と似ています。主な違いは、処理されたデータを **`OrionSink`** を介して Context Broker -に書き戻すことです。 - -**`OrionSinkObject`** の引数は次のとおりです: - -- **Message**: `"{\n \"on\": {\n \"type\" : \"command\",\n \"value\" : \"\"\n }\n}"`. 'on' コマンドを送信します -- **URL**: `"http://localhost:1026/v2/entities/Lamp:"+node.id.takeRight(3)+"/attrs"`. TakeRight(3) は、Room - の番号を取得します (例: '001') -- **Content Type**: `ContentType.Plain`. -- **HTTP Method**: `HTTPMethod.POST`. -- **Headers**: `Map("fiware-service" -> "openiot","fiware-servicepath" -> "/","Accept" -> "*/*")`. - オプション・パラメータ。HTTP リクエストに必要なヘッダを追加します。 - - - -# 次のステップ - -データ処理エンジンとして Flink を使用したい場合は、 -[この Flink 用チュートリアル](https://github.com/ging/tutorials.Big-Data-Analysis) も利用できます。 - -このチュートリアルでデータに対して実行される操作は非常に単純でした。機械学習を使用してリアルタイム予測を実行する -シナリオを設定する方法を知りたい場合は、ベルリンで開催された FIWARE Global Summit (2019) で発表された -[デモ](https://github.com/ging/fiware-global-summit-berlin-2019-ml/) をご覧ください。 - -高度な機能を追加することで、アプリケーションに複雑さを加える方法を知りたいですか? このシリーズの -[他のチュートリアル](https://www.letsfiware.jp/fiware-tutorials)を読むことで見つけることができます - ---- - -## License - -[MIT](LICENSE) © 2020-2024 FIWARE Foundation e.V. 
diff --git a/README.md b/README.md index f14b348..a05ace1 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,8 @@ [![FIWARE Banner](https://fiware.github.io/tutorials.Big-Data-Spark/img/fiware.png)](https://www.fiware.org/developers) -[![NGSI v2](https://img.shields.io/badge/NGSI-v2-5dc0cf.svg)](https://fiware-ges.github.io/orion/api/v2/stable/) -[![FIWARE Core Context Management](https://nexus.lab.fiware.org/static/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/processing/README.md) -[![License: MIT](https://img.shields.io/github/license/fiware/tutorials.Big-Data-Spark.svg)](https://opensource.org/licenses/MIT) -[![Support badge](https://nexus.lab.fiware.org/repository/raw/public/badges/stackoverflow/fiware.svg)](https://stackoverflow.com/questions/tagged/fiware) -
[![Documentation](https://img.shields.io/readthedocs/fiware-tutorials.svg)](https://fiware-tutorials.rtfd.io)
+[![FIWARE Core Context Management](https://nexus.lab.fiware.org/static/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/core/README.md)
+[![License: MIT](https://img.shields.io/github/license/fiware/tutorials.Big-Data-Spark.svg)](https://opensource.org/licenses/MIT)
+[![Support badge](https://img.shields.io/badge/tag-fiware-orange.svg?logo=stackoverflow)](https://stackoverflow.com/questions/tagged/fiware)
 
 This tutorial is an introduction to the [FIWARE Cosmos Orion Spark Connector](http://fiware-cosmos-spark.rtfd.io), which
 enables easier Big Data analysis over context, integrated with one of the most popular BigData platforms:
@@ -12,664 +10,8 @@ enables easier Big Data analysis over context, integrated with one of the most p
 computations over unbounded and bounded data streams. Spark has been designed to run in all common cluster environments,
 perform computations at in-memory speed and at any scale.
 
-The tutorial uses [cUrl](https://ec.haxx.se/) commands throughout, but is also available as Postman documentation:
-
-[![Run in Postman](https://run.pstmn.io/button.svg)](https://www.getpostman.com/collections/9e508e30f737e7db4fa9)
-[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/FIWARE/tutorials.Big-Data-Spark/tree/NGSI-v2)
-
-- このチュートリアルは[日本語](README.ja.md)でもご覧いただけます。
-
-## Contents
-
-
-Details - -- [Real-time Processing and Big Data Analysis](#real-time-processing-and-big-data-analysis) -- [Architecture](#architecture) - - [Spark Cluster Configuration](#spark-cluster-configuration) -- [Prerequisites](#prerequisites) - - [Docker and Docker Compose](#docker-and-docker-compose) - - [Maven](#maven) - - [Cygwin for Windows](#cygwin-for-windows) -- [Start Up](#start-up) -- [Real-time Processing Operations](#real-time-processing-operations) - - [Compiling a JAR file for Spark](#compiling-a-jar-file-for-spark) - - [Generating a stream of Context Data](#generating-a-stream-of-context-data) - - [Logger - Reading Context Data Streams](#logger---reading-context-data-streams) - - [Logger - Installing the JAR](#logger---installing-the-jar) - - [Logger - Subscribing to context changes](#logger---subscribing-to-context-changes) - - [Logger - Checking the Output](#logger---checking-the-output) - - [Logger - Analyzing the Code](#logger---analyzing-the-code) - - [Feedback Loop - Persisting Context Data](#feedback-loop---persisting-context-data) - - [Feedback Loop - Installing the JAR](#feedback-loop---installing-the-jar) - - [Feedback Loop - Subscribing to context changes](#feedback-loop---subscribing-to-context-changes) - - [Feedback Loop - Checking the Output](#feedback-loop---checking-the-output) - - [Feedback Loop - Analyzing the Code](#feedback-loop---analyzing-the-code) - -
-
-# Real-time Processing and Big Data Analysis
-
-> "You have to find what sparks a light in you so that you in your own way can illuminate the world."
->
-> — Oprah Winfrey
-
-Smart solutions based on FIWARE are architecturally designed around microservices. They are therefore designed to
-scale up from simple applications (such as the Supermarket tutorial) through to city-wide installations based on a
-large array of IoT sensors and other context data providers.
-
-The massive amount of data involved eventually becomes too much for a single machine to analyse, process and store, and
-therefore the work must be delegated to additional distributed services. These distributed systems form the basis of
-so-called **Big Data Analysis**. The distribution of tasks allows developers to extract insights from huge data sets
-which would be too complex to deal with using traditional methods, and to uncover hidden patterns and correlations.
-
-As we have seen, context data is core to any Smart Solution, and the Context Broker is able to monitor changes of state
-and raise [subscription events](https://github.com/Fiware/tutorials.Subscriptions) as the context changes. For smaller
-installations, each subscription event can be processed one-by-one by a single receiving endpoint; however, as the
-system grows, another technique will be required to avoid overwhelming the listener, potentially blocking resources and
-missing updates.
-
-**Apache Spark** is an open-source distributed general-purpose cluster-computing framework. It provides an interface for
-programming entire clusters with implicit data parallelism and fault tolerance. The **Cosmos Spark** connector allows
-developers to write custom business logic to listen for context data subscription events and then process the flow of
-the context data. Spark is able to delegate these actions to other workers where they will be acted upon either
-sequentially or in parallel as required. The data flow processing itself can be arbitrarily complex.
-
-Obviously, in reality, our existing Supermarket scenario is far too small to require the use of a Big Data solution, but
-will serve as a basis for demonstrating the type of real-time processing which may be required in a larger solution
-which is processing a continuous stream of context-data events.
-
-# Architecture
-
-This application builds on the components and dummy IoT devices created in
-[previous tutorials](https://github.com/FIWARE/tutorials.IoT-Agent/). It will make use of three FIWARE components - the
-[Orion Context Broker](https://fiware-orion.readthedocs.io/en/latest/), the
-[IoT Agent for Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/), and the
-[Cosmos Orion Spark Connector](https://fiware-cosmos-spark.readthedocs.io/en/latest/) for connecting Orion to an
-[Apache Spark cluster](https://spark.apache.org/docs/latest/cluster-overview.html). The Spark cluster itself will
-consist of a single **Cluster Manager** _master_ to coordinate execution and some **Worker Nodes** _workers_ to execute
-the tasks.
-
-Both the Orion Context Broker and the IoT Agent rely on open source [MongoDB](https://www.mongodb.com/) technology to
-keep persistence of the information they hold. We will also be using the dummy IoT devices created in the
-[previous tutorial](https://github.com/FIWARE/tutorials.IoT-Agent/).
-
-Therefore the overall architecture will consist of the following elements:
-
-- Two **FIWARE Generic Enablers** as independent microservices:
-    - The FIWARE [Orion Context Broker](https://fiware-orion.readthedocs.io/en/latest/) which will receive requests
-      using [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2)
-    - The FIWARE [IoT Agent for Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/) which will
-      receive northbound measurements from the dummy IoT devices in
-      [Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/usermanual/index.html#user-programmers-manual)
-      format and convert them to [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2) requests for the
-      context broker to alter the state of the context entities
-- An [Apache Spark cluster](https://spark.apache.org/docs/latest/cluster-overview.html) consisting of a single
-  **Cluster Manager** and **Worker Nodes**
-    - The FIWARE [Cosmos Orion Spark Connector](https://fiware-cosmos-spark.readthedocs.io/en/latest/) will be
-      deployed as part of the dataflow which will subscribe to context changes and perform operations on them in
-      real-time
-- One [MongoDB](https://www.mongodb.com/) **database**:
-    - Used by the **Orion Context Broker** to hold context data information such as data entities, subscriptions and
-      registrations
-    - Used by the **IoT Agent** to hold device information such as device URLs and Keys
-- Three **Context Providers**:
-    - A webserver acting as a set of [dummy IoT devices](https://github.com/FIWARE/tutorials.IoT-Sensors) using the
-      [Ultralight 2.0](https://fiware-iotagent-ul.readthedocs.io/en/latest/usermanual/index.html#user-programmers-manual)
-      protocol running over HTTP.
-    - The **Stock Management Frontend** is not used in this tutorial. It does the following:
-        - Display store information and allow users to interact with the dummy IoT devices
-        - Show which products can be bought at each store
-        - Allow users to "buy" products and reduce the stock count.
-    - The **Context Provider NGSI** proxy is not used in this tutorial. It does the following:
-        - receive requests using [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2)
-        - makes requests to publicly available data sources using their own APIs in a proprietary format
-        - returns context data back to the Orion Context Broker in
-          [NGSI](https://fiware.github.io/specifications/OpenAPI/ngsiv2) format.
-
-The overall architecture can be seen below:
-
-![](https://fiware.github.io/tutorials.Big-Data-Spark/img/Tutorial%20FIWARE%20Spark.png)
-
-## Spark Cluster Configuration
-
-```yaml
-spark-master:
-    image: bde2020/spark-master:2.4.5-hadoop2.7
-    container_name: spark-master
-    expose:
-        - '8080'
-        - '9001'
-    ports:
-        - '8080:8080'
-        - '7077:7077'
-        - '9001:9001'
-    environment:
-        - INIT_DAEMON_STEP=setup_spark
-        - 'constraint:node==spark-master'
-```
-
-```yaml
-spark-worker-1:
-    image: bde2020/spark-worker:2.4.5-hadoop2.7
-    container_name: spark-worker-1
-    depends_on:
-        - spark-master
-    ports:
-        - '8081:8081'
-    environment:
-        - 'SPARK_MASTER=spark://spark-master:7077'
-        - 'constraint:node==spark-master'
-```
-
-The `spark-master` container is listening on three ports:
-
-- Port `8080` is exposed so we can see the web frontend of the Apache Spark-Master Dashboard.
-- Port `7077` is used for internal communications between the master and the workers.
-- Port `9001` is exposed so that the installation can receive context data subscriptions.
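These are also the endpoints that the example jobs later in the tutorial rely on: the driver connects to the Cluster Manager at `spark://spark-master:7077` and the connector receives subscription notifications on port `9001`. As a rough sketch only (it is not part of the tutorial sources, and the object name `ClusterCheck` is invented for illustration), a minimal job wired against this configuration could look like the following:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.fiware.cosmos.orion.spark.connector.OrionReceiver

// Hypothetical smoke-test job: it only checks that subscription notifications
// arrive through the ports configured above.
object ClusterCheck {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("ClusterCheck")
      .setMaster("spark://spark-master:7077") // the spark-master service defined above

    val ssc = new StreamingContext(conf, Seconds(10))

    // Port 9001 is the port exposed on the cluster for subscription notifications
    val notifications = ssc.receiverStream(new OrionReceiver(9001))

    // Print how many entities each notification batch contains
    notifications.map(event => event.entities.size).print()

    ssc.start()
    ssc.awaitTermination()
  }
}
```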
-
-The `spark-worker-1` container is listening on one port:
-
-- Port `8081` is exposed so we can see the web frontend of the Apache Spark-Worker-1 Dashboard.
-
-# Prerequisites
-
-## Docker and Docker Compose
-
-To keep things simple, all components will be run using [Docker](https://www.docker.com). **Docker** is a container
-technology which allows different components to be isolated into their respective environments.
-
-- To install Docker on Windows follow the instructions [here](https://docs.docker.com/docker-for-windows/)
-- To install Docker on Mac follow the instructions [here](https://docs.docker.com/docker-for-mac/)
-- To install Docker on Linux follow the instructions [here](https://docs.docker.com/install/)
-
-**Docker Compose** is a tool for defining and running multi-container Docker applications. A series of
-[YAML files](https://github.com/FIWARE/tutorials.Big-Data-Spark/tree/master/docker-compose) are used to configure the
-required services for the application. This means all container services can be brought up in a single command. Docker
-Compose is installed by default as part of Docker for Windows and Docker for Mac, however Linux users will need to
-follow the instructions found [here](https://docs.docker.com/compose/install/).
-
-You can check your current **Docker** and **Docker Compose** versions using the following commands:
-
-```console
-docker-compose -v
-docker version
-```
-
-Please ensure that you are using Docker version 20.10 or higher and Docker Compose 1.29 or higher and upgrade if
-necessary.
-
-## Maven
-
-[Apache Maven](https://maven.apache.org/download.cgi) is a software project management and comprehension tool. Based on
-the concept of a project object model (POM), Maven can manage a project's build, reporting and documentation from a
-central piece of information. We will use Maven to define and download our dependencies and to build and package our
-code into a JAR file.
-
-## Cygwin for Windows
-
-We will start up our services using a simple Bash script. Windows users should download [cygwin](http://www.cygwin.com/)
-to provide command-line functionality similar to a Linux distribution on Windows.
-
-# Start Up
-
-Before you start, you should ensure that you have obtained or built the necessary Docker images locally. Please clone
-the repository and create the necessary images by running the commands shown below. Note that you might need to run some
-of the commands as a privileged user:
-
-```console
-git clone https://github.com/FIWARE/tutorials.Big-Data-Spark.git
-cd tutorials.Big-Data-Spark
-git checkout NGSI-v2
-./services create
-```
-
-This command will also import seed data from the previous tutorials and provision the dummy IoT sensors on startup.
-
-To start the system, run the following command:
-
-```console
-./services start
-```
-
-> [!NOTE]
-> If you want to clean up and start over again you can do so with the following command:
->
-> ```console
-> ./services stop
-> ```
-
-# Real-time Processing Operations
-
-According to the [Apache Spark documentation](https://spark.apache.org/documentation.html), Spark Streaming is an
-extension of the core Spark API that enables scalable, high-throughput, fault-tolerant stream processing of live data
-streams. Data can be ingested from many sources like Kafka, Flume, Kinesis, or TCP sockets, and can be processed using
-complex algorithms expressed with high-level functions like map, reduce, join and window.
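As an illustration only (this snippet is not part of the tutorial sources, and the TCP source on `localhost:9999` is a placeholder), the kind of high-level chaining referred to above looks like this in Spark Streaming:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

// Illustrative word count over a sliding window; "localhost:9999" is a
// placeholder TCP source, not one of the tutorial services.
object WindowedWordCount {
  def main(args: Array[String]): Unit = {
    val ssc = new StreamingContext(new SparkConf().setAppName("WindowedWordCount"), Seconds(5))

    val lines = ssc.socketTextStream("localhost", 9999)

    val counts = lines
      .flatMap(_.split(" "))                                               // map-like: split each line into words
      .map(word => (word, 1))
      .reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(30), Seconds(5)) // reduce over a 30s window, sliding every 5s

    counts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
```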
Finally, processed data can be -pushed out to filesystems, databases, and live dashboards. In fact, you can apply Spark’s machine learning and graph -processing algorithms on data streams. - -![](https://spark.apache.org/docs/latest/img/streaming-arch.png) - -Internally, it works as follows. Spark Streaming receives live input data streams and divides the data into batches, -which are then processed by the Spark engine to generate the final stream of results in batches. - -![](https://spark.apache.org/docs/latest/img/streaming-flow.png) - -This means that to create a streaming data flow we must supply the following: - -- A mechanism for reading Context data as a **Source Operator** -- Business logic to define the transform operations -- A mechanism for pushing Context data back to the context broker as a **Sink Operator** - -The **Cosmos Spark** connector - `orion.spark.connector-1.2.2.jar` offers both **Source** and **Sink** operators. It -therefore only remains to write the necessary Scala code to connect the streaming dataflow pipeline operations together. -The processing code can be complied into a JAR file which can be uploaded to the spark cluster. Two examples will be -detailed below, all the source code for this tutorial can be found within the -[cosmos-examples](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/tree/master/cosmos-examples) -directory. - -Further Spark processing examples can be found on -[Spark Connector Examples](https://fiware-cosmos-spark-examples.readthedocs.io/). - -### Compiling a JAR file for Spark - -An existing `pom.xml` file has been created which holds the necessary prerequisites to build the examples JAR file - -In order to use the Orion Spark Connector we first need to manually install the connector JAR as an artifact using -Maven: - -```console -cd cosmos-examples -curl -LO https://github.com/ging/fiware-cosmos-orion-spark-connector/releases/download/FIWARE_7.9.1/orion.spark.connector-1.2.2.jar -mvn install:install-file \ - -Dfile=./orion.spark.connector-1.2.2.jar \ - -DgroupId=org.fiware.cosmos \ - -DartifactId=orion.spark.connector \ - -Dversion=1.2.2 \ - -Dpackaging=jar -``` - -Thereafter the source code can be compiled by running the `mvn package` command within the same directory -(`cosmos-examples`): - -```console -mvn package -``` - -A new JAR file called `cosmos-examples-1.2.2.jar` will be created within the `cosmos-examples/target` directory. - -### Generating a stream of Context Data - -For the purpose of this tutorial, we must be monitoring a system in which the context is periodically being updated. The -dummy IoT Sensors can be used to do this. Open the device monitor page at `http://localhost:3000/device/monitor` and -unlock a **Smart Door** and switch on a **Smart Lamp**. This can be done by selecting an appropriate the command from -the drop down list and pressing the `send` button. The stream of measurements coming from the devices can then be seen -on the same page: - -![](https://fiware.github.io/tutorials.Big-Data-Spark/img/door-open.gif) - -## Logger - Reading Context Data Streams - -The first example makes use of the `OrionReceiver` operator in order to receive notifications from the Orion Context -Broker. Specifically, the example counts the number notifications that each type of device sends in one minute. 
You can -find the source code of the example in -[org/fiware/cosmos/tutorial/Logger.scala](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/blob/master/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala) - -### Logger - Installing the JAR - -Restart the containers if necessary, then access the worker container: - -```console -docker exec -it spark-worker-1 bin/bash -``` - -And run the following command to run the generated JAR package in the Spark cluster: - -```console -/spark/bin/spark-submit \ ---class org.fiware.cosmos.tutorial.Logger \ ---master spark://spark-master:7077 \ ---deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.2.jar \ ---conf "spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console" -``` - -### Logger - Subscribing to context changes - -Once a dynamic context system is up and running (we have deployed the `Logger` job in the Spark cluster), we need to -inform **Spark** of changes in context. - -This is done by making a POST request to the `/v2/subscription` endpoint of the Orion Context Broker. - -- The `fiware-service` and `fiware-servicepath` headers are used to filter the subscription to only listen to - measurements from the attached IoT Sensors, since they had been provisioned using these settings - -- The notification `url` must match the one our Spark program is listening to. - -- The `throttling` value defines the rate that changes are sampled. - -Open another terminal and run the following command: - -#### 1️⃣ Request: - -```console -curl -iX POST \ - 'http://localhost:1026/v2/subscriptions' \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "description": "Notify Spark of all context changes", - "subject": { - "entities": [ - { - "idPattern": ".*" - } - ] - }, - "notification": { - "http": { - "url": "http://spark-worker-1:9001" - } - } -}' -``` - -The response will be **`201 - Created`** - -If a subscription has been created, we can check to see if it is firing by making a GET request to the -`/v2/subscriptions` endpoint. - -#### 2️⃣ Request: - -```console -curl -X GET \ -'http://localhost:1026/v2/subscriptions/' \ --H 'fiware-service: openiot' \ --H 'fiware-servicepath: /' -``` - -#### Response: - -```json -[ - { - "id": "5d76059d14eda92b0686f255", - "description": "Notify Spark of all context changes", - "status": "active", - "subject": { - "entities": [ - { - "idPattern": ".*" - } - ], - "condition": { - "attrs": [] - } - }, - "notification": { - "timesSent": 362, - "lastNotification": "2019-09-09T09:36:33.00Z", - "attrs": [], - "attrsFormat": "normalized", - "http": { - "url": "http://spark-worker-1:9001" - }, - "lastSuccess": "2019-09-09T09:36:33.00Z", - "lastSuccessCode": 200 - } - } -] -``` - -Within the `notification` section of the response, you can see several additional `attributes` which describe the health -of the subscription - -If the criteria of the subscription have been met, `timesSent` should be greater than `0`. A zero value would indicate -that the `subject` of the subscription is incorrect or the subscription has created with the wrong `fiware-service-path` -or `fiware-service` header - -The `lastNotification` should be a recent timestamp - if this is not the case, then the devices are not regularly -sending data. 
Remember to unlock the **Smart Door** and switch on the **Smart Lamp** - -The `lastSuccess` should match the `lastNotification` date - if this is not the case then **Cosmos** is not receiving -the subscription properly. Check that the hostname and port are correct. - -Finally, check that the `status` of the subscription is `active` - an expired subscription will not fire. - -### Logger - Checking the Output - -Leave the subscription running for **one minute**. Then, the output on the console on which you ran the Spark job will -be like the following: - -```text -Sensor(Bell,3) -Sensor(Door,4) -Sensor(Lamp,7) -Sensor(Motion,6) -``` - -### Logger - Analyzing the Code - -```scala -package org.fiware.cosmos.tutorial - -import org.apache.spark.SparkConf -import org.apache.spark.streaming.{Seconds, StreamingContext} -import org.fiware.cosmos.orion.spark.connector.OrionReceiver - -object Logger{ - - def main(args: Array[String]): Unit = { - - val conf = new SparkConf().setAppName("Logger") - val ssc = new StreamingContext(conf, Seconds(60)) - // Create Orion Receiver. Receive notifications on port 9001 - val eventStream = ssc.receiverStream(new OrionReceiver(9001)) - - // Process event stream - val processedDataStream= eventStream - .flatMap(event => event.entities) - .map(ent => { - new Sensor(ent.`type`) - }) - .countByValue() - .window(Seconds(60)) - - processedDataStream.print() - - ssc.start() - ssc.awaitTermination() - } - case class Sensor(device: String) -} -``` - -The first lines of the program are aimed at importing the necessary dependencies, including the connector. The next step -is to create an instance of the `OrionReceiver` using the class provided by the connector and to add it to the -environment provided by Spark. - -The `OrionReceiver` constructor accepts a port number (`9001`) as a parameter. This port is used to listen to the -subscription notifications coming from Orion and converted to a `DataStream` of `NgsiEvent` objects. The definition of -these objects can be found within the -[Orion-Spark Connector documentation](https://github.com/ging/fiware-cosmos-orion-spark-connector/blob/master/README.md#orionreceiver). - -The stream processing consists of five separate steps. The first step (`flatMap()`) is performed in order to put -together the entity objects of all the NGSI Events received in a period of time. Thereafter the code iterates over them -(with the `map()` operation) and extracts the desired attributes. In this case, we are interested in the sensor `type` -(`Door`, `Motion`, `Bell` or `Lamp`). - -Within each iteration, we create a custom object with the property we need: the sensor `type`. For this purpose, we can -define a case class as shown: - -```scala -case class Sensor(device: String) -``` - -Thereafter can count the created objects by the type of device (`countByValue()`) and perform operations such as -`window()` on them. - -After the processing, the results are output to the console: - -```scala -processedDataStream.print() -``` - -#### Logger - NGSI-LD: - -The same example is provided for data in the NGSI-LD format (`LoggerLD.scala`). This example makes use of the -NGSILDReceiver provided by the Orion Spark Connector in order to receive messages in the NGSI-LD format. The only part -of the code that changes is the declaration of the receiver: - -```scala -... -import org.fiware.cosmos.orion.spark.connector.NGSILDReceiver -... -val eventStream = env.addSource(new NGSILDReceiver(9001)) -... 
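// For Spark Streaming the receiver is attached through the StreamingContext,
// as done in LoggerLD.scala:
// val eventStream = ssc.receiverStream(new NGSILDReceiver(9001))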
-```
-
-In order to run this job, you need to use the spark-submit command again, specifying the `LoggerLD` class instead of
-`Logger`:
-
-```console
-/spark/bin/spark-submit \
---class org.fiware.cosmos.tutorial.LoggerLD \
---master spark://spark-master:7077 \
---deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.2.jar \
---conf "spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console"
-```
-
-## Feedback Loop - Persisting Context Data
-
-The second example switches on a lamp when its motion sensor detects movement.
-
-The dataflow stream uses the `OrionReceiver` operator in order to receive notifications, filters the input to only
-respond to motion sensors and then uses the `OrionSink` to push processed context back to the Context Broker. You can
-find the source code of the example in
-[org/fiware/cosmos/tutorial/Feedback.scala](https://github.com/ging/fiware-cosmos-orion-spark-connector-tutorial/blob/master/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala)
-
-### Feedback Loop - Installing the JAR
-
-```console
-/spark/bin/spark-submit \
---class org.fiware.cosmos.tutorial.Feedback \
---master spark://spark-master:7077 \
---deploy-mode client /home/cosmos-examples/target/cosmos-examples-1.2.2.jar \
---conf "spark.driver.extraJavaOptions=-Dlog4jspark.root.logger=WARN,console"
-```
-
-### Feedback Loop - Subscribing to context changes
-
-If the previous example has not been run, a new subscription will need to be set up. A narrower subscription can be set
-up to only trigger a notification when a motion sensor detects movement.
-
-> [!NOTE]
-> If the previous subscription already exists, this step, which creates a second narrower Motion-only subscription,
-> is unnecessary. There is a filter within the business logic of the Scala task itself.
-
-```console
-curl -iX POST \
-  'http://localhost:1026/v2/subscriptions' \
-  -H 'Content-Type: application/json' \
-  -H 'fiware-service: openiot' \
-  -H 'fiware-servicepath: /' \
-  -d '{
-  "description": "Notify Spark of all context changes",
-  "subject": {
-    "entities": [
-      {
-        "idPattern": "Motion.*"
-      }
-    ]
-  },
-  "notification": {
-    "http": {
-      "url": "http://spark-worker-1:9001"
-    }
-  }
-}'
-```
-
-### Feedback Loop - Checking the Output
-
-Go to `http://localhost:3000/device/monitor`
-
-Within any Store, unlock the door and wait. Once the door opens and the Motion sensor is triggered, the lamp will switch
-on directly.
-
-### Feedback Loop - Analyzing the Code
-
-```scala
-package org.fiware.cosmos.tutorial
-
-import org.apache.spark.SparkConf
-import org.apache.spark.streaming.{Seconds, StreamingContext}
-import org.fiware.cosmos.orion.spark.connector._
-
-object Feedback {
-  final val CONTENT_TYPE = ContentType.JSON
-  final val METHOD = HTTPMethod.PATCH
-  final val CONTENT = "{\n \"on\": {\n \"type\" : \"command\",\n \"value\" : \"\"\n }\n}"
-  final val HEADERS = Map("fiware-service" -> "openiot","fiware-servicepath" -> "/","Accept" -> "*/*")
-
-  def main(args: Array[String]): Unit = {
-
-    val conf = new SparkConf().setAppName("Feedback")
-    val ssc = new StreamingContext(conf, Seconds(10))
-    // Create Orion Receiver.
-    // Receive notifications on port 9001
-    val eventStream = ssc.receiverStream(new OrionReceiver(9001))
-
-    // Process event stream
-    val processedDataStream = eventStream
-      .flatMap(event => event.entities)
-      .filter(entity=>(entity.attrs("count").value == "1"))
-      .map(entity=> new Sensor(entity.id))
-      .window(Seconds(10))
-
-    val sinkStream= processedDataStream.map(sensor => {
-      val url="http://localhost:1026/v2/entities/Lamp:"+sensor.id.takeRight(3)+"/attrs"
-      OrionSinkObject(CONTENT,url,CONTENT_TYPE,METHOD,HEADERS)
-    })
-    // Add Orion Sink
-    OrionSink.addSink( sinkStream )
-
-    // print the results with a single thread, rather than in parallel
-    processedDataStream.print()
-    ssc.start()
-
-    ssc.awaitTermination()
-  }
-
-  case class Sensor(id: String)
-}
-```
-
-As you can see, it is similar to the previous example. The main difference is that it writes the processed data back to
-the Context Broker through the **`OrionSink`**.
-
-The arguments of the **`OrionSinkObject`** are:
-
-- **Message**: `"{\n \"on\": {\n \"type\" : \"command\",\n \"value\" : \"\"\n }\n}"`. We send the 'on' command.
-- **URL**: `"http://localhost:1026/v2/entities/Lamp:"+sensor.id.takeRight(3)+"/attrs"`. `takeRight(3)` gets the device
-  number (for example '001'), so the command targets the Lamp paired with the Motion Sensor that fired.
-- **Content Type**: `ContentType.JSON`.
-- **HTTP Method**: `HTTPMethod.PATCH`.
-- **Headers**: `Map("fiware-service" -> "openiot","fiware-servicepath" -> "/","Accept" -> "*/*")`. Optional parameter.
-  We add the headers we need in the HTTP Request.
-
-# Next Steps
-
-If you would rather use Flink as your data processing engine, we have
-[this tutorial available for Flink](https://github.com/ging/tutorials.Big-Data-Analysis) as well.
-
-The operations performed on data in this tutorial were very simple. If you would like to know how to set up a scenario
-for performing real-time predictions using Machine Learning, check out the
-[demo](https://github.com/ging/fiware-global-summit-berlin-2019-ml/) presented at the FIWARE Global Summit in Berlin
-(2019).
- -If you want to learn how to add more complexity to your application by adding advanced features, you can find out by -reading the other [tutorials in this series](https://fiware-tutorials.rtfd.io) +The tutorial uses [cUrl](https://ec.haxx.se/) commands throughout, but is also available as +[Postman documentation](https://www.postman.com/downloads/) --- diff --git a/cosmos-examples/.java-version b/cosmos-examples/.java-version deleted file mode 100644 index 6259340..0000000 --- a/cosmos-examples/.java-version +++ /dev/null @@ -1 +0,0 @@ -1.8 diff --git a/cosmos-examples/dependency-reduced-pom.xml b/cosmos-examples/dependency-reduced-pom.xml deleted file mode 100644 index 558022c..0000000 --- a/cosmos-examples/dependency-reduced-pom.xml +++ /dev/null @@ -1,369 +0,0 @@ - - - 4.0.0 - org.fiware.cosmos - cosmos-examples - Fiware Orion Spark Connector Examples - 1.2.1 - https://www.fiware.org/ - - - - maven-shade-plugin - 3.0.0 - - - package - - shade - - - - - org.apache.spark:force-shading - com.google.code.findbugs:jsr305 - org.slf4j:* - log4j:* - org.apache.flink - org.apache.flink - org.scala-lang - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - org.fiware.cosmos.tutorial.Logger - - - - - - - - maven-compiler-plugin - 3.1 - - 1.8 - 1.8 - - - - net.alchim31.maven - scala-maven-plugin - 3.2.2 - - - - compile - testCompile - - - - - - maven-eclipse-plugin - 2.8 - - true - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - org.scala-ide.sdt.core.scalabuilder - - - org.scala-ide.sdt.launching.SCALA_CONTAINER - org.eclipse.jdt.launching.JRE_CONTAINER - - - org.scala-lang:scala-library - org.scala-lang:scala-compiler - - - **/*.scala - **/*.java - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.7 - - - add-source - generate-sources - - add-source - - - - src/main/scala - - - - - add-test-source - generate-test-sources - - add-test-source - - - - src/test/scala - - - - - - - - - - add-dependencies-for-IDEA - - - org.apache.spark - spark-core_${scala.binary.version} - ${spark.version} - compile - - - org.apache.spark - spark-streaming_${scala.binary.version} - ${spark.version} - compile - - - org.scala-lang - scala-library - ${scala.version} - compile - - - - - - - - false - - - apache.snapshots - Apache Development Snapshot Repository - https://repository.apache.org/content/repositories/snapshots/ - - - - - org.apache.spark - spark-core_2.11 - 2.4.4 - provided - - - paranamer - com.thoughtworks.paranamer - - - avro - org.apache.avro - - - avro-mapred - org.apache.avro - - - hadoop-client - org.apache.hadoop - - - spark-launcher_2.11 - org.apache.spark - - - spark-kvstore_2.11 - org.apache.spark - - - spark-network-common_2.11 - org.apache.spark - - - spark-network-shuffle_2.11 - org.apache.spark - - - activation - javax.activation - - - curator-recipes - org.apache.curator - - - zookeeper - org.apache.zookeeper - - - javax.servlet-api - javax.servlet - - - commons-lang3 - org.apache.commons - - - commons-math3 - org.apache.commons - - - jul-to-slf4j - org.slf4j - - - jcl-over-slf4j - org.slf4j - - - compress-lzf - com.ning - - - lz4-java - org.lz4 - - - zstd-jni - com.github.luben - - - RoaringBitmap - org.roaringbitmap - - - commons-net - commons-net - - - json4s-jackson_2.11 - org.json4s - - - jersey-client - org.glassfish.jersey.core - - - jersey-common - org.glassfish.jersey.core - - - jersey-server - org.glassfish.jersey.core - - - jersey-container-servlet - org.glassfish.jersey.containers - - - jersey-container-servlet-core - 
org.glassfish.jersey.containers - - - netty-all - io.netty - - - netty - io.netty - - - stream - com.clearspring.analytics - - - metrics-core - io.dropwizard.metrics - - - metrics-jvm - io.dropwizard.metrics - - - metrics-json - io.dropwizard.metrics - - - metrics-graphite - io.dropwizard.metrics - - - jackson-module-scala_2.11 - com.fasterxml.jackson.module - - - ivy - org.apache.ivy - - - oro - oro - - - pyrolite - net.razorvine - - - py4j - net.sf.py4j - - - commons-crypto - org.apache.commons - - - - - org.apache.spark - spark-streaming_2.11 - 2.4.4 - provided - - - org.scala-lang - scala-library - 2.11.2 - provided - - - org.slf4j - slf4j-log4j12 - 1.7.7 - runtime - - - log4j - log4j - 1.2.17 - runtime - - - - 2.11.2 - 2.11 - 2.4.6 - UTF-8 - 2.4.4 - - - diff --git a/cosmos-examples/pom.xml b/cosmos-examples/pom.xml deleted file mode 100644 index 8b62490..0000000 --- a/cosmos-examples/pom.xml +++ /dev/null @@ -1,293 +0,0 @@ - - - 4.0.0 - - org.fiware.cosmos - cosmos-examples - 1.2.2 - jar - - Fiware Orion Spark Connector Examples - https://www.fiware.org/ - - - - apache.snapshots - Apache Development Snapshot Repository - https://repository.apache.org/content/repositories/snapshots/ - - false - - - true - - - - - - UTF-8 - 2.4.4 - 2.11 - 2.11.2 - 2.4.6 - - - - - - - org.apache.spark - spark-core_${scala.binary.version} - ${spark.version} - provided - - - - org.apache.spark - spark-streaming_${scala.binary.version} - ${spark.version} - provided - - - - - org.apache.spark - spark-sql_${scala.binary.version} - ${spark.version} - - - - - - org.scala-lang - scala-library - ${scala.version} - provided - - - - org.fiware.cosmos - orion.spark.connector - 1.2.2 - - - - - - org.slf4j - slf4j-log4j12 - 1.7.7 - runtime - - - log4j - log4j - 1.2.17 - runtime - - - org.apache.httpcomponents - httpclient - 4.5.9 - - - - - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.0.0 - - - - package - - shade - - - - - org.apache.spark:force-shading - com.google.code.findbugs:jsr305 - org.slf4j:* - log4j:* - org.apache.flink - org.apache.flink - org.scala-lang - - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - org.fiware.cosmos.tutorial.Logger - - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.1 - - 1.8 - 1.8 - - - - - - - net.alchim31.maven - scala-maven-plugin - 3.2.2 - - - - compile - testCompile - - - - - - - - org.apache.maven.plugins - maven-eclipse-plugin - 2.8 - - true - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - org.scala-ide.sdt.core.scalabuilder - - - org.scala-ide.sdt.launching.SCALA_CONTAINER - org.eclipse.jdt.launching.JRE_CONTAINER - - - org.scala-lang:scala-library - org.scala-lang:scala-compiler - - - **/*.scala - **/*.java - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.7 - - - - add-source - generate-sources - - add-source - - - - src/main/scala - - - - - - add-test-source - generate-test-sources - - add-test-source - - - - src/test/scala - - - - - - - - - - - - - - add-dependencies-for-IDEA - - - - idea.version - - - - - - org.apache.spark - spark-core_${scala.binary.version} - ${spark.version} - compile - - - - - org.apache.spark - spark-streaming_${scala.binary.version} - ${spark.version} - compile - - - - org.scala-lang - scala-library - ${scala.version} - compile - - - - - - diff --git a/cosmos-examples/src/main/resources/log4j.properties b/cosmos-examples/src/main/resources/log4j.properties deleted file mode 100644 index e7e2922..0000000 --- 
a/cosmos-examples/src/main/resources/log4j.properties +++ /dev/null @@ -1,25 +0,0 @@ -################################################################################ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -################################################################################ - -log4j.rootLogger=INFO, console -#log4j.rootLogger= console - - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n diff --git a/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala b/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala deleted file mode 100644 index 71ce96d..0000000 --- a/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Feedback.scala +++ /dev/null @@ -1,48 +0,0 @@ -package org.fiware.cosmos.tutorial - - -import org.apache.spark._ -import org.apache.spark.streaming.{Seconds, StreamingContext} -import org.fiware.cosmos.orion.spark.connector._ - -/** - * Feedback Example Orion Connector - * @author @Javierlj - */ -object Feedback { - final val CONTENT_TYPE = ContentType.JSON - final val METHOD = HTTPMethod.PATCH - final val CONTENT = "{\n \"on\": {\n \"type\" : \"command\",\n \"value\" : \"\"\n }\n}" - final val HEADERS = Map("fiware-service" -> "openiot","fiware-servicepath" -> "/","Accept" -> "*/*") - - def main(args: Array[String]): Unit = { - - val conf = new SparkConf().setAppName("Temperature") - val ssc = new StreamingContext(conf, Seconds(60)) - // Create Orion Receiver. 
Receive notifications on port 9001 - val eventStream = ssc.receiverStream(new OrionReceiver(9001)) - - // Process event stream - val processedDataStream = eventStream - .flatMap(event => event.entities) - .filter(entity=>(entity.attrs("count").value == "1")) - .map(entity=> new Sensor(entity.id)) - .window(Seconds(60)) - - val sinkStream= processedDataStream.map(sensor => { - OrionSinkObject(CONTENT,"http://orion:1026/v2/entities/Lamp:"+sensor.id.takeRight(3)+"/attrs",CONTENT_TYPE,METHOD,HEADERS) - }); - - - // Add Orion Sink - OrionSink.addSink( sinkStream ) - - // print the results with a single thread, rather than in parallel - processedDataStream.print() - ssc.start() - - ssc.awaitTermination() - } - - case class Sensor(id: String) -} diff --git a/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala b/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala deleted file mode 100644 index 18e6083..0000000 --- a/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/Logger.scala +++ /dev/null @@ -1,33 +0,0 @@ -package org.fiware.cosmos.tutorial -import org.apache.spark._ -import org.apache.spark.streaming.{Seconds, StreamingContext} -import org.fiware.cosmos.orion.spark.connector._ -/** - * Logger example Orion Connector - * @author @Javierlj - */ -object Logger{ - - def main(args: Array[String]): Unit = { - - val conf = new SparkConf().setAppName("Example 1") - val ssc = new StreamingContext(conf, Seconds(60)) - // Create Orion Receiver. Receive notifications on port 9001 - val eventStream = ssc.receiverStream(new OrionReceiver(9001)) - - // Process event stream - eventStream - .flatMap(event => event.entities) - .map(ent => { - new Sensor(ent.`type`) - }) - .countByValue() - .window(Seconds(60)) - .print() - - - ssc.start() - ssc.awaitTermination() - } - case class Sensor(device: String) -} diff --git a/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/LoggerLD.scala b/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/LoggerLD.scala deleted file mode 100644 index 2bb1594..0000000 --- a/cosmos-examples/src/main/scala/org/fiware/cosmos/tutorial/LoggerLD.scala +++ /dev/null @@ -1,33 +0,0 @@ -package org.fiware.cosmos.tutorial -import org.apache.spark._ -import org.apache.spark.streaming.{Seconds, StreamingContext} -import org.fiware.cosmos.orion.spark.connector._ -/** - * Logger example NGSILD Connector - * @author @Javierlj - */ -object LoggerLD{ - - def main(args: Array[String]): Unit = { - - val conf = new SparkConf().setAppName("Example 1") - val ssc = new StreamingContext(conf, Seconds(60)) - // Create Orion Receiver. 
Receive notifications on port 9001 - val eventStream = ssc.receiverStream(new NGSILDReceiver(9001)) - - // Process event stream - eventStream - .flatMap(event => event.entities) - .map(ent => { - new Sensor(ent.`type`) - }) - .countByValue() - .window(Seconds(60)) - .print() - - - ssc.start() - ssc.awaitTermination() - } - case class Sensor(device: String) -} diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 5303ac6..0000000 --- a/docker-compose.yml +++ /dev/null @@ -1,152 +0,0 @@ - -version: "3.8" -services: - # Orion is an NGSI-v2 context broker - orion-v2: - labels: - org.fiware: 'tutorial' - image: quay.io/fiware/orion:${ORION_VERSION} - hostname: orion - container_name: fiware-orion - depends_on: - - mongo-db - networks: - - default - expose: - - "${ORION_PORT}" - ports: - - "${ORION_PORT}:${ORION_PORT}" - command: -dbhost mongo-db -logLevel DEBUG - healthcheck: - test: curl --fail -s http://orion:${ORION_PORT}/version || exit 1 - interval: 5s - - # IoT-Agent is configured for the UltraLight Protocol - iot-agent: - labels: - org.fiware: 'tutorial' - image: quay.io/fiware/iotagent-ul:${ULTRALIGHT_VERSION} - hostname: iot-agent - container_name: fiware-iot-agent - depends_on: - - mongo-db - networks: - - default - ports: - - "${IOTA_NORTH_PORT}:${IOTA_NORTH_PORT}" - - "${IOTA_SOUTH_PORT}:${IOTA_SOUTH_PORT}" - environment: - - IOTA_CB_HOST=orion # name of the context broker to update context - - IOTA_CB_PORT=${ORION_PORT} # port the context broker listens on to update context - - IOTA_NORTH_PORT=${IOTA_NORTH_PORT} - - IOTA_REGISTRY_TYPE=mongodb #Whether to hold IoT device info in memory or in a database - - IOTA_LOG_LEVEL=DEBUG # The log level of the IoT Agent - - IOTA_TIMESTAMP=true # Supply timestamp information with each measurement - - IOTA_CB_NGSI_VERSION=v2 # use NGSIv2 when sending updates for active attributes - - IOTA_AUTOCAST=true # Ensure Ultralight number values are read as numbers not strings - - IOTA_MONGO_HOST=mongo-db # The host name of MongoDB - - IOTA_MONGO_PORT=${MONGO_DB_PORT} # The port mongoDB is listening on - - IOTA_MONGO_DB=iotagentul # The name of the database used in mongoDB - - IOTA_HTTP_PORT=${IOTA_SOUTH_PORT} # The port used for device traffic over HTTP - - IOTA_PROVIDER_URL=http://iot-agent:${IOTA_NORTH_PORT} - healthcheck: - interval: 5s - - - # Tutorial acts as a series of dummy IoT Sensors over HTTP - tutorial: - labels: - org.fiware: 'tutorial' - image: quay.io/fiware/tutorials.context-provider - hostname: iot-sensors - container_name: fiware-tutorial - networks: - default: - aliases: - - tutorial - - context-provider - expose: - - "${TUTORIAL_APP_PORT}" - - "${TUTORIAL_DUMMY_DEVICE_PORT}" - ports: - - "${TUTORIAL_APP_PORT}:${TUTORIAL_APP_PORT}" - - "${TUTORIAL_DUMMY_DEVICE_PORT}:${TUTORIAL_DUMMY_DEVICE_PORT}" - environment: - - "MONGO_URL=mongodb://mongo-db:27017" - - "DEBUG=tutorial:*" - - "WEB_APP_PORT=${TUTORIAL_APP_PORT}" # Port used by the content provider proxy and web-app for viewing data - - "IOTA_HTTP_HOST=iot-agent" - - "IOTA_HTTP_PORT=${IOTA_SOUTH_PORT}" - - "DUMMY_DEVICES_PORT=${TUTORIAL_DUMMY_DEVICE_PORT}" # Port used by the dummy IOT devices to receive commands - - "DUMMY_DEVICES_TRANSPORT=HTTP" # Default transport used by dummy IoT devices - - "CONTEXT_BROKER=http://orion:${ORION_PORT}/v2" # URL of the context broker to update context - - "NGSI_LD_PREFIX=" - - "OPENWEATHERMAP_KEY_ID=" - - "TWITTER_CONSUMER_KEY=" - - "TWITTER_CONSUMER_SECRET=" - healthcheck: - test: curl --fail -s 
http://tutorial:${TUTORIAL_APP_PORT}/version || exit 1 - spark-master: - labels: - org.fiware: 'tutorial' - image: bde2020/spark-master:${SPARK_VERSION} - container_name: spark-master - expose: - - "${SPARK_WEB_APP_PORT}" - - "${SPARK_SERVICE_PORT}" - ports: - - "${SPARK_WEB_APP_PORT}:${SPARK_WEB_APP_PORT}" - - "${SPARK_MASTER_PORT}:${SPARK_MASTER_PORT}" - - "${SPARK_SERVICE_PORT}:${SPARK_SERVICE_PORT}" - environment: - - INIT_DAEMON_STEP=setup_spark - - "constraint:node==spark-master" - volumes: - - ./log4j.properties:/spark/conf/log4j.properties - spark-worker-1: - labels: - org.fiware: 'tutorial' - image: bde2020/spark-worker:${SPARK_VERSION} - container_name: spark-worker-1 - depends_on: - - spark-master - ports: - - "${SPARK_WORKER_1_PORT}:${SPARK_WORKER_1_PORT}" - environment: - - "SPARK_MASTER=spark://spark-master:${SPARK_MASTER_PORT}" - - "constraint:node==spark-master" - volumes: - - ${PWD}/cosmos-examples/:/home/cosmos-examples/ - - ${PWD}/log4j.properties:/spark/conf/log4j.properties - # Database - mongo-db: - labels: - org.fiware: 'tutorial' - image: mongo:${MONGO_DB_VERSION} - hostname: mongo-db - container_name: db-mongo - expose: - - "${MONGO_DB_PORT}" - ports: - - "${MONGO_DB_PORT}:${MONGO_DB_PORT}" - networks: - - default - volumes: - - mongo-db:/data - healthcheck: - test: ["CMD","mongosh", "--eval", "db.adminCommand('ping')"] - interval: 5s - timeout: 5s - retries: 3 - start_period: 5s - -networks: - default: - labels: - org.fiware: 'tutorial' - ipam: - config: - - subnet: 172.18.100.0/24 - -volumes: - mongo-db: ~ diff --git a/import-data b/import-data deleted file mode 100755 index 7f801c8..0000000 --- a/import-data +++ /dev/null @@ -1,525 +0,0 @@ -#!/bin/bash -# -# curl -s -o /dev/null commands to reload the data from the previous tutorial -# -# - -set -e - - -printf "⏳ Loading context data " - -# -# Create four Store Entities in various locations across Berlin -# - -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -g -d '{ - "actionType": "append", - "entities": [ - { - "id":"urn:ngsi-ld:Store:001","type":"Store", - "address":{"type":"PostalAddress","value":{"streetAddress":"Bornholmer Straße 65","addressRegion":"Berlin","addressLocality":"Prenzlauer Berg","postalCode":"10439"}}, - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.3986,52.5547]}}, - "name":{"type":"Text","value":"Bösebrücke Einkauf"} - }, - { - "id":"urn:ngsi-ld:Store:002","type":"Store", - "address":{"type":"PostalAddress","value":{"streetAddress":"Friedrichstraße 44","addressRegion":"Berlin","addressLocality":"Kreuzberg","postalCode":"10969"}}, - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.3903,52.5075]}}, - "name":{"type":"Text","value":"Checkpoint Markt"}}, - { - "id":"urn:ngsi-ld:Store:003","type":"Store", - "address":{"type":"PostalAddress","value":{"streetAddress":"Mühlenstrasse 10","addressRegion":"Berlin","addressLocality":"Friedrichshain","postalCode":"10243"}}, - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.4447,52.5031]}}, - "name":{"type":"Text","value":"East Side Galleria"} - }, - { - "id":"urn:ngsi-ld:Store:004","type":"Store", - "address":{"type":"PostalAddress","value":{"streetAddress":"Panoramastraße 1A","addressRegion":"Berlin","addressLocality":"Mitte","postalCode":"10178"}}, - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.4094,52.5208]}}, - "name":{"type":"Text","value":"Tower Trödelmarkt"} - } - ] -}' - -# -# Add 
Weather and Twitter Responses for Store 1 using random values -# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/registrations' \ - -H 'Content-Type: application/json' \ - -d '{ - "description": "Weather Conditions", - "dataProvided": { - "entities": [ - { - "id" : "urn:ngsi-ld:Store:001", - "type": "Store" - } - ], - "attrs": [ - "temperature", "relativeHumidity" - ] - }, - "provider": { - "http": { - "url": "http://context-provider:3000/proxy/v1/random/weatherConditions" - }, - "legacyForwarding": true - }, - "status": "active" -}' - -curl -s -o /dev/null -X POST \ - http://orion:1026/v2/registrations \ - -H 'Content-Type: application/json' \ - -d '{ - "description": "Tweeting Cat Facts", - "dataProvided": { - "entities": [ - { - "id" : "urn:ngsi-ld:Store:001", - "type": "Store" - } - ], - "attrs": [ - "tweets" - ] - }, - "provider": { - "http": { - "url": "http://context-provider:3000/proxy/v1/catfacts/tweets" - }, - "legacyForwarding": true - }, - "status": "active" -}' - - -# Add Weather and Twitter Responses for Store 2 using real sources -# -# curl -s -o /dev/null -X POST \ -# 'http://orion:1026/v2/registrations' \ -# -H 'Content-Type: application/json' \ -# -d '{ -# "description": "Store:002 - Real Temperature and Humidity", -# "dataProvided": { -# "entities": [ -# { -# "id": "urn:ngsi-ld:Store:002", -# "type": "Store" -# } -# ], -# "attrs": [ -# "temperature", -# "relativeHumidity" -# ] -# }, -# "provider": { -# "http": { -# "url": "http://context-provider:3000/proxy/weather/number/temperature:temp,relativeHumidity:humidity/berlin%2cde" -# }, -# "legacyForwarding": true -# } -# }' - -# curl -s -o /dev/null -X POST \ -# 'http://orion:1026/v2/registrations' \ -# -H 'Content-Type: application/json' \ -# -d '{ -# "description": "Store:002 Real Tweets", -# "dataProvided": { -# "entities": [ -# { -# "id": "urn:ngsi-ld:Store:002", -# "type": "Store" -# } -# ], -# "attrs": [ -# "tweets" -# ] -# }, -# "provider": { -# "http": { -# "url": "http://context-provider:3000/proxy/twitter/list/tweets:text/FIWARE" -# }, -# "legacyForwarding": true -# } -# }' - -# -# Create a series of Shelf Entities and place the in each Store. -# Each shelf is designed to hold one product. 
-# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -g -d '{ - "actionType": "append", - "entities": [ - { - "id":"urn:ngsi-ld:Shelf:unit001","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.3986112,52.554699]}}, - "maxCapacity":{"type":"Integer","value":50}, - "name":{"type":"Text","value":"Corner Unit"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:001"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit002","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.3987221,52.554664]}}, - "maxCapacity":{"type":"Integer","value":100}, - "name":{"type":"Text","value":"Wall Unit 1"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:001"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit003","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.3987221,52.554664]}}, - "maxCapacity":{"type":"Integer","value":100}, - "name":{"type":"Text","value":"Wall Unit 2"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:001"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit004","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.390311,52.507522]}}, - "maxCapacity":{"type":"Integer","value":50}, - "name":{"type":"Text","value":"Corner Unit"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:002"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit005","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.390309,52.50751]}}, - "maxCapacity":{"type":"Integer","value":200}, - "name":{"type":"Text","value":"Long Wall Unit"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:002"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit006","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.4447112,52.503199]}}, - "maxCapacity":{"type":"Integer","value":50}, - "name":{"type":"Text","value":"Corner Unit"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit007","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.4447221,52.503164]}}, - "maxCapacity":{"type":"Integer","value":100}, - "name":{"type":"Text","value":"Wall Unit 1"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit008","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.4447221,52.503164]}}, - "maxCapacity":{"type":"Integer","value":100}, - "name":{"type":"Text","value":"Wall Unit 2"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit009","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.444711,52.503122]}}, - "maxCapacity":{"type":"Integer","value":50}, - "name":{"type":"Text","value":"Corner Unit"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"} - }, - { - "id":"urn:ngsi-ld:Shelf:unit010","type":"Shelf", - "location":{"type":"geo:json","value":{"type":"Point","coordinates":[13.4094111,52.5208028]}}, - "maxCapacity":{"type":"Integer","value":200}, - "name":{"type":"Text","value":"Long Wall Unit"}, - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:004"} - } - ] -}' - -# -# Create a series of Product Entities. -# These are a series of alcoholc and non-alcoholic drinks which are available to sell. 
-# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -d '{ - "actionType":"APPEND", - "entities":[ - { - "id":"urn:ngsi-ld:Product:001", "type":"Product", - "name":{"type":"Text", "value":"Apples"}, - "size":{"type":"Text", "value": "S"}, - "price":{"type":"Integer", "value": 99} - }, - { - "id":"urn:ngsi-ld:Product:002", "type":"Product", - "name":{"type":"Text", "value":"Bananas"}, - "size":{"type":"Text", "value": "M"}, - "price":{"type":"Integer", "value": 1099} - }, - { - "id":"urn:ngsi-ld:Product:003", "type":"Product", - "name":{"type":"Text", "value":"Coconuts"}, - "size":{"type":"Text", "value": "M"}, - "price":{"type":"Integer", "value": 1499} - }, - { - "id":"urn:ngsi-ld:Product:004", "type":"Product", - "name":{"type":"Text", "value":"Melons"}, - "size":{"type":"Text", "value": "XL"}, - "price":{"type":"Integer", "value": 5000} - }, - { - "id":"urn:ngsi-ld:Product:005", "type":"Product", - "name":{"type":"Text", "value":"Kiwi Fruits"}, - "size":{"type":"Text", "value": "S"}, - "price":{"type":"Integer", "value": 99} - }, - { - "id":"urn:ngsi-ld:Product:006", "type":"Product", - "name":{"type":"Text", "value":"Strawberries"}, - "size":{"type":"Text", "value": "S"}, - "price":{"type":"Integer", "value": 99} - }, - { - "id":"urn:ngsi-ld:Product:007", "type":"Product", - "name":{"type":"Text", "value":"Raspberries"}, - "size":{"type":"Text", "value": "S"}, - "price":{"type":"Integer", "value": 99} - }, - { - "id":"urn:ngsi-ld:Product:008", "type":"Product", - "name":{"type":"Text", "value":"Pineapples"}, - "size":{"type":"Text", "value": "S"}, - "price":{"type":"Integer", "value": 99} - }, - { - "id":"urn:ngsi-ld:Product:009", "type":"Product", - "name":{"type":"Text", "value":"Oranges"}, - "size":{"type":"Text", "value": "S"}, - "price":{"type":"Integer", "value": 99} - } - ] -}' - - -# -# Create a series of InventoryItems Entities. -# These the drinks on order in Store:001 -# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -d '{ - "actionType":"APPEND", - "entities":[ - { - "id":"urn:ngsi-ld:InventoryItem:001","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:001"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit001"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:001"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":15} - }, - { - "id":"urn:ngsi-ld:InventoryItem:002","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:001"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit002"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:003"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":50} - }, - { - "id":"urn:ngsi-ld:InventoryItem:003","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:001"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit003"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:004"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":50} - } - - ] -}' -# -# Create a series of InventoryItems Entities. 
-# These the drinks on order in Store:002 -# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -d '{ - "actionType":"APPEND", - "entities":[ - { - "id":"urn:ngsi-ld:InventoryItem:004","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:002"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit004"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:001"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":15} - }, - { - "id":"urn:ngsi-ld:InventoryItem:005","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:002"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit005"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:002"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":15} - } - ] -}' -# -# Create a series of InventoryItems Entities. -# These the drinks on order in Store:003 -# Note that Shelf Unit009 is currently unused -# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -d '{ - "actionType":"APPEND", - "entities":[ - { - "id":"urn:ngsi-ld:InventoryItem:006","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit006"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:001"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":50} - }, - { - "id":"urn:ngsi-ld:InventoryItem:007","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit007"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:008"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":50} - }, - { - "id":"urn:ngsi-ld:InventoryItem:008","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:003"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit008"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:009"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":50} - } - - ] -}' -# -# Create a series of InventoryItems Entities. -# These the drinks on order in Store:004 -# -curl -s -o /dev/null -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -d '{ - "actionType":"APPEND", - "entities":[ - { - "id":"urn:ngsi-ld:InventoryItem:401","type":"InventoryItem", - "refStore":{"type":"Relationship","value":"urn:ngsi-ld:Store:004"}, - "refShelf":{"type":"Relationship","value":"urn:ngsi-ld:Shelf:unit010"}, - "refProduct":{"type":"Relationship","value":"urn:ngsi-ld:Product:001"}, - "stockCount":{"type":"Integer","value":10000}, - "shelfCount":{"type":"Integer","value":50} - } - - ] -}' - -# -# Ensure that actuators are minimally provisioned. 
-# -curl -X POST \ - 'http://orion:1026/v2/op/update' \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -d '{ - "actionType":"APPEND", - "entities":[ - { - "id": "Bell:001", - "type": "Bell", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:001" - } - }, - { - "id": "Door:001", - "type": "Door", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:001" - } - }, - { - "id": "Lamp:001", - "type": "Lamp", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:001" - } - }, - { - "id": "Bell:002", - "type": "Bell", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:002" - } - }, - { - "id": "Door:002", - "type": "Door", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:002" - } - }, - { - "id": "Lamp:002", - "type": "Lamp", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:002" - } - }, - { - "id": "Bell:003", - "type": "Bell", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:003" - } - }, - { - "id": "Door:003", - "type": "Door", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:003" - } - }, - { - "id": "Lamp:003", - "type": "Lamp", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:003" - } - }, - { - "id": "Bell:004", - "type": "Bell", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:004" - } - }, - { - "id": "Door:004", - "type": "Door", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:004" - } - }, - { - "id": "Lamp:004", - "type": "Lamp", - "refStore": { - "type": "Relationship","value": "urn:ngsi-ld:Store:004" - } - } - ] -}' -echo -e " \033[1;32mdone\033[0m" \ No newline at end of file diff --git a/log4j.properties b/log4j.properties deleted file mode 100644 index 723593c..0000000 --- a/log4j.properties +++ /dev/null @@ -1,40 +0,0 @@ -# - # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Set everything to be logged to the console -log4j.rootCategory=WARN, console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n - -# Set the default spark-shell log level to WARN. When running the spark-shell, the -# log level for this class is used to overwrite the root logger's log level, so that -# the user can have different defaults for the shell and regular Spark apps. 
-log4j.logger.org.apache.spark.repl.Main=WARN - -# Settings to quiet third party logs that are too verbose -log4j.logger.org.spark_project.jetty=WARN -log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR -log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO -log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO -log4j.logger.org.apache.parquet=ERROR -log4j.logger.parquet=ERROR - -# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support -log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL -log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR diff --git a/provision-devices b/provision-devices deleted file mode 100755 index 31eb090..0000000 --- a/provision-devices +++ /dev/null @@ -1,332 +0,0 @@ -#!/bin/bash -# -# curl commands to reload the data from the previous tutorial -# -# - -set -e - - - - - -printf "⏳ Provisioning IoT devices " - - -# -# Create a service groups for all UltraLight IoT devices -# - -curl -s -o /dev/null -X POST \ - "http://iot-agent:$IOTA_NORTH_PORT/iot/services" \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "services": [ - { - "apikey": "1068318794", - "cbroker": "'"http://orion:$ORION_PORT"'", - "entity_type": "Motion", - "resource": "/iot/d", - "protocol": "PDI-IoTA-UltraLight", - "transport": "HTTP", - "timezone": "Europe/Berlin", - "attributes": [ - { "object_id": "c", "name":"count", "type":"Integer"} - ], - "static_attributes": [ - {"name": "category", "type":"Text", "value": ["sensor"]}, - {"name": "controlledProperty", "type": "Text", "value": "motion"}, - {"name": "function", "type": "Text", "value":["sensing"]}, - {"name": "supportedProtocol", "type": "Text", "value": ["ul20"]}, - {"name": "supportedUnits", "type": "Text", "value": "C62"} - ] - }, - { - "apikey": "3020035", - "cbroker": "'"http://orion:$ORION_PORT"'", - "entity_type": "Bell", - "resource": "/iot/d", - "protocol": "PDI-IoTA-UltraLight", - "transport": "HTTP", - "timezone": "Europe/Berlin", - "commands": [ - { - "name": "ring", - "type": "command" - } - ], - "static_attributes": [ - {"name": "category", "type":"Text", "value": ["actuator"]}, - {"name": "controlledProperty", "type": "Text", "value": "noiseLevel"}, - {"name": "function", "type": "Text", "value":["onOff"]}, - {"name": "supportedProtocol", "type": "Text", "value": ["ul20"]} - ] - }, - { - "apikey": "3314136", - "cbroker": "'"http://orion:$ORION_PORT"'", - "entity_type": "Lamp", - "resource": "/iot/d", - "protocol": "PDI-IoTA-UltraLight", - "transport": "HTTP", - "timezone": "Europe/Berlin", - "commands": [ - {"name": "on","type": "command"}, - {"name": "off","type": "command"} - ], - "attributes": [ - {"object_id": "s", "name": "state", "type":"Text"}, - {"object_id": "l", "name": "luminosity", "type":"Integer"} - ], - "static_attributes": [ - {"name": "category", "type":"Text", "value": ["actuator","sensor"]}, - {"name": "controlledProperty", "type": "Text", "value": "light"}, - {"name": "function", "type": "Text", "value":["onOff", "sensing"]}, - {"name": "supportedProtocol", "type": "Text", "value": ["ul20"]}, - {"name": "supportedUnits", "type": "Text", "value": "CDL"} - ] - }, - { - "apikey": "3089326", - "cbroker": "'"http://orion:$ORION_PORT"'", - "entity_type": "Door", - "resource": "/iot/d", - "protocol": "PDI-IoTA-UltraLight", - "transport": "HTTP", - "timezone": "Europe/Berlin", - "commands": [ - {"name": 
"unlock","type": "command"}, - {"name": "open","type": "command"}, - {"name": "close","type": "command"}, - {"name": "lock","type": "command"} - ], - "attributes": [ - {"object_id": "s", "name": "state", "type":"Text"} - ], - "static_attributes": [ - {"name": "category", "type":"Text", "value": ["actuator", "sensor"]}, - {"name": "controlledProperty", "type": "Text", "value": "state"}, - {"name": "function", "type": "Text", "value":["openClose", "eventNotification"]}, - {"name": "supportedProtocol", "type": "Text", "value": ["ul20"]} - ] - } - ] -}' - -#################################################### -# -# Provision sensors for Store 001 -# - -curl -s -o /dev/null -X POST \ - "http://iot-agent:$IOTA_NORTH_PORT/iot/devices" \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "devices": [ - { - "device_id": "motion001", - "entity_name": "Motion:001", - "entity_type": "Motion", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:001"} - ] - }, - { - "device_id": "bell001", - "entity_name": "Bell:001", - "entity_type": "Bell", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/bell001"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:001"} - ] - }, - { - "device_id": "door001", - "entity_name": "Door:001", - "entity_type": "Door", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/door001"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:001"} - ] - }, - { - "device_id": "lamp001", - "entity_name": "Lamp:001", - "entity_type": "Lamp", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/lamp001"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:001"} - ] - } - ] -} -' - -#################################################### -# -# -# Provision sensors for Store 002 -# - -curl -s -o /dev/null -X POST \ - "http://iot-agent:$IOTA_NORTH_PORT/iot/devices" \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "devices": [ - { - "device_id": "motion002", - "entity_name": "Motion:002", - "entity_type": "Motion", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:002"} - ] - }, - { - "device_id": "bell002", - "entity_name": "Bell:002", - "entity_type": "Bell", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/bell002"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:002"} - ] - }, - { - "device_id": "door002", - "entity_name": "Door:002", - "entity_type": "Door", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/door002"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:002"} - ] - }, - { - "device_id": "lamp002", - "entity_name": "Lamp:002", - "entity_type": "Lamp", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/lamp002"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:002"} - ] - } - ] -} -' - - - - - -#################################################### -# -# -# Provision sensors for Store 3 -# - -curl -s -o /dev/null -X POST \ - "http://iot-agent:$IOTA_NORTH_PORT/iot/devices" \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 
'fiware-servicepath: /' \ - -d '{ - "devices": [ - { - "device_id": "motion003", - "entity_name": "Motion:003", - "entity_type": "Motion", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:003"} - ] - }, - { - "device_id": "bell003", - "entity_name": "Bell:003", - "entity_type": "Bell", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/bell003"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:003"} - ] - }, - { - "device_id": "door003", - "entity_name": "Door:003", - "entity_type": "Door", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/door003"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:003"} - ] - }, - { - "device_id": "lamp003", - "entity_name": "Lamp:003", - "entity_type": "Lamp", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/lamp003"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:003"} - ] - } - ] -} -' - - - - -#################################################### -# -# -# Provision sensors for Store 4 -# - -curl -s -o /dev/null -X POST \ - "http://iot-agent:$IOTA_NORTH_PORT/iot/devices" \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "devices": [ - { - "device_id": "motion004", - "entity_name": "Motion:004", - "entity_type": "Motion", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:004"} - ] - }, - { - "device_id": "bell004", - "entity_name": "Bell:004", - "entity_type": "Bell", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/bell004"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:004"} - ] - }, - { - "device_id": "door004", - "entity_name": "Door:004", - "entity_type": "Door", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/door004"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:004"} - ] - }, - { - "device_id": "lamp004", - "entity_name": "Lamp:004", - "entity_type": "Lamp", - "endpoint": "'"http://iot-sensors:$TUTORIAL_DUMMY_DEVICE_PORT/iot/lamp004"'", - "static_attributes": [ - {"name": "refStore", "type": "Relationship","value": "urn:ngsi-ld:Store:004"} - ] - } - ] -} -' - -echo -e " \033[1;32mdone\033[0m" \ No newline at end of file diff --git a/services b/services index 1f46b81..c744bc4 100755 --- a/services +++ b/services @@ -2,160 +2,8 @@ # # Command Line Interface to start all services associated with the Tutorial # -# For this tutorial the commands are merely a convenience script to run docker or docker-compose -# -# Each services script can be run using either docker-compose (the external tool with the hyphen -) -# or docker compose (the newer version directly bundled with Docker with a space ) -# -# if you start up with the following command: -# -# ./services start legacy -# -# This will force the script to use docker-compose which may be more reliable in -# some cases (or if an older version of Docker is being used) set -e -loadData () { - docker run --rm -v $(pwd)/import-data:/import-data \ - --network fiware_default \ - -e ORION_PORT="${ORION_PORT}" \ - -e TUTORIAL_APP_PORT="${TUTORIAL_APP_PORT}" \ - --entrypoint /bin/ash quay.io/curl/curl /import-data - waitForIoTAgent - docker run --rm -v $(pwd)/provision-devices:/provision-devices 
\ - --network fiware_default \ - -e ORION_PORT="${ORION_PORT}" \ - -e TUTORIAL_APP_PORT="${TUTORIAL_APP_PORT}" \ - -e TUTORIAL_DUMMY_DEVICE_PORT="${TUTORIAL_DUMMY_DEVICE_PORT}" \ - -e IOTA_NORTH_PORT="${IOTA_NORTH_PORT}" \ - --entrypoint /bin/ash quay.io/curl/curl /provision-devices - echo "" -} - -stoppingContainers () { - CONTAINERS=$(docker ps --filter "label=org.fiware=tutorial" -aq) - if [[ -n $CONTAINERS ]]; then - echo "Stopping containers" - docker rm -f $CONTAINERS || true - fi - VOLUMES=$(docker volume ls -qf dangling=true) - if [[ -n $VOLUMES ]]; then - echo "Removing old volumes" - docker volume rm $VOLUMES || true - fi - NETWORKS=$(docker network ls --filter "label=org.fiware=tutorial" -q) - if [[ -n $NETWORKS ]]; then - echo "Removing tutorial networks" - docker network rm $NETWORKS || true - fi -} - -displayServices () { - echo "" - docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" --filter name='[fiware|spark]-*' - echo "" -} - -addDatabaseIndex () { - printf "Adding appropriate \033[1mMongoDB\033[0m indexes for \033[1;34mOrion\033[0m ..." - docker exec db-mongo mongosh --eval ' - conn = new Mongo();db.createCollection("orion"); - db = conn.getDB("orion"); - db.createCollection("entities"); - db.entities.createIndex({"_id.servicePath": 1, "_id.id": 1, "_id.type": 1}, {unique: true}); - db.entities.createIndex({"_id.type": 1}); - db.entities.createIndex({"_id.id": 1});' > /dev/null - - docker exec db-mongo mongosh --eval ' - conn = new Mongo();db.createCollection("orion-openiot"); - db = conn.getDB("orion-openiot"); - db.createCollection("entities"); - db.entities.createIndex({"_id.servicePath": 1, "_id.id": 1, "_id.type": 1}, {unique: true}); - db.entities.createIndex({"_id.type": 1}); - db.entities.createIndex({"_id.id": 1});' > /dev/null - echo -e " \033[1;32mdone\033[0m" - -} - -waitForMongo () { - echo -e "\n⏳ Waiting for \033[1mMongoDB\033[0m to be available\n" - while ! [ `docker inspect --format='{{.State.Health.Status}}' db-mongo` == "healthy" ] - do - sleep 1 - done -} - -waitForOrion () { - echo -e "\n⏳ Waiting for \033[1;34mOrion\033[0m to be available\n" - - while ! [ `docker inspect --format='{{.State.Health.Status}}' fiware-orion` == "healthy" ] - do - echo -e "Context Broker HTTP state: " `curl -s -o /dev/null -w %{http_code} 'http://localhost:1026/version'` " (waiting for 200)" - sleep 1 - done -} - -waitForIoTAgent () { - echo -e "\n⏳ Waiting for \033[1;36mIoT-Agent\033[0m to be available\n" - while ! [ `docker inspect --format='{{.State.Health.Status}}' fiware-iot-agent` == "healthy" ] - - do - echo -e "IoT Agent HTTP state: " `curl -s -o /dev/null -w %{http_code} 'http://localhost:4041/version'` " (waiting for 200)" - sleep 1 - done -} - -dockerCmd="docker compose" -if (( $# == 2 )); then - dockerCmd="docker-compose" -fi - -if (( $# < 1 )); then - echo "Illegal number of parameters" - echo "usage: services [create|start|stop]" - exit 1 -fi - -command="$1" -case "${command}" in - "help") - echo "usage: services [create|start|stop]" - ;; - "start") - export $(cat .env | grep "#" -v) - stoppingContainers - echo -e "Starting containers: \033[1;34mOrion\033[0m, \033[1;36mIoT-Agent\033[0m, two \033[1mSpark\033[0m instances (jobmanager taskmanager) and a \033[1mTutorial\033[0m and \033[1mMongoDB\033[0m database." 
- echo -e "- \033[1;34mOrion\033[0m is the context broker" - echo -e "- \033[1;36mIoT-Agent\033[0m is configured for the UltraLight Protocol" - echo -e "- \033[1mTutorial\033[0m acts as a series of dummy IoT Sensors over HTTP" - echo -e "- \033[1mApache Spark\033[0m is a stream-processing framework" - echo "" - ${dockerCmd} up -d --remove-orphans - waitForMongo - addDatabaseIndex - waitForOrion - loadData - displayServices - echo -e "Now open \033[4mhttp://localhost:3000/device/monitor\033[0m" - ;; - "stop") - export $(cat .env | grep "#" -v) - stoppingContainers - ;; - "create") - export $(cat .env | grep "#" -v) - echo "Pulling Docker images" - docker pull -q quay.io/curl/curl - ${dockerCmd} pull --ignore-pull-failures - echo -e "Pulling \033[1;34mCosmos Spark\033[0m connector" - curl -LO https://github.com/ging/fiware-cosmos-orion-spark-connector/releases/download/FIWARE_7.9.1/orion.spark.connector-1.2.2.jar - mv orion.spark.connector-1.2.2.jar cosmos-examples/orion.spark.connector-1.2.2.jar - ;; - *) - echo "Command not Found." - echo "usage: services [create|start|stop]" - exit 127; - ;; -esac - +echo -e "Checkout the \033[1;36mNGSI-v2\033[0m branch of this repository to run the Smart Supermarket tutorial.\n" +echo -e "Checkout the \033[1;31mNGSI-LD\033[0m branch of this repository to run the Smart Farm tutorial.\n" diff --git a/subscription b/subscription deleted file mode 100755 index be84b6d..0000000 --- a/subscription +++ /dev/null @@ -1,21 +0,0 @@ -curl -iX POST \ - 'http://localhost:1026/v2/subscriptions' \ - -H 'Content-Type: application/json' \ - -H 'fiware-service: openiot' \ - -H 'fiware-servicepath: /' \ - -d '{ - "description": "Notify Spark of all context changes", - "subject": { - "entities": [ - { - "idPattern": "Motion.*" - } - ] - }, - "notification": { - "http": { - "url": "http://spark-worker-1:9001/notify" - } - }, - "throttling": 5 -}' \ No newline at end of file