From 363095a38c8780ab90a3dc804d2e8509a596892a Mon Sep 17 00:00:00 2001
From: yonzhang
Date: Sat, 18 Apr 2020 15:09:04 -0700
Subject: [PATCH 1/6] EAGLE-1104, make ServerDebug run in local box

---
 .../src/main/resources/application.conf      |   10 +-
 .../alert/engine/spout/CorrelationSpout.java |    1 +
 .../app/example/ExampleStormApplication.java |    2 +-
 .../src/main/resources/log4j.properties      |   19 -
 .../src/main/webapp/app/package-lock.json    | 2274 +++++++++++++++++
 .../src/test/resources/configuration.yml     |   18 +
 pom.xml                                      |    2 +-
 7 files changed, 2300 insertions(+), 26 deletions(-)
 delete mode 100644 eagle-server/src/main/resources/log4j.properties
 create mode 100644 eagle-server/src/main/webapp/app/package-lock.json

diff --git a/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/application.conf b/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/application.conf
index 46f5b084fc..33f1792576 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/application.conf
+++ b/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/application.conf
@@ -13,22 +13,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 {
-  "appId" : "HBaseAuditLogApp",
+  "appId" : "EaglePolicyEngine",
   "mode" : "LOCAL",
   "siteId" : "testsite",
   "topology" : {
     "name" : "alertUnitTopology_1",
     "numOfTotalWorkers" : 2,
     "numOfSpoutTasks" : 1,
-    "numOfRouterBolts" : 4,
-    "numOfAlertBolts" : 10,
+    "numOfRouterBolts" : 1,
+    "numOfAlertBolts" : 1,
     "numOfPublishExecutors" : 1,
     "numOfPublishTasks" : 1,
     "messageTimeoutSecs": 3600,
     "localMode" : "true"
   },
   "spout" : {
-    "kafkaBrokerZkQuorum": "server.eagle.apache.org:2181",
+    "kafkaBrokerZkQuorum": "127.0.0.1:2000",
     "kafkaBrokerZkBasePath": "/kafka",
     "stormKafkaUseSameZkQuorumWithKafkaBroker": true,
     "stormKafkaTransactionZkQuorum": "",
@@ -36,7 +36,7 @@
     "stormKafkaEagleConsumer": "eagle_consumer"
   },
   "zkConfig" : {
-    "zkQuorum" : "server.eagle.apache.org:2181",
+    "zkQuorum" : "127.0.0.1:2000",
     "zkRoot" : "/alert"
   },
   "metadataService": {
diff --git a/eagle-core/eagle-alert-parent/eagle-alert/alert-engine/src/main/java/org/apache/eagle/alert/engine/spout/CorrelationSpout.java b/eagle-core/eagle-alert-parent/eagle-alert/alert-engine/src/main/java/org/apache/eagle/alert/engine/spout/CorrelationSpout.java
index 4338964242..dbce70fcdb 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert/alert-engine/src/main/java/org/apache/eagle/alert/engine/spout/CorrelationSpout.java
+++ b/eagle-core/eagle-alert-parent/eagle-alert/alert-engine/src/main/java/org/apache/eagle/alert/engine/spout/CorrelationSpout.java
@@ -65,6 +65,7 @@ public class CorrelationSpout extends BaseRichSpout implements SpoutSpecListener
 
     private SpoutSpec cachedSpoutSpec;
 
+    private transient KafkaSpoutMetric kafkaSpoutMetric;
 
     @SuppressWarnings("rawtypes")
diff --git a/eagle-examples/eagle-app-example/src/main/java/org/apache/eagle/app/example/ExampleStormApplication.java b/eagle-examples/eagle-app-example/src/main/java/org/apache/eagle/app/example/ExampleStormApplication.java
index f1f4e5894e..1bf9983cf4 100644
--- a/eagle-examples/eagle-app-example/src/main/java/org/apache/eagle/app/example/ExampleStormApplication.java
+++ b/eagle-examples/eagle-app-example/src/main/java/org/apache/eagle/app/example/ExampleStormApplication.java
@@ -35,7 +35,7 @@ public class ExampleStormApplication extends StormApplication{
     @Override
     public StormTopology execute(Config config, StormEnvironment environment) {
         TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout("metric_spout", new RandomEventSpout(), config.getInt("spoutNum"));
+        builder.setSpout("metric_spout", new RandomEventSpout(), config.hasPath("spoutNum") ? config.getInt("spoutNum") : 1);
         builder.setBolt("sink_1",environment.getStreamSink("SAMPLE_STREAM_1",config)).fieldsGrouping("metric_spout",new Fields("metric"));
         builder.setBolt("sink_2",environment.getStreamSink("SAMPLE_STREAM_2",config)).fieldsGrouping("metric_spout",new Fields("metric"));
         return builder.createTopology();
diff --git a/eagle-server/src/main/resources/log4j.properties b/eagle-server/src/main/resources/log4j.properties
deleted file mode 100644
index 9c6875d8d4..0000000000
--- a/eagle-server/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-# standard output
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n
\ No newline at end of file
diff --git a/eagle-server/src/main/webapp/app/package-lock.json b/eagle-server/src/main/webapp/app/package-lock.json
new file mode 100644
index 0000000000..ee74e97e20
--- /dev/null
+++ b/eagle-server/src/main/webapp/app/package-lock.json
@@ -0,0 +1,2274 @@
+{
+  "name": "ApacheEagleWebApp",
+  "requires": true,
+  "lockfileVersion": 1,
+  "dependencies": {
"co": "4.6.0", + "fast-deep-equal": "1.1.0", + "fast-json-stable-stringify": "2.0.0", + "json-schema-traverse": "0.3.1" + } + }, + "align-text": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", + "dev": true, + "requires": { + "kind-of": "3.2.2", + "longest": "1.0.1", + "repeat-string": "1.6.1" + } + }, + "amdefine": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", + "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", + "dev": true + }, + "angular": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/angular/-/angular-1.5.0.tgz", + "integrity": "sha1-2W7perbfbP0JFazL7khNCYrbdOw=" + }, + "angular-animate": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/angular-animate/-/angular-animate-1.5.0.tgz", + "integrity": "sha1-DjHzH6erLd9epXh+R2VIZE1i+5M=" + }, + "angular-cookies": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/angular-cookies/-/angular-cookies-1.5.0.tgz", + "integrity": "sha1-WIamdvpBS4Z+1I0NSikEQda5um0=" + }, + "angular-resource": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/angular-resource/-/angular-resource-1.5.0.tgz", + "integrity": "sha1-BYOkN7VMIvzc4ivPaCZf51Hy02g=" + }, + "angular-route": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/angular-route/-/angular-route-1.5.0.tgz", + "integrity": "sha1-1vlPWPvoD4rA7zxEdTwgxCTbbDM=" + }, + "angular-ui-bootstrap": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/angular-ui-bootstrap/-/angular-ui-bootstrap-1.3.3.tgz", + "integrity": "sha1-SCD4ldQzwXq+tKxzh/LxIsv4wRw=" + }, + "angular-ui-router": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/angular-ui-router/-/angular-ui-router-0.3.1.tgz", + "integrity": "sha1-kUZ47utFRd78eGzxLQ0ZlCRqPoo=", + "requires": { + "angular": "1.5.0" + } + }, + "ansi-regex": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz", + "integrity": "sha1-DY6UaWej2BQ/k+JOKYUl/BsiNfk=", + "dev": true + }, + "ansi-styles": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz", + "integrity": "sha1-6uy/Zs1waIJ2Cy9GkVgrj1XXp94=", + "dev": true + }, + "argparse": { + "version": "0.1.16", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz", + "integrity": "sha1-z9AeD7uj1srtBJ+9dY1A9lGW9Xw=", + "dev": true, + "requires": { + "underscore": "1.7.0", + "underscore.string": "2.4.0" + }, + "dependencies": { + "underscore.string": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz", + "integrity": "sha1-jN2PusTi0uoefi6Al8QvRCKA+Fs=", + "dev": true + } + } + }, + "array-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", + "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=" + }, + "array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=", + "dev": true + }, + "asn1": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=" + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + }, + 
"async": { + "version": "0.1.22", + "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz", + "integrity": "sha1-D8GqoIig4+8Ovi2IMbqw3PiEUGE=", + "dev": true + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + }, + "aws4": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", + "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=" + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", + "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", + "optional": true, + "requires": { + "tweetnacl": "0.14.5" + } + }, + "boom": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", + "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", + "requires": { + "hoek": "4.2.1" + } + }, + "bootstrap": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-3.3.6.tgz", + "integrity": "sha1-jej3SdyKdD8qxbUQ2Yg3Hj2qZYk=" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "1.0.0", + "concat-map": "0.0.1" + } + }, + "browserify-zlib": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", + "integrity": "sha1-uzX4pRn2AOD6a4SFJByXnQFB+y0=", + "dev": true, + "requires": { + "pako": "0.2.9" + } + }, + "buffer-from": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.0.0.tgz", + "integrity": "sha512-83apNb8KK0Se60UE1+4Ukbe3HbfELJ6UlI4ldtOGs7So4KD26orJM8hIY9lxdzP+UpItH1Yh/Y8GUvNFWFFRxA==", + "dev": true + }, + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true + }, + "camelcase": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", + "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=", + "dev": true + }, + "camelcase-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", + "dev": true, + "requires": { + "camelcase": "2.1.1", + "map-obj": "1.0.1" + } + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "center-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", + "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", + "dev": true, + "requires": { + "align-text": "0.1.4", + "lazy-cache": "1.0.4" + } + }, + "chalk": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz", + "integrity": 
"sha1-Zjs6ZItotV0EaQ1JFnqoN4WPIXQ=", + "dev": true, + "requires": { + "ansi-styles": "1.1.0", + "escape-string-regexp": "1.0.5", + "has-ansi": "0.1.0", + "strip-ansi": "0.3.0", + "supports-color": "0.2.0" + } + }, + "clean-css": { + "version": "3.4.28", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-3.4.28.tgz", + "integrity": "sha1-vxlF6C/ICPVWlebd6uwBQA79A/8=", + "dev": true, + "requires": { + "commander": "2.8.1", + "source-map": "0.4.4" + }, + "dependencies": { + "source-map": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", + "integrity": "sha1-66T12pwNyZneaAMti092FzZSA2s=", + "dev": true, + "requires": { + "amdefine": "1.0.1" + } + } + } + }, + "cli": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/cli/-/cli-0.6.6.tgz", + "integrity": "sha1-Aq1Eo4Cr8nraxebwzdewQ9dMU+M=", + "dev": true, + "requires": { + "exit": "0.1.2", + "glob": "3.2.11" + }, + "dependencies": { + "glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", + "dev": true, + "requires": { + "inherits": "2.0.3", + "minimatch": "0.3.0" + } + }, + "minimatch": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", + "dev": true, + "requires": { + "lru-cache": "2.7.3", + "sigmund": "1.0.1" + } + } + } + }, + "cliui": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", + "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", + "dev": true, + "requires": { + "center-align": "0.1.3", + "right-align": "0.1.3", + "wordwrap": "0.0.2" + }, + "dependencies": { + "wordwrap": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", + "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=", + "dev": true + } + } + }, + "co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" + }, + "coffee-script": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz", + "integrity": "sha1-FQ1rTLUiiUNp7+1qIQHCC8f0pPQ=", + "dev": true + }, + "colors": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "integrity": "sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=", + "dev": true + }, + "combined-stream": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", + "requires": { + "delayed-stream": "1.0.0" + } + }, + "commander": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", + "integrity": "sha1-Br42f+v9oMMwqh4qBy09yXYkJdQ=", + "dev": true, + "requires": { + "graceful-readlink": "1.0.1" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "requires": { + "buffer-from": "1.0.0", + "inherits": "2.0.3", + "readable-stream": "2.3.5", + "typedarray": "0.0.6" + } + }, + "console-browserify": { + 
"version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", + "integrity": "sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA=", + "dev": true, + "requires": { + "date-now": "0.1.4" + } + }, + "content-type-parser": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/content-type-parser/-/content-type-parser-1.0.2.tgz", + "integrity": "sha512-lM4l4CnMEwOLHAHr/P6MEZwZFPJFtAAKgL6pogbXmVZggIqXhdB6RbBtPOTsw2FcXwYhehRGERJmRrjOiIB8pQ==" + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "cryptiles": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", + "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", + "requires": { + "boom": "5.2.0" + }, + "dependencies": { + "boom": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", + "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==", + "requires": { + "hoek": "4.2.1" + } + } + } + }, + "cssom": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.2.tgz", + "integrity": "sha1-uANhcMefB6kP8vFuIihAJ6JDhIs=" + }, + "cssstyle": { + "version": "0.2.37", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-0.2.37.tgz", + "integrity": "sha1-VBCXI0yyUTyDzu06zdwn/yeYfVQ=", + "requires": { + "cssom": "0.3.2" + } + }, + "currently-unhandled": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", + "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=", + "dev": true, + "requires": { + "array-find-index": "1.0.2" + } + }, + "d3": { + "version": "3.5.16", + "resolved": "https://registry.npmjs.org/d3/-/d3-3.5.16.tgz", + "integrity": "sha1-pRjWztFUowePLLeK4fitlTQrJzQ=" + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "requires": { + "assert-plus": "1.0.0" + } + }, + "date-now": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz", + "integrity": "sha1-6vQ5/U1ISK105cx9vvIAZyueNFs=", + "dev": true + }, + "dateformat": { + "version": "1.0.2-1.2.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz", + "integrity": "sha1-sCIMAt6YYXQztyhRz0fePfLNvuk=", + "dev": true + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "deep-is": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=" + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "dom-serializer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz", + "integrity": "sha1-BzxpdUbOB4DOI75KKOKT5AvDDII=", + "dev": true, + "requires": { + "domelementtype": "1.1.3", + "entities": "1.1.1" + }, + "dependencies": { + "domelementtype": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz", + "integrity": 
"sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=", + "dev": true + }, + "entities": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.1.tgz", + "integrity": "sha1-blwtClYhtdra7O+AuQ7ftc13cvA=", + "dev": true + } + } + }, + "domelementtype": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.0.tgz", + "integrity": "sha1-sXrtguirWeUt2cGbF1bg/BhyBMI=", + "dev": true + }, + "domhandler": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.3.0.tgz", + "integrity": "sha1-LeWaCCLVAn+r/28DLCsloqir5zg=", + "dev": true, + "requires": { + "domelementtype": "1.3.0" + } + }, + "domutils": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz", + "integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=", + "dev": true, + "requires": { + "dom-serializer": "0.1.0", + "domelementtype": "1.3.0" + } + }, + "ecc-jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", + "optional": true, + "requires": { + "jsbn": "0.1.1" + } + }, + "echarts": { + "version": "3.8.5", + "resolved": "https://registry.npmjs.org/echarts/-/echarts-3.8.5.tgz", + "integrity": "sha512-E+nnROMfCeiLeoT/fZyX8SE8mKzwkTjyemyoBF543oqjRtjTSKQAVDEihMXy4oC6pJS0tYGdMqCA2ATk8onyRg==", + "requires": { + "zrender": "3.7.4" + } + }, + "entities": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.0.0.tgz", + "integrity": "sha1-sph6o4ITR/zeZCsk/fyeT7cSvyY=", + "dev": true + }, + "error-ex": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.1.tgz", + "integrity": "sha1-+FWobOYa3E6GIcPNoh56dhLDqNw=", + "dev": true, + "requires": { + "is-arrayish": "0.2.1" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "escodegen": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.9.1.tgz", + "integrity": "sha512-6hTjO1NAWkHnDk3OqQ4YrCuwwmGHL9S3nPlzBOUG/R44rda3wLNrfvQ5fkSGjyhHFKM7ALPKcKGrwvCLe0lC7Q==", + "requires": { + "esprima": "3.1.3", + "estraverse": "4.2.0", + "esutils": "2.0.2", + "optionator": "0.8.2", + "source-map": "0.6.1" + } + }, + "esprima": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz", + "integrity": "sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM=" + }, + "estraverse": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", + "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=" + }, + "esutils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", + "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=" + }, + "eventemitter2": { + "version": "0.4.14", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz", + "integrity": "sha1-j2G3XN4BKy6esoTUVFWDtWQ7Yas=", + "dev": true + }, + "exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=", + "dev": true + }, + "extend": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", + "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=" + }, + "extsprintf": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + }, + "fast-deep-equal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=" + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + }, + "figures": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-1.7.0.tgz", + "integrity": "sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4=", + "dev": true, + "requires": { + "escape-string-regexp": "1.0.5", + "object-assign": "4.1.1" + } + }, + "file-sync-cmp": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/file-sync-cmp/-/file-sync-cmp-0.1.1.tgz", + "integrity": "sha1-peeo/7+kk7Q7kju9TKiaU7Y7YSs=", + "dev": true + }, + "find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, + "requires": { + "path-exists": "2.1.0", + "pinkie-promise": "2.0.1" + } + }, + "findup-sync": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", + "integrity": "sha1-fz56l7gjksZTvwZYm9hRkOk8NoM=", + "dev": true, + "requires": { + "glob": "3.2.11", + "lodash": "2.4.2" + }, + "dependencies": { + "glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", + "dev": true, + "requires": { + "inherits": "2.0.3", + "minimatch": "0.3.0" + } + }, + "lodash": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", + "dev": true + }, + "minimatch": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", + "dev": true, + "requires": { + "lru-cache": "2.7.3", + "sigmund": "1.0.1" + } + } + } + }, + "font-awesome": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/font-awesome/-/font-awesome-4.7.0.tgz", + "integrity": "sha1-j6jPBBGhoxr9B7BtKQK7n8gVoTM=" + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + }, + "form-data": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", + "requires": { + "asynckit": "0.4.0", + "combined-stream": "1.0.6", + "mime-types": "2.1.18" + } + }, + "get-stdin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=", + "dev": true + }, + "getobject": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz", + "integrity": "sha1-BHpEl4n6Fg0Bj1SG7ZEyC27HiFw=", + "dev": true + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + 
"requires": { + "assert-plus": "1.0.0" + } + }, + "glob": { + "version": "3.1.21", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", + "integrity": "sha1-0p4KBV3qUTj00H7UDomC6DwgZs0=", + "dev": true, + "requires": { + "graceful-fs": "1.2.3", + "inherits": "1.0.2", + "minimatch": "0.2.14" + }, + "dependencies": { + "inherits": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz", + "integrity": "sha1-ykMJ2t7mtUzAuNJH6NfHoJdb3Js=", + "dev": true + } + } + }, + "graceful-fs": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz", + "integrity": "sha1-FaSAaldUfLLS2/J/QuiajDRRs2Q=", + "dev": true + }, + "graceful-readlink": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "integrity": "sha1-TK+tdrxi8C+gObL5Tpo906ORpyU=", + "dev": true + }, + "grunt": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz", + "integrity": "sha1-VpN81RlDJK3/bSB2MYMqnWuk5/A=", + "dev": true, + "requires": { + "async": "0.1.22", + "coffee-script": "1.3.3", + "colors": "0.6.2", + "dateformat": "1.0.2-1.2.3", + "eventemitter2": "0.4.14", + "exit": "0.1.2", + "findup-sync": "0.1.3", + "getobject": "0.1.0", + "glob": "3.1.21", + "grunt-legacy-log": "0.1.3", + "grunt-legacy-util": "0.2.0", + "hooker": "0.2.3", + "iconv-lite": "0.2.11", + "js-yaml": "2.0.5", + "lodash": "0.9.2", + "minimatch": "0.2.14", + "nopt": "1.0.10", + "rimraf": "2.2.8", + "underscore.string": "2.2.1", + "which": "1.0.9" + }, + "dependencies": { + "iconv-lite": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "integrity": "sha1-HOYKOleGSiktEyH/RgnKS7llrcg=", + "dev": true + } + } + }, + "grunt-cli": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/grunt-cli/-/grunt-cli-0.1.13.tgz", + "integrity": "sha1-6evEBHYx9QEtkidww5N4EzytEPQ=", + "dev": true, + "requires": { + "findup-sync": "0.1.3", + "nopt": "1.0.10", + "resolve": "0.3.1" + } + }, + "grunt-contrib-clean": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-clean/-/grunt-contrib-clean-0.7.0.tgz", + "integrity": "sha1-EvynC79SW5GLc+XMsUUPQ762Kc0=", + "dev": true, + "requires": { + "rimraf": "2.2.8" + } + }, + "grunt-contrib-concat": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/grunt-contrib-concat/-/grunt-contrib-concat-0.5.1.tgz", + "integrity": "sha1-lTxu/f39LBB6uchQd/LUsk0xzUk=", + "dev": true, + "requires": { + "chalk": "0.5.1", + "source-map": "0.3.0" + }, + "dependencies": { + "source-map": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.3.0.tgz", + "integrity": "sha1-hYb7mloAXltQHiHNGLbyG0V60fk=", + "dev": true, + "requires": { + "amdefine": "1.0.1" + } + } + } + }, + "grunt-contrib-copy": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/grunt-contrib-copy/-/grunt-contrib-copy-0.8.2.tgz", + "integrity": "sha1-3zHJD/zECbyfr+ROwN0eQlmRb+o=", + "dev": true, + "requires": { + "chalk": "1.1.3", + "file-sync-cmp": "0.1.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", 
+ "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, + "requires": { + "ansi-styles": "2.2.1", + "escape-string-regexp": "1.0.5", + "has-ansi": "2.0.0", + "strip-ansi": "3.0.1", + "supports-color": "2.0.0" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true + } + } + }, + "grunt-contrib-cssmin": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/grunt-contrib-cssmin/-/grunt-contrib-cssmin-0.14.0.tgz", + "integrity": "sha1-iLCpJTaWm7VmKBxcYexQYtgz87c=", + "dev": true, + "requires": { + "chalk": "1.1.3", + "clean-css": "3.4.28", + "maxmin": "1.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, + "requires": { + "ansi-styles": "2.2.1", + "escape-string-regexp": "1.0.5", + "has-ansi": "2.0.0", + "strip-ansi": "3.0.1", + "supports-color": "2.0.0" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true + } + } + }, + "grunt-contrib-jshint": { + "version": "0.11.3", + "resolved": "https://registry.npmjs.org/grunt-contrib-jshint/-/grunt-contrib-jshint-0.11.3.tgz", + "integrity": "sha1-gDaBgdzNVRGG5bg4XAEc7iTWQKA=", + "dev": true, + "requires": { + "hooker": "0.2.3", + "jshint": "2.8.0" + } + }, + "grunt-contrib-uglify": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/grunt-contrib-uglify/-/grunt-contrib-uglify-0.5.1.tgz", + "integrity": "sha1-FfCqXo6LpCGuqYCHnuUFvHErbN4=", + "dev": true, + "requires": { + "chalk": "0.5.1", + "lodash": "2.4.2", + "maxmin": "0.2.2", + "uglify-js": "2.8.29" + }, + "dependencies": { + "gzip-size": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-0.2.0.tgz", + "integrity": "sha1-46KhkSBf5W7jJvXCcUNd+uz7Phw=", + "dev": true, + "requires": { + 
"browserify-zlib": "0.1.4", + "concat-stream": "1.6.2" + } + }, + "lodash": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", + "dev": true + }, + "maxmin": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/maxmin/-/maxmin-0.2.2.tgz", + "integrity": "sha1-o2ztjMIuOrzRCM+3l6OktAJ1WT8=", + "dev": true, + "requires": { + "chalk": "0.5.1", + "figures": "1.7.0", + "gzip-size": "0.2.0", + "pretty-bytes": "0.1.2" + } + }, + "pretty-bytes": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-0.1.2.tgz", + "integrity": "sha1-zZApTVihyk6KXQ+5yCJZmIgazwA=", + "dev": true + } + } + }, + "grunt-htmlrefs": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/grunt-htmlrefs/-/grunt-htmlrefs-0.5.0.tgz", + "integrity": "sha1-GkYOxsiQS4gr7EO+FCWj94xedTs=", + "dev": true + }, + "grunt-legacy-log": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz", + "integrity": "sha1-7ClCboAwIa9ZAp+H0vnNczWgVTE=", + "dev": true, + "requires": { + "colors": "0.6.2", + "grunt-legacy-log-utils": "0.1.1", + "hooker": "0.2.3", + "lodash": "2.4.2", + "underscore.string": "2.3.3" + }, + "dependencies": { + "lodash": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", + "dev": true + }, + "underscore.string": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz", + "integrity": "sha1-ccCL9rQosRM/N+ePo6Icgvcymw0=", + "dev": true + } + } + }, + "grunt-legacy-log-utils": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz", + "integrity": "sha1-wHBrndkGThFvNvI/5OawSGcsD34=", + "dev": true, + "requires": { + "colors": "0.6.2", + "lodash": "2.4.2", + "underscore.string": "2.3.3" + }, + "dependencies": { + "lodash": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "integrity": "sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=", + "dev": true + }, + "underscore.string": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz", + "integrity": "sha1-ccCL9rQosRM/N+ePo6Icgvcymw0=", + "dev": true + } + } + }, + "grunt-legacy-util": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz", + "integrity": "sha1-kzJIhNv343qf98Am3/RR2UqeVUs=", + "dev": true, + "requires": { + "async": "0.1.22", + "exit": "0.1.2", + "getobject": "0.1.0", + "hooker": "0.2.3", + "lodash": "0.9.2", + "underscore.string": "2.2.1", + "which": "1.0.9" + } + }, + "grunt-regex-replace": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/grunt-regex-replace/-/grunt-regex-replace-0.2.10.tgz", + "integrity": "sha1-LsReo4PhaNnkOCLFxtwKPj/ZRrg=", + "dev": true + }, + "gzip-size": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-1.0.0.tgz", + "integrity": "sha1-Zs+LEBBHInuVus5uodoMF37Vwi8=", + "dev": true, + "requires": { + "browserify-zlib": "0.1.4", + "concat-stream": "1.6.2" + } + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + }, + "har-validator": { + "version": "5.0.3", + "resolved": 
"https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", + "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", + "requires": { + "ajv": "5.5.2", + "har-schema": "2.0.0" + } + }, + "has-ansi": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz", + "integrity": "sha1-hPJlqujA5qiKEtcCKJS3VoiUxi4=", + "dev": true, + "requires": { + "ansi-regex": "0.2.1" + } + }, + "hawk": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", + "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==", + "requires": { + "boom": "4.3.1", + "cryptiles": "3.1.2", + "hoek": "4.2.1", + "sntp": "2.1.0" + } + }, + "hoek": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz", + "integrity": "sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==" + }, + "hooker": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz", + "integrity": "sha1-uDT3I8xKJCqmWWNFnfbZhMXT2Vk=", + "dev": true + }, + "hosted-git-info": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.6.0.tgz", + "integrity": "sha512-lIbgIIQA3lz5XaB6vxakj6sDHADJiZadYEJB+FgA+C4nubM1NwcuvUr9EJPmnH1skZqpqUzWborWo8EIUi0Sdw==", + "dev": true + }, + "html-encoding-sniffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz", + "integrity": "sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw==", + "requires": { + "whatwg-encoding": "1.0.3" + } + }, + "htmlparser2": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.8.3.tgz", + "integrity": "sha1-mWwosZFRaovoZQGn15dX5ccMEGg=", + "dev": true, + "requires": { + "domelementtype": "1.3.0", + "domhandler": "2.3.0", + "domutils": "1.5.1", + "entities": "1.0.0", + "readable-stream": "1.1.14" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "dev": true + }, + "readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "dev": true, + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=", + "dev": true + } + } + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "1.0.0", + "jsprim": "1.4.1", + "sshpk": "1.14.1" + } + }, + "iconv-lite": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", + "integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ==" + }, + "indent-string": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", + "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", + "dev": true, + "requires": { + "repeating": "2.0.1" + } + }, + 
"inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true + }, + "is-builtin-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", + "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", + "dev": true, + "requires": { + "builtin-modules": "1.1.1" + } + }, + "is-finite": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz", + "integrity": "sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko=", + "dev": true, + "requires": { + "number-is-nan": "1.0.1" + } + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "jquery": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-2.2.4.tgz", + "integrity": "sha1-LInWiJterFIqfuoywUUhVZxsvwI=" + }, + "jquery-slimscroll": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/jquery-slimscroll/-/jquery-slimscroll-1.3.6.tgz", + "integrity": "sha1-NF2drhcHWS1yAoIQAoTfs29xDn0=", + "requires": { + "jquery": "2.2.4" + } + }, + "js-yaml": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", + "integrity": "sha1-olrmUJmZ6X3yeMZxnaEb0Gh3Q6g=", + "dev": true, + "requires": { + "argparse": "0.1.16", + "esprima": "1.0.4" + }, + "dependencies": { + "esprima": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", + "integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0=", + "dev": true + } + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "optional": true + }, + "jsdom": { + "version": "9.12.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-9.12.0.tgz", + "integrity": "sha1-6MVG//ywbADUgzyoRBD+1/igl9Q=", + "requires": { + "abab": "1.0.4", + "acorn": "4.0.13", + "acorn-globals": "3.1.0", + "array-equal": "1.0.0", + "content-type-parser": "1.0.2", + "cssom": "0.3.2", + "cssstyle": "0.2.37", + "escodegen": "1.9.1", + "html-encoding-sniffer": "1.0.2", + "nwmatcher": "1.4.4", + "parse5": "1.5.1", + "request": "2.85.0", + "sax": "1.2.4", + "symbol-tree": "3.2.2", + "tough-cookie": "2.3.4", + "webidl-conversions": "4.0.2", + "whatwg-encoding": "1.0.3", + "whatwg-url": "4.8.0", + "xml-name-validator": "2.0.1" + } + }, + "jshint": { + "version": 
"2.8.0", + "resolved": "https://registry.npmjs.org/jshint/-/jshint-2.8.0.tgz", + "integrity": "sha1-HQmjvZE8TK36gb8Y1YK9hb/+DUQ=", + "dev": true, + "requires": { + "cli": "0.6.6", + "console-browserify": "1.1.0", + "exit": "0.1.2", + "htmlparser2": "3.8.3", + "lodash": "3.7.0", + "minimatch": "2.0.10", + "shelljs": "0.3.0", + "strip-json-comments": "1.0.4" + }, + "dependencies": { + "lodash": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.7.0.tgz", + "integrity": "sha1-Nni9irmVBXwHreg27S7wh9qBHUU=", + "dev": true + }, + "minimatch": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz", + "integrity": "sha1-jQh8OcazjAAbl/ynzm0OHoCvusc=", + "dev": true, + "requires": { + "brace-expansion": "1.1.11" + } + } + } + }, + "json-schema": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + }, + "json-schema-traverse": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", + "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + }, + "jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "requires": { + "is-buffer": "1.1.6" + } + }, + "lazy-cache": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", + "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=", + "dev": true + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "requires": { + "prelude-ls": "1.1.2", + "type-check": "0.3.2" + } + }, + "load-json-file": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "dev": true, + "requires": { + "graceful-fs": "4.1.11", + "parse-json": "2.2.0", + "pify": "2.3.0", + "pinkie-promise": "2.0.1", + "strip-bom": "2.0.0" + }, + "dependencies": { + "graceful-fs": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=", + "dev": true + } + } + }, + "lodash": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz", + "integrity": "sha1-jzSZxSRdNG1oLlsNO0B2fgnxqSw=", + "dev": true + }, + "longest": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", + "dev": true + }, + "loud-rejection": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", + "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=", + "dev": true, + "requires": { + "currently-unhandled": "0.4.1", + "signal-exit": "3.0.2" + } + }, + "lru-cache": { + "version": "2.7.3", + 
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, + "map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", + "dev": true + }, + "maxmin": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/maxmin/-/maxmin-1.1.0.tgz", + "integrity": "sha1-cTZehKmd2Piz99X94vANHn9zvmE=", + "dev": true, + "requires": { + "chalk": "1.1.3", + "figures": "1.7.0", + "gzip-size": "1.0.0", + "pretty-bytes": "1.0.4" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, + "requires": { + "ansi-styles": "2.2.1", + "escape-string-regexp": "1.0.5", + "has-ansi": "2.0.0", + "strip-ansi": "3.0.1", + "supports-color": "2.0.0" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "2.1.1" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true + } + } + }, + "meow": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", + "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", + "dev": true, + "requires": { + "camelcase-keys": "2.1.0", + "decamelize": "1.2.0", + "loud-rejection": "1.6.0", + "map-obj": "1.0.1", + "minimist": "1.2.0", + "normalize-package-data": "2.4.0", + "object-assign": "4.1.1", + "read-pkg-up": "1.0.1", + "redent": "1.0.0", + "trim-newlines": "1.0.0" + } + }, + "mime-db": { + "version": "1.33.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", + "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==" + }, + "mime-types": { + "version": "2.1.18", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", + "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", + "requires": { + "mime-db": "1.33.0" + } + }, + "minimatch": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", + "integrity": "sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=", + "dev": true, + "requires": { + "lru-cache": "2.7.3", + "sigmund": "1.0.1" + } + }, + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "dev": true + }, + "moment": { + "version": "2.11.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.11.2.tgz", + "integrity": 
"sha1-h5aOX5WsA4wuQqyVnHWBnNP1KQE=" + }, + "moment-timezone": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.0.tgz", + "integrity": "sha1-8oLI5ebihhSy9t0QSGY3VNMOoR8=", + "requires": { + "moment": "2.11.2" + } + }, + "nopt": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", + "dev": true, + "requires": { + "abbrev": "1.1.1" + } + }, + "normalize-package-data": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", + "integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==", + "dev": true, + "requires": { + "hosted-git-info": "2.6.0", + "is-builtin-module": "1.0.0", + "semver": "5.5.0", + "validate-npm-package-license": "3.0.3" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true + }, + "nwmatcher": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/nwmatcher/-/nwmatcher-1.4.4.tgz", + "integrity": "sha512-3iuY4N5dhgMpCUrOVnuAdGrgxVqV2cJpM+XNccjR2DKOB1RUP0aA+wGXEiNziG/UKboFyGBIoKOaNlJxx8bciQ==" + }, + "oauth-sign": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=" + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "dev": true + }, + "optionator": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", + "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "requires": { + "deep-is": "0.1.3", + "fast-levenshtein": "2.0.6", + "levn": "0.3.0", + "prelude-ls": "1.1.2", + "type-check": "0.3.2", + "wordwrap": "1.0.0" + } + }, + "pako": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz", + "integrity": "sha1-8/dSL073gjSNqBYbrZ7P1Rv4OnU=", + "dev": true + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "1.3.1" + } + }, + "parse5": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-1.5.1.tgz", + "integrity": "sha1-m387DeMr543CQBsXVzzK8Pb1nZQ=" + }, + "path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, + "requires": { + "pinkie-promise": "2.0.1" + } + }, + "path-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "dev": true, + "requires": { + "graceful-fs": "4.1.11", + "pify": "2.3.0", + "pinkie-promise": "2.0.1" + }, + "dependencies": { + "graceful-fs": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", + "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=", + "dev": true + } + } + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + }, 
+ "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + }, + "pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true + }, + "pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, + "requires": { + "pinkie": "2.0.4" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=" + }, + "pretty-bytes": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-1.0.4.tgz", + "integrity": "sha1-CiLoIQYJrTVUL4yNXSFZr/B1HIQ=", + "dev": true, + "requires": { + "get-stdin": "4.0.1", + "meow": "3.7.0" + } + }, + "process-nextick-args": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==", + "dev": true + }, + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + }, + "qs": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + }, + "read-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "dev": true, + "requires": { + "load-json-file": "1.1.0", + "normalize-package-data": "2.4.0", + "path-type": "1.1.0" + } + }, + "read-pkg-up": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "dev": true, + "requires": { + "find-up": "1.1.2", + "read-pkg": "1.1.0" + } + }, + "readable-stream": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.5.tgz", + "integrity": "sha512-tK0yDhrkygt/knjowCUiWP9YdV7c5R+8cR0r/kt9ZhBU906Fs6RpQJCEilamRJj1Nx2rWI6LkW9gKqjTkshhEw==", + "dev": true, + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "1.0.0", + "process-nextick-args": "2.0.0", + "safe-buffer": "5.1.1", + "string_decoder": "1.0.3", + "util-deprecate": "1.0.2" + } + }, + "redent": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", + "integrity": "sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=", + "dev": true, + "requires": { + "indent-string": "2.1.0", + "strip-indent": "1.0.1" + } + }, + "repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true + }, + "repeating": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", + "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=", + "dev": true, + "requires": { + "is-finite": "1.0.2" + } + }, + "request": { + "version": "2.85.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.85.0.tgz", + "integrity": 
"sha512-8H7Ehijd4js+s6wuVPLjwORxD4zeuyjYugprdOXlPSqaApmL/QOy+EB/beICHVCHkGMKNh5rvihb5ov+IDw4mg==", + "requires": { + "aws-sign2": "0.7.0", + "aws4": "1.6.0", + "caseless": "0.12.0", + "combined-stream": "1.0.6", + "extend": "3.0.1", + "forever-agent": "0.6.1", + "form-data": "2.3.2", + "har-validator": "5.0.3", + "hawk": "6.0.2", + "http-signature": "1.2.0", + "is-typedarray": "1.0.0", + "isstream": "0.1.2", + "json-stringify-safe": "5.0.1", + "mime-types": "2.1.18", + "oauth-sign": "0.8.2", + "performance-now": "2.1.0", + "qs": "6.5.1", + "safe-buffer": "5.1.1", + "stringstream": "0.0.5", + "tough-cookie": "2.3.4", + "tunnel-agent": "0.6.0", + "uuid": "3.2.1" + } + }, + "resolve": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.3.1.tgz", + "integrity": "sha1-NMY0R8ZkxwWY0cmxJvxDsqJDEKQ=", + "dev": true + }, + "right-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", + "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", + "dev": true, + "requires": { + "align-text": "0.1.4" + } + }, + "rimraf": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", + "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=", + "dev": true + }, + "safe-buffer": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" + }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, + "semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", + "dev": true + }, + "shelljs": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz", + "integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E=", + "dev": true + }, + "sigmund": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "dev": true + }, + "sntp": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", + "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==", + "requires": { + "hoek": "4.2.1" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true + }, + "spdx-correct": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.0.0.tgz", + "integrity": "sha512-N19o9z5cEyc8yQQPukRCZ9EUmb4HUpnrmaL/fxS2pBo2jbfcFRVuFZ/oFC+vZz0MNNk0h80iMn5/S6qGZOL5+g==", + "dev": true, + "requires": { + "spdx-expression-parse": "3.0.0", + "spdx-license-ids": "3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.1.0.tgz", + "integrity": 
"sha512-4K1NsmrlCU1JJgUrtgEeTVyfx8VaYea9J9LvARxhbHtVtohPs/gFGG5yy49beySjlIMhhXZ4QqujIZEfS4l6Cg==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "requires": { + "spdx-exceptions": "2.1.0", + "spdx-license-ids": "3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.0.tgz", + "integrity": "sha512-2+EPwgbnmOIl8HjGBXXMd9NAu02vLjOO1nWw4kmeRDFyHn+M/ETfHxQUK0oXg8ctgVnl9t3rosNVsZ1jG61nDA==", + "dev": true + }, + "sshpk": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.1.tgz", + "integrity": "sha1-Ew9Zde3a2WPx1W+SuaxsUfqfg+s=", + "requires": { + "asn1": "0.2.3", + "assert-plus": "1.0.0", + "bcrypt-pbkdf": "1.0.1", + "dashdash": "1.14.1", + "ecc-jsbn": "0.1.1", + "getpass": "0.1.7", + "jsbn": "0.1.1", + "tweetnacl": "0.14.5" + } + }, + "string_decoder": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz", + "integrity": "sha512-4AH6Z5fzNNBcH+6XDMfA/BTt87skxqJlO0lAh3Dker5zThcAxG6mKz+iGu308UKoPPQ8Dcqx/4JhujzltRa+hQ==", + "dev": true, + "requires": { + "safe-buffer": "5.1.1" + } + }, + "stringstream": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=" + }, + "strip-ansi": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz", + "integrity": "sha1-JfSOoiynkYfzF0pNuHWTR7sSYiA=", + "dev": true, + "requires": { + "ansi-regex": "0.2.1" + } + }, + "strip-bom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "dev": true, + "requires": { + "is-utf8": "0.2.1" + } + }, + "strip-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", + "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", + "dev": true, + "requires": { + "get-stdin": "4.0.1" + } + }, + "strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "supports-color": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz", + "integrity": "sha1-2S3iaU6z9nMjlz1649i1W0wiGQo=", + "dev": true + }, + "symbol-tree": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.2.tgz", + "integrity": "sha1-rifbOPZgp64uHDt9G8KQgZuFGeY=" + }, + "tough-cookie": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", + "integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==", + "requires": { + "punycode": "1.4.1" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" + }, + "trim-newlines": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", + "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", + "dev": true + }, + 
"tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "5.1.1" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "optional": true + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "requires": { + "prelude-ls": "1.1.2" + } + }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true + }, + "uglify-js": { + "version": "2.8.29", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", + "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", + "dev": true, + "requires": { + "source-map": "0.5.7", + "uglify-to-browserify": "1.0.2", + "yargs": "3.10.0" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true + } + } + }, + "uglify-to-browserify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", + "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", + "dev": true, + "optional": true + }, + "underscore": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=", + "dev": true + }, + "underscore.string": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz", + "integrity": "sha1-18D6KvXVoaZ/QlPa7pgTLnM/Dxk=", + "dev": true + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true + }, + "uuid": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz", + "integrity": "sha512-jZnMwlb9Iku/O3smGWvZhauCf6cvvpKi4BKRiliS3cxnI+Gz9j5MEpTz2UFuXiKPJocb7gnsLHwiS05ige5BEA==" + }, + "validate-npm-package-license": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.3.tgz", + "integrity": "sha512-63ZOUnL4SIXj4L0NixR3L1lcjO38crAbgrTpl28t8jjrfuiOBL5Iygm+60qPs/KsZGzPNg6Smnc/oY16QTjF0g==", + "dev": true, + "requires": { + "spdx-correct": "3.0.0", + "spdx-expression-parse": "3.0.0" + } + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "requires": { + "assert-plus": "1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "1.3.0" + } + }, + "webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==" + }, + "whatwg-encoding": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.3.tgz", + "integrity": "sha512-jLBwwKUhi8WtBfsMQlL4bUUcT8sMkAtQinscJAe/M4KHCkHuUJAF6vuB0tueNIw4c8ziO6AkRmgY+jL3a0iiPw==", + "requires": 
{ + "iconv-lite": "0.4.19" + } + }, + "whatwg-url": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-4.8.0.tgz", + "integrity": "sha1-0pgaqRSMHgCkHFphMRZqtGg7vMA=", + "requires": { + "tr46": "0.0.3", + "webidl-conversions": "3.0.1" + }, + "dependencies": { + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" + } + } + }, + "which": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz", + "integrity": "sha1-RgwdoPgQED0DIam2M6+eV15kSG8=", + "dev": true + }, + "window-size": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", + "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=", + "dev": true + }, + "wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" + }, + "xml-name-validator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-2.0.1.tgz", + "integrity": "sha1-TYuPHszTQZqjYgYb7O9RXh5VljU=" + }, + "yargs": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", + "dev": true, + "requires": { + "camelcase": "1.2.1", + "cliui": "2.1.0", + "decamelize": "1.2.0", + "window-size": "0.1.0" + }, + "dependencies": { + "camelcase": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", + "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=", + "dev": true + } + } + }, + "zombiej-bootstrap-components": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/zombiej-bootstrap-components/-/zombiej-bootstrap-components-1.1.6.tgz", + "integrity": "sha1-nSqu2eJv/oBJ4Ddi4NJy8nK12tk=", + "requires": { + "bootstrap": "3.3.6", + "jquery": "2.2.4" + } + }, + "zombiej-nvd3": { + "version": "1.8.2-3", + "resolved": "https://registry.npmjs.org/zombiej-nvd3/-/zombiej-nvd3-1.8.2-3.tgz", + "integrity": "sha1-Dxs6bSDgjG+oQq28RTIuoe3rL7s=" + }, + "zrender": { + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/zrender/-/zrender-3.7.4.tgz", + "integrity": "sha512-5Nz7+L1wIoL0+Pp/iOP56jD6eD017qC9VRSgUBheXBiAHgOBJZ4uh4/g6e83acIwa8RKSyZf/FlceKu5ntUuxQ==" + } + } +} diff --git a/eagle-server/src/test/resources/configuration.yml b/eagle-server/src/test/resources/configuration.yml index b49da04ea8..40c7695386 100644 --- a/eagle-server/src/test/resources/configuration.yml +++ b/eagle-server/src/test/resources/configuration.yml @@ -21,6 +21,24 @@ server: - type: http port: 9091 + +logging: + level: INFO + loggers: + org.apache.eagle: INFO + appenders: + - type: file + currentLogFilename: log/eagle-server.log + threshold: ALL + archive: true + archivedLogFilenamePattern: log/eagle-server-%d.log + archivedFileCount: 5 + logFormat: "%-5p [%d{ISO8601,UTC}] [%t] %c: %m%n%rEx" + - type: console + threshold: INFO + logFormat: "%-5p [%d{ISO8601,UTC}] [%t] %c: %m%n%rEx" + + # --------------------------------------------- # Eagle Authentication Configuration # --------------------------------------------- diff --git a/pom.xml b/pom.xml index a6b614c32b..6e9da486fc 100755 --- a/pom.xml +++ b/pom.xml @@ -24,7 +24,7 @@ org.apache.eagle eagle-parent - 0.5.1-SNAPSHOT + 1.0.0-SNAPSHOT pom Eagle::Parent https://eagle.apache.org From e7030ccbb245d63bf70cd53877dbaeb482ae55ba 
Mon Sep 17 00:00:00 2001 From: yonzhang Date: Sat, 18 Apr 2020 15:09:29 -0700 Subject: [PATCH 2/6] EAGLE-1104, missed file --- .../src/main/resources/application.conf | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 eagle-examples/eagle-app-example/src/main/resources/application.conf diff --git a/eagle-examples/eagle-app-example/src/main/resources/application.conf b/eagle-examples/eagle-app-example/src/main/resources/application.conf new file mode 100644 index 0000000000..9d5c3c5669 --- /dev/null +++ b/eagle-examples/eagle-app-example/src/main/resources/application.conf @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{ + "appId" : "ExampleMonitorApp", + "mode" : "LOCAL", + "siteId" : "testsite", + "topology" : { + "numOfTotalWorkers" : 2, + "numOfSpoutTasks" : 2, + "numOfParserTasks" : 2, + "numOfJoinTasks" : 2, + "numOfSinkTasks" : 2 + }, + "dataSourceConfig": { + "topic" : "nonexistingtopic", + "zkConnection" : "127.0.0.1:2000", + "txZkServers" : "127.0.0.1:2000", + "schemeCls" : "storm.kafka.StringScheme" + }, + "eagleService": { + "host": "localhost", + "port": 9090 + "username": "admin", + "password": "secret" + }, + "dataSinkConfig": { + "topic" : "myexampletopic", + "brokerList" : "127.0.0.1:2000", + "serializerClass" : "kafka.serializer.StringEncoder", + "keySerializerClass" : "kafka.serializer.StringEncoder" + } +} From dacfe7bc9125c5ed89fe121595cecad0a0bddbcc Mon Sep 17 00:00:00 2001 From: yonzhang Date: Sat, 18 Apr 2020 17:43:48 -0700 Subject: [PATCH 3/6] EAGLE-1104, integrate flink and start from siddhi operator --- eagle-flink/pom.xml | 44 +++ .../eagle/flink/AlertDeduplication.java | 82 +++++ .../apache/eagle/flink/AlertDefinition.java | 106 ++++++ .../apache/eagle/flink/AlertPublishEvent.java | 184 ++++++++++ .../org/apache/eagle/flink/AlertSeverity.java | 21 ++ .../org/apache/eagle/flink/AlertSink.java | 17 + .../eagle/flink/AlertStreamCallback.java | 92 +++++ .../eagle/flink/AlertStreamCollector.java | 26 ++ .../apache/eagle/flink/AlertStreamEvent.java | 192 +++++++++++ .../org/apache/eagle/flink/Collector.java | 27 ++ .../eagle/flink/CompositePolicyHandler.java | 92 +++++ .../org/apache/eagle/flink/DateTimeUtil.java | 220 ++++++++++++ .../eagle/flink/EagleFlinkStreamApp.java | 24 ++ .../apache/eagle/flink/PartitionedEvent.java | 131 +++++++ .../eagle/flink/PolicyChangeListener.java | 24 ++ .../apache/eagle/flink/PolicyDefinition.java | 321 ++++++++++++++++++ .../eagle/flink/PolicyGroupEvaluator.java | 41 +++ .../eagle/flink/PolicyGroupEvaluatorImpl.java | 177 ++++++++++ .../eagle/flink/PolicyHandlerContext.java | 68 ++++ .../eagle/flink/PolicyStreamHandler.java | 25 ++ .../eagle/flink/PolicyStreamHandlers.java | 48 +++ .../org/apache/eagle/flink/SiddhiCEPOp.java | 14 + 
.../eagle/flink/SiddhiDefinitionAdapter.java | 172 ++++++++++ .../eagle/flink/SiddhiPolicyHandler.java | 115 +++++++ .../eagle/flink/SiddhiPolicyStateHandler.java | 59 ++++ .../org/apache/eagle/flink/StreamColumn.java | 235 +++++++++++++ .../org/apache/eagle/flink/StreamContext.java | 41 +++ .../apache/eagle/flink/StreamContextImpl.java | 41 +++ .../org/apache/eagle/flink/StreamCounter.java | 25 ++ .../apache/eagle/flink/StreamDefinition.java | 209 ++++++++++++ .../org/apache/eagle/flink/StreamEvent.java | 166 +++++++++ .../eagle/flink/StreamEventBuilder.java | 94 +++++ .../eagle/flink/StreamEventIterator.java | 105 ++++++ .../apache/eagle/flink/StreamEventSource.java | 46 +++ .../flink/StreamNotDefinedException.java | 38 +++ .../apache/eagle/flink/StreamPartition.java | 142 ++++++++ .../src/main/resources/log4j.properties | 19 ++ pom.xml | 1 + 38 files changed, 3484 insertions(+) create mode 100644 eagle-flink/pom.xml create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertDeduplication.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertDefinition.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertPublishEvent.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java create mode 100755 eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCollector.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java create mode 100755 eagle-flink/src/main/java/org/apache/eagle/flink/Collector.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/DateTimeUtil.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyDefinition.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluator.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyHandlerContext.java create mode 100755 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandlers.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java create mode 100755 eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyStateHandler.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamContext.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamCounter.java create mode 100644 
eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventBuilder.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamNotDefinedException.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/StreamPartition.java create mode 100644 eagle-flink/src/main/resources/log4j.properties diff --git a/eagle-flink/pom.xml b/eagle-flink/pom.xml new file mode 100644 index 0000000000..6c8a52fc15 --- /dev/null +++ b/eagle-flink/pom.xml @@ -0,0 +1,44 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>eagle-parent</artifactId> + <groupId>org.apache.eagle</groupId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>eagle-flink</artifactId> + <properties> + <flink.version>1.10.0</flink.version> + <scala.binary.version>2.11</scala.binary.version> + </properties> + + <dependencies> + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-streaming-java_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + </dependency> + <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + </dependency> + <dependency> + <groupId>log4j</groupId> + <artifactId>log4j</artifactId> + </dependency> + <dependency> + <groupId>org.wso2.siddhi</groupId> + <artifactId>siddhi-core</artifactId> + </dependency> + </dependencies> +</project> \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertDeduplication.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertDeduplication.java new file mode 100644 index 0000000000..acb31ba3dc --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertDeduplication.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.eagle.flink; + +import org.apache.commons.collections.ListUtils; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import java.util.List; +import java.util.Objects; + +public class AlertDeduplication { + private String outputStreamId; + private String dedupIntervalMin; + private List<String> dedupFields; + + public String getOutputStreamId() { + return outputStreamId; + } + + public void setOutputStreamId(String outputStreamId) { + this.outputStreamId = outputStreamId; + } + + public String getDedupIntervalMin() { + return dedupIntervalMin; + } + + public void setDedupIntervalMin(String dedupIntervalMin) { + this.dedupIntervalMin = dedupIntervalMin; + } + + public List<String> getDedupFields() { + return dedupFields; + } + + public void setDedupFields(List<String> dedupFields) { + this.dedupFields = dedupFields; + } + + @Override + public int hashCode() { + return new HashCodeBuilder() + .append(outputStreamId) + .append(dedupFields) + .append(dedupIntervalMin) + .build(); + } + + @Override + public boolean equals(Object that) { + if (that == this) { + return true; + } + if (!(that instanceof AlertDeduplication)) { + return false; + } + AlertDeduplication another = (AlertDeduplication) that; + if (ListUtils.isEqualList(another.dedupFields, this.dedupFields) + && Objects.equals(another.dedupIntervalMin, this.dedupIntervalMin) + && Objects.equals(another.outputStreamId, this.outputStreamId)) { + return true; + } + return false; + } + + +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertDefinition.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertDefinition.java new file mode 100644 index 0000000000..108b283877 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertDefinition.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

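The AlertDeduplication bean above only carries configuration; the executor that consumes it is not part of this patch. As a reading aid, a minimal sketch of how such settings could be interpreted, built around a hypothetical dedupKey helper: two alerts would be suppressed as duplicates when they agree on outputStreamId and on every field listed in dedupFields within the dedupIntervalMin window.

    import java.util.Map;

    class DedupKeySketch {
        // Hypothetical helper: the identity under which alerts would be deduplicated.
        static String dedupKey(AlertDeduplication dedup, Map<String, Object> alertData) {
            StringBuilder key = new StringBuilder(dedup.getOutputStreamId());
            for (String field : dedup.getDedupFields()) {
                key.append('|').append(alertData.get(field));
            }
            return key.toString();
        }
    }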
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import java.util.Objects; + +public class AlertDefinition { + private TemplateType templateType = TemplateType.TEXT; + private String subject; + private String body; + + private AlertSeverity severity; + private String category; + + public String getBody() { + return body; + } + + public void setBody(String templateResource) { + this.body = templateResource; + } + + public TemplateType getTemplateType() { + return templateType; + } + + public void setTemplateType(TemplateType type) { + this.templateType = type; + } + + public String getSubject() { + return subject; + } + + public void setSubject(String subject) { + this.subject = subject; + } + + public AlertSeverity getSeverity() { + return severity; + } + + public void setSeverity(AlertSeverity severity) { + this.severity = severity; + } + + public String getCategory() { + return category; + } + + public void setCategory(String category) { + this.category = category; + } + + public enum TemplateType { + TEXT, + // FILE, + // HTTP + } + + @Override + public int hashCode() { + return new HashCodeBuilder() + .append(templateType) + .append(this.body) + .append(this.category) + .append(this.severity) + .append(this.subject) + .build(); + } + + @Override + public boolean equals(Object that) { + if (that == this) { + return true; + } + if (!(that instanceof AlertDefinition)) { + return false; + } + AlertDefinition another = (AlertDefinition) that; + if (Objects.equals(another.templateType, this.templateType) + && Objects.equals(another.body, this.body) + && Objects.equals(another.category, this.category) + && Objects.equals(another.severity, this.severity) + && Objects.equals(another.subject, this.subject)) { + return true; + } + return false; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertPublishEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertPublishEvent.java new file mode 100644 index 0000000000..a4c48e9c6b --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertPublishEvent.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

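For reference, populating the template bean defined above is plain setter calls; the subject and body strings below are illustrative placeholders, not a template syntax defined by this patch.

    class AlertDefinitionSketch {
        static AlertDefinition sample() {
            AlertDefinition alertDef = new AlertDefinition();
            alertDef.setSubject("High CPU usage detected");           // illustrative
            alertDef.setBody("CPU usage exceeded the policy threshold");
            alertDef.setSeverity(AlertSeverity.CRITICAL);
            alertDef.setCategory("SYSTEM");
            // templateType defaults to TemplateType.TEXT, currently the only enabled type
            return alertDef;
        }
    }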
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.eagle.flink; + +import com.google.common.base.Preconditions; + +import java.util.List; +import java.util.Map; + +/** + * Use as final rich alert event. + */ +public class AlertPublishEvent { + private String alertId; + private String siteId; + private List<String> appIds; + private String policyId; + private String policyValue; + private long alertTimestamp; + private Map<String, Object> alertData; + private String alertSubject; + private String alertBody; + private String streamId; + private String createdBy; + private long createdTime; + + public static final String ALERT_ID_KEY = "alertId"; + public static final String SITE_ID_KEY = "siteId"; + public static final String APP_IDS_KEY = "appIds"; + public static final String POLICY_ID_KEY = "policyId"; + public static final String POLICY_VALUE_KEY = "policyValue"; + public static final String ALERT_CATEGORY = "category"; + public static final String ALERT_SEVERITY = "severity"; + public static final String ALERT_HOST = "host"; + + public String getAlertId() { + return alertId; + } + + public void setAlertId(String alertId) { + this.alertId = alertId; + } + + public List<String> getAppIds() { + return appIds; + } + + public void setAppIds(List<String> appIds) { + this.appIds = appIds; + } + + public String getPolicyValue() { + return policyValue; + } + + public void setPolicyValue(String policyValue) { + this.policyValue = policyValue; + } + + public long getAlertTimestamp() { + return alertTimestamp; + } + + public void setAlertTimestamp(long alertTimestamp) { + this.alertTimestamp = alertTimestamp; + } + + public String getSiteId() { + return siteId; + } + + public void setSiteId(String siteId) { + this.siteId = siteId; + } + + + public String getPolicyId() { + return policyId; + } + + public void setPolicyId(String policyId) { + this.policyId = policyId; + } + + public Map<String, Object> getAlertData() { + return alertData; + } + + public void setAlertData(Map<String, Object> alertData) { + this.alertData = alertData; + } + + public static AlertPublishEvent createAlertPublishEvent(AlertStreamEvent event) { + Preconditions.checkNotNull(event.getAlertId(), "alertId is not initialized before being published: " + event.toString()); + AlertPublishEvent alertEvent = new AlertPublishEvent(); + alertEvent.setAlertId(event.getAlertId()); + alertEvent.setPolicyId(event.getPolicyId()); + alertEvent.setAlertTimestamp(event.getCreatedTime()); + alertEvent.setStreamId(event.getStreamId()); + alertEvent.setCreatedBy(event.getCreatedBy()); + alertEvent.setCreatedTime(event.getCreatedTime()); + alertEvent.setAlertSubject(event.getSubject()); + alertEvent.setAlertBody(event.getBody()); + if (event.getContext() != null && !event.getContext().isEmpty()) { + if (event.getContext().containsKey(SITE_ID_KEY)) { + alertEvent.setSiteId(event.getContext().get(SITE_ID_KEY).toString()); + } + if (event.getContext().containsKey(POLICY_VALUE_KEY)) { + alertEvent.setPolicyValue(event.getContext().get(POLICY_VALUE_KEY).toString()); + } + if (event.getContext().containsKey(APP_IDS_KEY)) { + alertEvent.setAppIds((List<String>) event.getContext().get(APP_IDS_KEY)); + } + } + alertEvent.setAlertData(event.getDataMap()); + return alertEvent; + } + + public String toString() { + return
String.format("%s %s alertId=%s, siteId=%s, policyId=%s, alertData=%s", + DateTimeUtil.millisecondsToHumanDateWithSeconds(alertTimestamp), + DateTimeUtil.CURRENT_TIME_ZONE.getID(), + alertId, + siteId, + policyId, + alertData == null ? "" : alertData.toString()); + } + + public String getAlertSubject() { + return alertSubject; + } + + public void setAlertSubject(String alertSubject) { + this.alertSubject = alertSubject; + } + + public String getAlertBody() { + return alertBody; + } + + public void setAlertBody(String alertBody) { + this.alertBody = alertBody; + } + + public String getStreamId() { + return streamId; + } + + public void setStreamId(String streamId) { + this.streamId = streamId; + } + + public String getCreatedBy() { + return createdBy; + } + + public void setCreatedBy(String createdBy) { + this.createdBy = createdBy; + } + + public long getCreatedTime() { + return createdTime; + } + + public void setCreatedTime(long createdTime) { + this.createdTime = createdTime; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java new file mode 100644 index 0000000000..7d4769c228 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

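A short sketch of driving createAlertPublishEvent above. The context map feeds the optional site and app enrichment, and a schema has to be attached because the conversion calls getDataMap(); an empty StreamDefinition keeps the sketch self-contained, assuming its column list defaults to empty as in upstream Eagle, while a real pipeline would register the alert stream's columns.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    class AlertPublishSketch {
        static AlertPublishEvent sample() {
            AlertStreamEvent event = new AlertStreamEvent();
            event.setStreamId("ALERT_STREAM");                 // illustrative names
            event.setPolicyId("samplePolicy");
            event.setCreatedTime(System.currentTimeMillis());
            event.setSchema(new StreamDefinition());           // real code sets columns
            event.setData(new Object[0]);

            Map<String, Object> context = new HashMap<>();
            context.put(AlertPublishEvent.SITE_ID_KEY, "testsite");
            context.put(AlertPublishEvent.APP_IDS_KEY, Arrays.asList("exampleApp"));
            event.setContext(context);

            return AlertPublishEvent.createAlertPublishEvent(event);
        }
    }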
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +public enum AlertSeverity { + UNKNOWN, OK, WARNING, CRITICAL, FATAL +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java new file mode 100644 index 0000000000..1e5b0186b1 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java @@ -0,0 +1,17 @@ +package org.apache.eagle.flink; + +import org.apache.flink.streaming.api.functions.sink.SinkFunction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class AlertSink implements SinkFunction<AlertPublishEvent> { + + private static final long serialVersionUID = 1L; + + private static final Logger LOG = LoggerFactory.getLogger(AlertSink.class); + + @Override + public void invoke(AlertPublishEvent value, Context context) { + LOG.info(value.toString()); + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java new file mode 100644 index 0000000000..9dd193f03f --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java @@ -0,0 +1,92 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.wso2.siddhi.core.event.Event; +import org.wso2.siddhi.core.stream.output.StreamCallback; + +/** + * Created on 8/2/16. + */ +public class AlertStreamCallback extends StreamCallback { + + private static final Logger LOG = LoggerFactory.getLogger(AlertStreamCallback.class); + private final String outputStream; + private final Collector<AlertStreamEvent> collector; + private final PolicyHandlerContext context; + private final StreamDefinition definition; + + private int currentIndex; + + public AlertStreamCallback(String outputStream, + StreamDefinition streamDefinition, + Collector<AlertStreamEvent> collector, + PolicyHandlerContext context, + int currentIndex) { + this.outputStream = outputStream; + this.collector = collector; + this.context = context; + this.definition = streamDefinition; + this.currentIndex = currentIndex; + } + + /** + * Possibly more than one event will be triggered for alerting. + */ + @Override + public void receive(Event[] events) { + String policyName = context.getPolicyDefinition().getName(); + String siteId = context.getPolicyDefinition().getSiteId(); + CompositePolicyHandler handler = ((PolicyGroupEvaluatorImpl) context.getPolicyEvaluator()).getPolicyHandler(policyName); + if (LOG.isDebugEnabled()) { + LOG.debug("Generated {} alerts from policy '{}' in {}, index of definition {}", events.length, policyName, context.getPolicyEvaluatorId(), currentIndex); + } + for (Event e : events) { + AlertStreamEvent event = new AlertStreamEvent(); + event.setSiteId(siteId); + event.setTimestamp(e.getTimestamp()); + event.setData(e.getData()); + event.setStreamId(outputStream); + event.setPolicyId(context.getPolicyDefinition().getName()); + if (this.context.getPolicyEvaluator() != null) { + event.setCreatedBy(context.getPolicyEvaluator().getName()); + } + event.setCreatedTime(System.currentTimeMillis()); + event.setSchema(definition); + + if (LOG.isDebugEnabled()) { + LOG.debug("Generated new alert event: {}", event); + } + try { + if (handler == null) { + // extreme case: the handler was removed from the evaluator, so just emit. + if (LOG.isDebugEnabled()) { + LOG.debug("handler not found when callback received event; emitting directly (policy removed?)"); + } + collector.emit(event); + } else { + handler.send(event, currentIndex + 1); + } + } catch (Exception ex) { + LOG.error(String.format("send event %s to index %d failed with exception", event, currentIndex), ex); + } + } + context.getPolicyCounter().incrBy(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "alert_count"), events.length); + } +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCollector.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCollector.java new file mode 100755 index 0000000000..2ea5c21b98 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCollector.java @@ -0,0 +1,26 @@ +package org.apache.eagle.flink; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

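To make the callback's wiring concrete, a hedged sketch of attaching an AlertStreamCallback instance to a Siddhi runtime. It assumes the Siddhi 3.x API from the org.wso2.siddhi siddhi-core dependency declared in the eagle-flink pom; the stream names and the query text are illustrative only.

    import org.wso2.siddhi.core.ExecutionPlanRuntime;
    import org.wso2.siddhi.core.SiddhiManager;

    class SiddhiWiringSketch {
        static void wire(AlertStreamCallback callback) throws InterruptedException {
            SiddhiManager manager = new SiddhiManager();
            String plan = "define stream inputStream (host string, value double); "
                + "@info(name = 'query') from inputStream[value > 90.0] "
                + "select host, value insert into outputStream;";
            ExecutionPlanRuntime runtime = manager.createExecutionPlanRuntime(plan);
            runtime.addCallback("outputStream", callback);     // matched events reach receive()
            runtime.start();
            runtime.getInputHandler("inputStream").send(new Object[]{"host-1", 95.0});
        }
    }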
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +public interface AlertStreamCollector extends Collector<AlertStreamEvent> { + /** + * Need not be thread-safe, but should be called synchronously, for example from within a Storm bolt's execute method. + */ + void flush(); + + void close(); +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java new file mode 100644 index 0000000000..2a6c36057b --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java @@ -0,0 +1,192 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

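The flush/close contract above invites a buffering implementation: emit() can be called from evaluation threads while flush() is called synchronously by the owning operator. A minimal sketch; the class and field names are invented for illustration.

    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;

    class BufferedAlertStreamCollector implements AlertStreamCollector {
        private final Queue<AlertStreamEvent> buffer = new ConcurrentLinkedQueue<>();
        private final Collector<AlertStreamEvent> downstream;

        BufferedAlertStreamCollector(Collector<AlertStreamEvent> downstream) {
            this.downstream = downstream;
        }

        @Override
        public void emit(AlertStreamEvent event) {
            buffer.add(event); // thread-safe, per the Collector contract
        }

        @Override
        public void flush() {
            AlertStreamEvent event;
            while ((event = buffer.poll()) != null) {
                downstream.emit(event);
            }
        }

        @Override
        public void close() {
            flush(); // drain any remaining alerts before shutdown
        }
    }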
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import java.util.*; + +/** + * streamId stands for alert type instead of source event streamId. + */ +public class AlertStreamEvent extends StreamEvent { + private static final long serialVersionUID = 2392131134670106397L; + + private String siteId; + private String alertId; + private String policyId; + private StreamDefinition schema; + private String createdBy; + private long createdTime; + private String category; + private AlertSeverity severity = AlertSeverity.WARNING; + private boolean duplicationChecked = false; + + // ---------------------- + // Lazy Alert Fields + // ---------------------- + + // Dynamic context, such as application-related fields + private Map<String, Object> context; + // Alert content like subject and body + private String subject; + private String body; + + public AlertStreamEvent() { + } + + public AlertStreamEvent(AlertStreamEvent event) { + this.siteId = event.getSiteId(); + this.alertId = event.getAlertId(); + this.policyId = event.policyId; + this.schema = event.schema; + this.createdBy = event.createdBy; + this.createdTime = event.createdTime; + this.setTimestamp(event.getTimestamp()); + this.setData(new Object[event.data.length]); + System.arraycopy(event.data, 0, this.data, 0, event.data.length); + this.setStreamId(event.getStreamId()); + this.setMetaVersion(event.getMetaVersion()); + } + + public void setPolicyId(String policyId) { + this.policyId = policyId; + } + + public String getPolicyId() { + return policyId; + } + + @Override + public String toString() { + List<String> dataStrings = new ArrayList<>(this.getData().length); + for (Object obj : this.getData()) { + if (obj != null) { + dataStrings.add(obj.toString()); + } else { + dataStrings.add(null); + } + } + + return String.format("Alert {site=%s, stream=%s,timestamp=%s,data=%s, policyId=%s, createdBy=%s, metaVersion=%s}", + this.getSiteId(), + this.getStreamId(), DateTimeUtil.millisecondsToHumanDateWithMilliseconds(this.getTimestamp()), + this.getDataMap(), this.getPolicyId(), this.getCreatedBy(), this.getMetaVersion()); + } + + public String getCreatedBy() { + return createdBy; + } + + public void setCreatedBy(String createdBy) { + this.createdBy = createdBy; + } + + public StreamDefinition getSchema() { + return schema; + } + + public void setSchema(StreamDefinition schema) { + this.schema = schema; + } + + public long getCreatedTime() { + return createdTime; + } + + public void setCreatedTime(long createdTime) { + this.createdTime = createdTime; + } + + public Map<String, Object> getDataMap() { + Map<String, Object> event = new HashMap<>(); + for (StreamColumn column : schema.getColumns()) { + Object obj = this.getData()[schema.getColumnIndex(column.getName())]; + if (obj == null) { + event.put(column.getName(), null); + continue; + } + event.put(column.getName(), obj); + } + return event; + } + + public Map<String, Object> getContext() { + return context; + } + + public void setContext(Map<String, Object> context) { + this.context = context; + } + + public String getAlertId() { + ensureAlertId(); + return alertId; + } + + public void ensureAlertId() { + if (this.alertId == null) { + this.alertId = UUID.randomUUID().toString(); + } + } + + public String getSubject() { + return subject; + } + + public void setSubject(String
subject) { + this.subject = subject; + } + + public String getBody() { + return body; + } + + public void setBody(String body) { + this.body = body; + } + + public String getCategory() { + return category; + } + + public void setCategory(String category) { + this.category = category; + } + + public AlertSeverity getSeverity() { + return severity; + } + + public void setSeverity(AlertSeverity severity) { + this.severity = severity; + } + + public String getSiteId() { + return siteId; + } + + public void setSiteId(String siteId) { + this.siteId = siteId; + } + + public boolean isDuplicationChecked() { + return duplicationChecked; + } + + public void setDuplicationChecked(boolean duplicationChecked) { + this.duplicationChecked = duplicationChecked; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/Collector.java b/eagle-flink/src/main/java/org/apache/eagle/flink/Collector.java new file mode 100755 index 0000000000..294e25c980 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/Collector.java @@ -0,0 +1,27 @@ +package org.apache.eagle.flink; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

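One usage note for the event class above: alertId is assigned lazily, so getAlertId() never returns null once called. A tiny example with illustrative values:

    class AlertIdSketch {
        static String sample() {
            AlertStreamEvent alert = new AlertStreamEvent();
            alert.setStreamId("ALERT_STREAM");
            alert.setTimestamp(System.currentTimeMillis());
            alert.setData(new Object[]{"host-1", 95.0});  // raw column values
            return alert.getAlertId();                    // ensureAlertId() assigns a UUID here
        }
    }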
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@FunctionalInterface +public interface Collector<T> { + /** + * Implementations must be thread-safe. + * + * @param t the object to emit + */ + void emit(T t); +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java new file mode 100644 index 0000000000..50ae079bd9 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java @@ -0,0 +1,92 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

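Because the interface above is a @FunctionalInterface, a collector can be supplied as a lambda, which keeps tests short; a trivial sketch:

    class CollectorLambdaSketch {
        // a no-frills sink for tests
        static final Collector<AlertStreamEvent> PRINTING = alert -> System.out.println(alert);
    }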
+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * Created on 7/27/16. + */ +public class CompositePolicyHandler implements PolicyStreamHandler { + private static final Logger LOG = LoggerFactory.getLogger(CompositePolicyHandler.class); + + private PolicyStreamHandler policyHandler; + private PolicyStreamHandler stateHandler; + private List<PolicyStreamHandler> handlers = new ArrayList<>(); + + private Collector<AlertStreamEvent> collector; + + private Map<String, StreamDefinition> sds; + + public CompositePolicyHandler(Map<String, StreamDefinition> sds) { + this.sds = sds; + } + + @Override + public void prepare(Collector<AlertStreamEvent> collector, PolicyHandlerContext context) throws Exception { + this.collector = collector; + // TODO: create two handlers + policyHandler = PolicyStreamHandlers.createHandler(context.getPolicyDefinition().getDefinition(), sds); + policyHandler.prepare(collector, context); + handlers.add(policyHandler); + + if (context.getPolicyDefinition().getStateDefinition() != null) { + stateHandler = PolicyStreamHandlers.createStateHandler(context.getPolicyDefinition().getStateDefinition().type, sds); + stateHandler.prepare(collector, context); + handlers.add(stateHandler); + } + } + + @Override + public void send(StreamEvent event) throws Exception { + // policyHandler.send(event); + send(event, 0); + } + + // send event to the stream handler at the given index + public void send(StreamEvent event, int idx) throws Exception { + if (handlers.size() > idx) { + handlers.get(idx).send(event); + } else if (event instanceof AlertStreamEvent) { + if (LOG.isDebugEnabled()) { + LOG.debug("Emit new alert event: {}", event); + } + collector.emit((AlertStreamEvent) event); // for alert stream events, emit if no handler found. + } else { + // nothing found: log and throw exception + LOG.error("non-alert-stream-event {} sent with index {}, but no handler was found!", event, idx); + throw new Exception(String.format("event %s sent with idx %d cannot find the expected handler!", event, idx)); + } + } + + @Override + public void close() throws Exception { + for (PolicyStreamHandler handler : handlers) { + try { + handler.close(); + } catch (Exception e) { + LOG.error("close handler {} failed, continue to run.", handler, e); + } + } + } + +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/DateTimeUtil.java b/eagle-flink/src/main/java/org/apache/eagle/flink/DateTimeUtil.java new file mode 100644 index 0000000000..a25ffbaa63 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/DateTimeUtil.java @@ -0,0 +1,220 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.TimeZone; + +/** + * be aware that SimpleDateFormat instantiation is expensive, so if that's under a tight loop, probably we need + * a thread local SimpleDateFormat object. + */ +public class DateTimeUtil { + public static final long ONESECOND = 1L * 1000L; + public static final long ONEMINUTE = 1L * 60L * 1000L; + public static final long ONEHOUR = 1L * 60L * 60L * 1000L; + public static final long ONEDAY = 24L * 60L * 60L * 1000L; + public static TimeZone CURRENT_TIME_ZONE; + + static { + Config config = ConfigFactory.load(); + CURRENT_TIME_ZONE = TimeZone.getTimeZone((config.hasPath("service.timezone") + ? config.getString("service.timezone") : "UTC")); + } + + public static Date humanDateToDate(String date) throws ParseException { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + return sdf.parse(date); + } + + public static long getCurrentTimestamp() { + return System.currentTimeMillis(); + } + + public static String secondsToHumanDate(long seconds) { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date t = new Date(); + t.setTime(seconds * 1000); + return sdf.format(t); + } + + public static String secondsToHumanDate(long seconds, TimeZone timeZone) { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(timeZone); + Date t = new Date(); + t.setTime(seconds * 1000); + return sdf.format(t); + } + + public static String millisecondsToHumanDateWithMilliseconds(long milliseconds) { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date t = new Date(); + t.setTime(milliseconds); + return sdf.format(t); + } + + public static String millisecondsToHumanDateWithSeconds(long milliseconds) { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date t = new Date(); + t.setTime(milliseconds); + return sdf.format(t); + } + + public static String millisecondsToHumanDateWithSecondsAndTimezone(long milliseconds) { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date t = new Date(); + t.setTime(milliseconds); + return sdf.format(t); + } + + public static long humanDateToSeconds(String date) throws ParseException { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date d = sdf.parse(date); + return d.getTime() / 1000; + } + + public static long humanDateToSeconds(String date, TimeZone timeZone) throws ParseException { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(timeZone); + Date d = sdf.parse(date); + return d.getTime() / 1000; + } + + public static long humanDateToMilliseconds(String date) 
throws ParseException { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date d = sdf.parse(date); + return d.getTime(); + } + + public static long humanDateToMilliseconds(String date, TimeZone timeZone) throws ParseException { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS"); + sdf.setTimeZone(timeZone); + Date d = sdf.parse(date); + return d.getTime(); + } + + + public static long humanDateToMillisecondsWithoutException(String date) { + try { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date d = sdf.parse(date); + return d.getTime(); + } catch (ParseException ex) { + return 0L; + } + } + + public static long humanDateToSecondsWithoutException(String date) { + try { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date d = sdf.parse(date); + return (d.getTime() / 1000); + } catch (ParseException ex) { + return 0L; + } + } + + /** + * this could be accurate only when timezone is UTC + * for the timezones other than UTC, there is possibly issue, for example + * assume timezone is GMT+8 in China + * When user time is "2014-07-15 05:00:00", it will be converted to timestamp first, + * internally it would be "2014-07-14 21:00:00" in UTC timezone. When rounded down to day, the internal time would + * be changed to "2014-07-14 00:00:00", and that means the user time is "2014-07-14 08:00:00". + * But originally user wants to round it to "2014-07-15 00:00:00" + * + * @param timeInMillis the seconds elapsed since 1970-01-01 00:00:00 + */ + public static long roundDown(int field, long timeInMillis) { + switch (field) { + case Calendar.DAY_OF_MONTH: + case Calendar.DAY_OF_WEEK: + case Calendar.DAY_OF_YEAR: + return (timeInMillis - timeInMillis % (24 * 60 * 60 * 1000)); + case Calendar.HOUR: + return (timeInMillis - timeInMillis % (60 * 60 * 1000)); + case Calendar.MINUTE: + return (timeInMillis - timeInMillis % (60 * 1000)); + case Calendar.SECOND: + return (timeInMillis - timeInMillis % (1000)); + default: + return 0L; + } + } + + public static String getCalendarFieldName(int field) { + switch (field) { + case Calendar.DAY_OF_MONTH: + return "DAY_OF_MONTH"; + case Calendar.DAY_OF_WEEK: + return "DAY_OF_WEEK"; + case Calendar.DAY_OF_YEAR: + return "DAY_OF_YEAR"; + case Calendar.HOUR: + return "HOUR"; + case Calendar.MINUTE: + return "MINUTE"; + case Calendar.SECOND: + return "SECOND"; + default: + throw new IllegalArgumentException("Unknown field code: " + field); + } + } + + public static String format(long milliseconds, String format) { + SimpleDateFormat sdf = new SimpleDateFormat(format); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date t = new Date(); + t.setTime(milliseconds); + return sdf.format(t); + } + + //For mapr + //exp: 2015-06-06T10:44:22.800Z + public static long maprhumanDateToMilliseconds(String date) throws ParseException { + date = date.replace('T', ' '); + date = date.replace('Z', ' '); + date = date.replace('.', ','); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS "); + sdf.setTimeZone(CURRENT_TIME_ZONE); + Date d = sdf.parse(date); + return d.getTime(); + } + + public static long parseTimeStrToMilliseconds(String timeStr) throws ParseException { + try { + return Long.valueOf(timeStr); + } catch (Exception ex) { + return humanDateToSeconds(timeStr) * ONESECOND; + } + } +} diff --git 
a/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java b/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java new file mode 100644 index 0000000000..76a317f393 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java @@ -0,0 +1,24 @@ +package org.apache.eagle.flink; + +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; + +public class EagleFlinkStreamApp { + public static void main(String[] args) throws Exception { + StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); + + DataStream events = env + .addSource(new StreamEventSource()) + .name("eagle-events"); + + DataStream alerts = events + .keyBy(StreamEvent::getKey) + .process(new SiddhiCEPOp()) + .name("eagle-alert-engine"); + + alerts.addSink(new AlertSink()) + .name("eagle-alert-publisher"); + + env.execute("Eagle Alert Engine"); + } +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java new file mode 100644 index 0000000000..fd6d736bf7 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java @@ -0,0 +1,131 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * This is a critical data structure shared by the spout, the router bolt and the alert bolt:
+ * partition (StreamPartition) defines how an incoming data stream is partitioned and sorted;
+ * partitionKey (long) is the Java hash value of the group-by fields, which are defined in StreamPartition;
+ * event (StreamEvent) is the actual data.
+ */
+public class PartitionedEvent implements Serializable {
+    private static final long serialVersionUID = -3840016190614238593L;
+    private StreamPartition partition;
+    private long partitionKey;
+    private StreamEvent event;
+
+    public PartitionedEvent() {
+        this.event = null;
+        this.partition = null;
+        this.partitionKey = 0L;
+    }
+
+    public PartitionedEvent(StreamEvent event, StreamPartition partition, long partitionKey) {
+        this.event = event;
+        this.partition = partition;
+        this.partitionKey = partitionKey;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == this) {
+            return true;
+        }
+        if (obj == null) {
+            return false;
+        }
+        if (obj instanceof PartitionedEvent) {
+            PartitionedEvent another = (PartitionedEvent) obj;
+            return this.partitionKey == another.getPartitionKey()
+                && Objects.equals(this.event, another.getEvent())
+                && Objects.equals(this.partition, another.getPartition());
+        } else {
+            return false;
+        }
+    }
+
+    @Override
+    public int hashCode() {
+        return new HashCodeBuilder()
+            .append(partitionKey)
+            .append(event)
+            .append(partition)
+            .build();
+    }
+
+    public StreamEvent getEvent() {
+        return event;
+    }
+
+    public void setEvent(StreamEvent event) {
+        this.event = event;
+    }
+
+    public StreamPartition getPartition() {
+        return partition;
+    }
+
+    public void setPartition(StreamPartition partition) {
+        this.partition = partition;
+    }
+
+    public void setPartitionKey(long partitionKey) {
+        this.partitionKey = partitionKey;
+    }
+
+    public long getPartitionKey() {
+        return this.partitionKey;
+    }
+
+    public String toString() {
+        return String.format("PartitionedEvent[partition=%s,event=%s,key=%s]", partition, event, partitionKey);
+    }
+
+    public long getTimestamp() {
+        return (event != null) ? event.getTimestamp() : 0L;
+    }
+
+    public String getStreamId() {
+        return (event != null) ? event.getStreamId() : null;
+    }
+
+    public Object[] getData() {
+        return event != null ? 
event.getData() : null; + } + + public boolean isSortRequired() { + return isPartitionRequired() && this.getPartition().getSortSpec() != null; + } + + public boolean isPartitionRequired() { + return this.getPartition() != null; + } + + public PartitionedEvent copy() { + PartitionedEvent copied = new PartitionedEvent(); + copied.setEvent(this.getEvent()); + copied.setPartition(this.partition); + copied.setPartitionKey(this.partitionKey); + return copied; + } + +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java new file mode 100644 index 0000000000..11b2ae803b --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import java.util.Collection; +import java.util.List; + +public interface PolicyChangeListener { + void onPolicyChange(List allPolicies, Collection addedPolicies, Collection removedPolicies, Collection modifiedPolicies); +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyDefinition.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyDefinition.java new file mode 100644 index 0000000000..55346618cd --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyDefinition.java @@ -0,0 +1,321 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.ListUtils; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import java.io.Serializable; +import java.util.*; + +/** + * @since Apr 5, 2016. 
+ */
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class PolicyDefinition implements Serializable {
+    private static final long serialVersionUID = 377581499339572414L;
+    // unique identifier
+    private String name;
+    private String description;
+    private List<String> inputStreams = new ArrayList<>();
+    private List<String> outputStreams = new ArrayList<>();
+    private String siteId = "default";
+
+    private Definition definition;
+    private Definition stateDefinition;
+    private PolicyStatus policyStatus = PolicyStatus.ENABLED;
+    private AlertDefinition alertDefinition;
+    private List<AlertDeduplication> alertDeduplications = new ArrayList<>();
+
+    // one stream has only one partition within one policy, since we don't support stream aliases
+    private List<StreamPartition> partitionSpec = new ArrayList<>();
+    private boolean dedicated;
+
+    // runtime configuration for the policy; user-invisible
+    private int parallelismHint = 1;
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public List<String> getInputStreams() {
+        return inputStreams;
+    }
+
+    public void setInputStreams(List<String> inputStreams) {
+        this.inputStreams = inputStreams;
+    }
+
+    public List<String> getOutputStreams() {
+        return outputStreams;
+    }
+
+    public void setOutputStreams(List<String> outputStreams) {
+        this.outputStreams = outputStreams;
+    }
+
+    public Definition getDefinition() {
+        return definition;
+    }
+
+    public Definition getStateDefinition() {
+        return stateDefinition;
+    }
+
+    public void setStateDefinition(Definition stateDefinition) {
+        this.stateDefinition = stateDefinition;
+    }
+
+    public void setDefinition(Definition definition) {
+        this.definition = definition;
+    }
+
+    public List<StreamPartition> getPartitionSpec() {
+        return partitionSpec;
+    }
+
+    public void setPartitionSpec(List<StreamPartition> partitionSpec) {
+        this.partitionSpec = partitionSpec;
+    }
+
+    public void addPartition(StreamPartition par) {
+        this.partitionSpec.add(par);
+    }
+
+    public boolean isDedicated() {
+        return dedicated;
+    }
+
+    public void setDedicated(boolean dedicated) {
+        this.dedicated = dedicated;
+    }
+
+    public int getParallelismHint() {
+        return parallelismHint;
+    }
+
+    public void setParallelismHint(int parallelism) {
+        this.parallelismHint = parallelism;
+    }
+
+    public PolicyStatus getPolicyStatus() {
+        return policyStatus;
+    }
+
+    public void setPolicyStatus(PolicyStatus policyStatus) {
+        this.policyStatus = policyStatus;
+    }
+
+    public List<AlertDeduplication> getAlertDeduplications() {
+        return alertDeduplications;
+    }
+
+    public void setAlertDeduplications(List<AlertDeduplication> alertDeduplications) {
+        this.alertDeduplications = alertDeduplications;
+    }
+
+    public AlertDefinition getAlertDefinition() {
+        return alertDefinition;
+    }
+
+    public void setAlertDefinition(AlertDefinition alertDefinition) {
+        this.alertDefinition = alertDefinition;
+    }
+
+    public AlertSeverity getAlertSeverity() {
+        return alertDefinition == null ? null : alertDefinition.getSeverity();
+    }
+
+    public String getAlertCategory() {
+        return alertDefinition == null ? null : alertDefinition.getCategory();
+    }
+
+    public String getSiteId() {
+        return siteId;
+    }
+
+    public void setSiteId(String siteId) {
+        this.siteId = siteId;
+    }
+
+    @Override
+    public int hashCode() {
+        return new HashCodeBuilder()
+            .append(siteId)
+            .append(name)
+            .append(inputStreams)
+            .append(outputStreams)
+            .append(definition)
+            .append(partitionSpec)
+            .append(policyStatus)
+            .append(parallelismHint)
+            .append(alertDefinition)
+            .append(alertDeduplications)
+            .build();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+        if (that == this) {
+            return true;
+        }
+
+        if (!(that instanceof PolicyDefinition)) {
+            return false;
+        }
+
+        PolicyDefinition another = (PolicyDefinition) that;
+
+        if (Objects.equals(another.siteId, this.siteId)
+            && Objects.equals(another.name, this.name)
+            && Objects.equals(another.description, this.description)
+            && CollectionUtils.isEqualCollection(another.inputStreams, this.inputStreams)
+            && CollectionUtils.isEqualCollection(another.outputStreams, this.outputStreams)
+            && Objects.equals(another.definition, this.definition)
+            && CollectionUtils.isEqualCollection(another.partitionSpec, this.partitionSpec)
+            && Objects.equals(another.policyStatus, this.policyStatus)
+            && another.parallelismHint == this.parallelismHint
+            && Objects.equals(another.alertDefinition, alertDefinition)
+            && CollectionUtils.isEqualCollection(another.alertDeduplications, alertDeduplications)) {
+            return true;
+        }
+        return false;
+    }
+
+    @JsonIgnoreProperties(ignoreUnknown = true)
+    public static class Definition implements Serializable {
+        private static final long serialVersionUID = -622366527887848346L;
+
+        public String type;
+        public String value;
+        public String handlerClass;
+        public Map<String, Object> properties = new HashMap<>();
+
+        private List<String> inputStreams = new ArrayList<>();
+        private List<String> outputStreams = new ArrayList<>();
+
+        public Definition(String type, String value) {
+            this.type = type;
+            this.value = value;
+        }
+
+        public Definition() {
+            this.type = null;
+            this.value = null;
+        }
+
+        @Override
+        public int hashCode() {
+            return new HashCodeBuilder().append(type).append(value).build();
+        }
+
+        @Override
+        public boolean equals(Object that) {
+            if (that == this) {
+                return true;
+            }
+            if (!(that instanceof Definition)) {
+                return false;
+            }
+            Definition another = (Definition) that;
+            if (Objects.equals(another.type, this.type)
+                && Objects.equals(another.value, this.value)
+                && ListUtils.isEqualList(another.inputStreams, this.inputStreams)
+                && ListUtils.isEqualList(another.outputStreams, this.outputStreams)) {
+                return true;
+            }
+            return false;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public void setType(String type) {
+            this.type = type;
+        }
+
+        public String getValue() {
+            return value;
+        }
+
+        public void setValue(String value) {
+            this.value = value;
+        }
+
+        public void setInputStreams(List<String> inputStreams) {
+            this.inputStreams = inputStreams;
+        }
+
+        public void setOutputStreams(List<String> outputStreams) {
+            this.outputStreams = outputStreams;
+        }
+
+        public List<String> getInputStreams() {
+            return inputStreams;
+        }
+
+        public List<String> getOutputStreams() {
+            return outputStreams;
+        }
+
+        public String getHandlerClass() {
+            return handlerClass;
+        }
+
+        public void setHandlerClass(String handlerClass) {
+            this.handlerClass = handlerClass;
+        }
+
+        public Map<String, Object> getProperties() {
+            return properties;
+        }
+
+        public void setProperties(Map<String, Object> properties) {
+            this.properties = properties;
+        }
+
+        @Override
+        public String 
toString() { + return String.format("{type=\"%s\",value=\"%s\", inputStreams=\"%s\", outputStreams=\"%s\" }", type, value, inputStreams, outputStreams); + } + } + + public static enum PolicyStatus { + ENABLED, DISABLED + } + + @Override + public String toString() { + return String.format("{site=\"%s\", name=\"%s\",definition=%s}", this.getSiteId(), this.getName(), this.getDefinition() == null ? "null" : this.getDefinition().toString()); + } +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluator.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluator.java new file mode 100644 index 0000000000..842ea83e61 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluator.java @@ -0,0 +1,41 @@ +package org.apache.eagle.flink; + +import java.io.Serializable; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
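For reference, a policy for the Siddhi engine can be assembled programmatically along these lines (a sketch only; the stream ids and the rule text are invented for illustration, and java.util.Collections is assumed imported):

    // Illustrative only: a single-stage Siddhi policy wired by hand.
    PolicyDefinition pd = new PolicyDefinition();
    pd.setName("highValuePolicy");
    pd.setInputStreams(Collections.singletonList("sampleStream_1"));
    pd.setOutputStreams(Collections.singletonList("outputStream"));
    pd.setDefinition(new PolicyDefinition.Definition(
        PolicyStreamHandlers.SIDDHI_ENGINE,
        "from sampleStream_1[value > 100.0] select * insert into outputStream;"));
    pd.setParallelismHint(1);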

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A policy group refers to the policies that belong to the same MonitoredStream.
+ * Four lifecycle steps are involved in a PolicyGroupEvaluator:
+ * Step 1: create the object. Be aware that in a distributed environment this object may be serialized and transferred across the network.
+ * Step 2: init. Normally invoked only once, before nextEvent.
+ * Step 3: nextEvent, invoked once per incoming event.
+ * Step 4: close.
+ */
+public interface PolicyGroupEvaluator extends PolicyChangeListener, Serializable {
+    void init(StreamContext context, AlertStreamCollector collector);
+
+    /**
+     * Evaluate an event.
+     */
+    void nextEvent(PartitionedEvent event);
+
+    String getName();
+
+    void close();
+}
\ No newline at end of file
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java
new file mode 100644
index 0000000000..66bbb3ab27
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
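The four lifecycle steps above translate into a call sequence like the following (a sketch only; streamContext, alertCollector and the event source are assumed to exist):

    // Step 1: create (possibly followed by serialization across the cluster)
    PolicyGroupEvaluator evaluator = new PolicyGroupEvaluatorImpl("evaluator-1");
    // Step 2: init, exactly once before any event is evaluated
    evaluator.init(streamContext, alertCollector);
    // Step 3: nextEvent on the hot path
    for (PartitionedEvent e : incomingEvents) {
        evaluator.nextEvent(e);
    }
    // Step 4: close, releasing any engine runtimes
    evaluator.close();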

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class PolicyGroupEvaluatorImpl implements PolicyGroupEvaluator { + private static final long serialVersionUID = -5499413193675985288L; + + private static final Logger LOG = LoggerFactory.getLogger(PolicyGroupEvaluatorImpl.class); + + private AlertStreamCollector collector; + // mapping from policy name to PolicyDefinition + private volatile Map policyDefinitionMap = new HashMap<>(); + // mapping from policy name to PolicyStreamHandler + private volatile Map policyStreamHandlerMap = new HashMap<>(); + private String policyEvaluatorId; + private StreamContext context; + + public PolicyGroupEvaluatorImpl(String policyEvaluatorId) { + this.policyEvaluatorId = policyEvaluatorId; + } + + public void init(StreamContext context, AlertStreamCollector collector) { + this.collector = collector; + this.policyStreamHandlerMap = new HashMap<>(); + this.context = context; + Thread.currentThread().setName(policyEvaluatorId); + } + + public void nextEvent(PartitionedEvent event) { + this.context.counter().incr("receive_count"); + dispatch(event); + } + + @Override + public String getName() { + return this.policyEvaluatorId; + } + + public void close() { + for (PolicyStreamHandler handler : policyStreamHandlerMap.values()) { + try { + handler.close(); + } catch (Exception e) { + LOG.error("Failed to close handler {}", handler.toString(), e); + } + } + } + + /** + * fixme make selection of PolicyStreamHandler to be more efficient. 
+ * + * @param partitionedEvent PartitionedEvent + */ + private void dispatch(PartitionedEvent partitionedEvent) { + boolean handled = false; + for (Map.Entry policyStreamHandler : policyStreamHandlerMap.entrySet()) { + if (isAcceptedByPolicy(partitionedEvent, policyDefinitionMap.get(policyStreamHandler.getKey()))) { + try { + handled = true; + this.context.counter().incr("eval_count"); + policyStreamHandler.getValue().send(partitionedEvent.getEvent()); + } catch (Exception e) { + this.context.counter().incr("fail_count"); + LOG.error("{} failed to handle {}", policyStreamHandler.getValue(), partitionedEvent.getEvent(), e); + } + } + } + if (!handled) { + this.context.counter().incr("drop_count"); + LOG.warn("Drop stream non-matched event {}, which should not be sent to evaluator", partitionedEvent); + } else { + this.context.counter().incr("accept_count"); + } + } + + private static boolean isAcceptedByPolicy(PartitionedEvent event, PolicyDefinition policy) { + return policy.getPartitionSpec().contains(event.getPartition()) + && (policy.getInputStreams().contains(event.getEvent().getStreamId()) + || policy.getDefinition().getInputStreams().contains(event.getEvent().getStreamId())); + } + + @Override + public void onPolicyChange(List allPolicies, Collection addedPolicies, Collection removedPolicies, Collection modifiedPolicies) { + + } + +// @Override +// public void onPolicyChange(String version, List added, List removed, List modified, Map sds) { +// Map copyPolicies = new HashMap<>(policyDefinitionMap); +// Map copyHandlers = new HashMap<>(policyStreamHandlerMap); +// for (PolicyDefinition pd : added) { +// inplaceAdd(copyPolicies, copyHandlers, pd, sds); +// } +// for (PolicyDefinition pd : removed) { +// inplaceRemove(copyPolicies, copyHandlers, pd); +// } +// for (PolicyDefinition pd : modified) { +// inplaceRemove(copyPolicies, copyHandlers, pd); +// inplaceAdd(copyPolicies, copyHandlers, pd, sds); +// } +// +// // logging +// LOG.info("{} with {} Policy metadata updated with added={}, removed={}, modified={}", policyEvaluatorId, version, added, removed, modified); +// +// // switch reference +// this.policyDefinitionMap = copyPolicies; +// this.policyStreamHandlerMap = copyHandlers; +// } + + private void inplaceAdd(Map policies, Map handlers, PolicyDefinition policy, Map sds) { + if (handlers.containsKey(policy.getName())) { + LOG.error("metadata calculation error, try to add existing PolicyDefinition " + policy); + } else { + policies.put(policy.getName(), policy); + CompositePolicyHandler handler = new CompositePolicyHandler(sds); + try { + PolicyHandlerContext handlerContext = new PolicyHandlerContext(); + handlerContext.setPolicyCounter(this.context.counter()); + handlerContext.setPolicyDefinition(policy); + handlerContext.setPolicyEvaluator(this); + handlerContext.setPolicyEvaluatorId(policyEvaluatorId); + handlerContext.setConfig(this.context.config()); + handler.prepare(collector, handlerContext); + handlers.put(policy.getName(), handler); + } catch (Exception e) { + LOG.error(e.getMessage(), e); + policies.remove(policy.getName()); + handlers.remove(policy.getName()); + } + } + } + + private void inplaceRemove(Map policies, Map handlers, PolicyDefinition policy) { + if (handlers.containsKey(policy.getName())) { + PolicyStreamHandler handler = handlers.get(policy.getName()); + try { + handler.close(); + } catch (Exception e) { + LOG.error("Failed to close handler {}", handler, e); + } finally { + policies.remove(policy.getName()); + handlers.remove(policy.getName()); + 
LOG.info("Removed policy: {}", policy); + } + } else { + LOG.error("metadata calculation error, try to remove nonexisting PolicyDefinition: " + policy); + } + } + + + public CompositePolicyHandler getPolicyHandler(String policy) { + return policyStreamHandlerMap.get(policy); + } + +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyHandlerContext.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyHandlerContext.java new file mode 100644 index 0000000000..fc53069e17 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyHandlerContext.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.eagle.flink; + +import com.typesafe.config.Config; + +public class PolicyHandlerContext { + private PolicyDefinition policyDefinition; + private PolicyGroupEvaluator policyEvaluator; + private StreamCounter policyCounter; + private String policyEvaluatorId; + private Config config; + + public PolicyDefinition getPolicyDefinition() { + return policyDefinition; + } + + public void setPolicyDefinition(PolicyDefinition policyDefinition) { + this.policyDefinition = policyDefinition; + } + + public PolicyGroupEvaluator getPolicyEvaluator() { + return policyEvaluator; + } + + public void setPolicyEvaluator(PolicyGroupEvaluator policyEvaluator) { + this.policyEvaluator = policyEvaluator; + } + + public void setPolicyCounter(StreamCounter metric) { + this.policyCounter = metric; + } + + public StreamCounter getPolicyCounter() { + return policyCounter; + } + + public String getPolicyEvaluatorId() { + return policyEvaluatorId; + } + + public void setPolicyEvaluatorId(String policyEvaluatorId) { + this.policyEvaluatorId = policyEvaluatorId; + } + + public Config getConfig() { + return config; + } + + public void setConfig(Config config) { + this.config = config; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java new file mode 100755 index 0000000000..0c97df44e9 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +public interface PolicyStreamHandler { + void prepare(Collector collector, PolicyHandlerContext context) throws Exception; + + void send(StreamEvent event) throws Exception; + + void close() throws Exception; +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandlers.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandlers.java new file mode 100644 index 0000000000..7b1be47fb9 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandlers.java @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
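The three-method contract above is the extension point for evaluation engines: prepare receives the alert collector and the per-policy context, send is called once per event, and close releases resources. A trivial custom handler could look like this (illustrative only; the Collector type parameter was elided in this patch, so it is shown raw here):

    // Illustrative no-op handler showing the shape of the contract.
    public class NoopPolicyHandler implements PolicyStreamHandler {
        private Collector collector;

        @Override
        public void prepare(Collector collector, PolicyHandlerContext context) {
            this.collector = collector; // keep the sink used to emit alerts
        }

        @Override
        public void send(StreamEvent event) {
            // a real handler would evaluate the event and emit alerts via the collector
        }

        @Override
        public void close() {
        }
    }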

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+/**
+ * TODO/FIXME: support multi-stage definitions within a single policy. The methods in this class are hard to follow right now (hard-coded stage indexes 0/1).
+ */
+public class PolicyStreamHandlers {
+    private static final Logger LOG = LoggerFactory.getLogger(PolicyStreamHandlers.class);
+
+    public static final String SIDDHI_ENGINE = "siddhi";
+    public static final String NO_DATA_ALERT_ENGINE = "nodataalert";
+    public static final String ABSENCE_ALERT_ENGINE = "absencealert";
+    public static final String CUSTOMIZED_ENGINE = "Custom";
+
+    public static PolicyStreamHandler createHandler(PolicyDefinition.Definition definition, Map<String, StreamDefinition> sds) {
+        if (SIDDHI_ENGINE.equals(definition.getType())) {
+            return new SiddhiPolicyHandler(sds, 0); // FIXME: 8/2/16
+        }
+        throw new IllegalArgumentException("Illegal policy stream handler type " + definition.getType());
+    }
+
+    public static PolicyStreamHandler createStateHandler(String type, Map<String, StreamDefinition> sds) {
+        if (SIDDHI_ENGINE.equals(type)) {
+            return new SiddhiPolicyStateHandler(sds, 1); // FIXME: 8/2/16
+        }
+        throw new IllegalArgumentException("Illegal policy state handler type " + type);
+    }
+}
\ No newline at end of file
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java
new file mode 100644
index 0000000000..9ddda847b5
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java
@@ -0,0 +1,14 @@
+package org.apache.eagle.flink;
+
+import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
+import org.apache.flink.util.Collector;
+
+public class SiddhiCEPOp extends KeyedProcessFunction<String, StreamEvent, AlertPublishEvent> {
+    @Override
+    public void processElement(StreamEvent value, Context ctx, Collector<AlertPublishEvent> out) throws Exception {
+        if (value.data[0].equals(100)) {
+            AlertPublishEvent event = new AlertPublishEvent();
+            out.collect(event);
+        }
+    }
+}
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java
new file mode 100644
index 0000000000..b26eefe7b9
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
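createHandler dispatches on the definition's type field, so obtaining the Siddhi handler for a policy's first stage looks roughly like this (a sketch; policy, sds, collector and handlerContext are assumed, and only the "siddhi" type is implemented in this patch):

    // Sketch: resolve and prepare the handler for a policy's core definition.
    PolicyStreamHandler handler =
        PolicyStreamHandlers.createHandler(policy.getDefinition(), sds);
    handler.prepare(collector, handlerContext);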

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.wso2.siddhi.query.api.definition.AbstractDefinition;
+import org.wso2.siddhi.query.api.definition.Attribute;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class SiddhiDefinitionAdapter {
+    private static final Logger LOG = LoggerFactory.getLogger(SiddhiDefinitionAdapter.class);
+    public static final String DEFINE_STREAM_TEMPLATE = "define stream %s ( %s );";
+
+    public static String buildStreamDefinition(StreamDefinition streamDefinition) {
+        List<String> columns = new ArrayList<>();
+        Preconditions.checkNotNull(streamDefinition, "StreamDefinition is null");
+        if (streamDefinition.getColumns() != null) {
+            for (StreamColumn column : streamDefinition.getColumns()) {
+                columns.add(String.format("%s %s", column.getName(), convertToSiddhiAttributeType(column.getType()).toString().toLowerCase()));
+            }
+        } else {
+            LOG.warn("No columns found for stream {}", streamDefinition.getStreamId());
+        }
+        return String.format(DEFINE_STREAM_TEMPLATE, streamDefinition.getStreamId(), StringUtils.join(columns, ","));
+    }
+
+    public static Attribute.Type convertToSiddhiAttributeType(StreamColumn.Type type) {
+        if (_EAGLE_SIDDHI_TYPE_MAPPING.containsKey(type)) {
+            return _EAGLE_SIDDHI_TYPE_MAPPING.get(type);
+        }
+
+        throw new IllegalArgumentException("Unknown stream type: " + type);
+    }
+
+    public static Class<?> convertToJavaAttributeType(StreamColumn.Type type) {
+        if (_EAGLE_JAVA_TYPE_MAPPING.containsKey(type)) {
+            return _EAGLE_JAVA_TYPE_MAPPING.get(type);
+        }
+
+        throw new IllegalArgumentException("Unknown stream type: " + type);
+    }
+
+    public static StreamColumn.Type convertFromJavaAttributeType(Class<?> type) {
+        if (_JAVA_EAGLE_TYPE_MAPPING.containsKey(type)) {
+            return _JAVA_EAGLE_TYPE_MAPPING.get(type);
+        }
+
+        throw new IllegalArgumentException("Unknown stream type: " + type);
+    }
+
+    public static StreamColumn.Type convertFromSiddhiAttributeType(Attribute.Type type) {
+        if (_SIDDHI_EAGLE_TYPE_MAPPING.containsKey(type)) {
+            return _SIDDHI_EAGLE_TYPE_MAPPING.get(type);
+        }
+
+        throw new IllegalArgumentException("Unknown siddhi type: " + type);
+    }
+
+    public static String buildSiddhiExecutionPlan(PolicyDefinition policyDefinition, Map<String, StreamDefinition> sds) {
+        StringBuilder builder = new StringBuilder();
+        PolicyDefinition.Definition coreDefinition = policyDefinition.getDefinition();
+        // fall back to the policy-level input streams if the definition itself declares none
+        List<String> inputStreams = coreDefinition.getInputStreams();
+        if (inputStreams == null || inputStreams.isEmpty()) {
+            inputStreams = policyDefinition.getInputStreams();
+        }
+
+        for (String inputStream : inputStreams) {
+            builder.append(SiddhiDefinitionAdapter.buildStreamDefinition(sds.get(inputStream)));
+            builder.append("\n");
+        }
+        builder.append(coreDefinition.value);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Generated siddhi execution plan: {} from definition: {}", builder.toString(), coreDefinition);
+        }
+        return builder.toString();
+    }
+
+    public static String buildSiddhiExecutionPlan(String policyDefinition, Map<String, StreamDefinition> inputStreamDefinitions) {
+        StringBuilder builder = new StringBuilder();
+        for (Map.Entry<String, StreamDefinition> entry : inputStreamDefinitions.entrySet()) {
+            builder.append(SiddhiDefinitionAdapter.buildStreamDefinition(entry.getValue()));
+            builder.append("\n");
+        }
+        builder.append(policyDefinition);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Generated siddhi execution plan: {}", builder.toString());
+        }
+        return builder.toString();
+    }
+
+    /**
+     * Mappings between StreamColumn.Type (STRING, INT, LONG, FLOAT, DOUBLE, BOOL, OBJECT),
+     * Siddhi attribute types and Java classes.
+     */
+    private static final Map<StreamColumn.Type, Attribute.Type> _EAGLE_SIDDHI_TYPE_MAPPING = new HashMap<>();
+    private static final Map<StreamColumn.Type, Class<?>> _EAGLE_JAVA_TYPE_MAPPING = new HashMap<>();
+    private static final Map<Class<?>, StreamColumn.Type> _JAVA_EAGLE_TYPE_MAPPING = new HashMap<>();
+    private static final Map<Attribute.Type, StreamColumn.Type> _SIDDHI_EAGLE_TYPE_MAPPING = new HashMap<>();
+
+    static {
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.STRING, Attribute.Type.STRING);
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.INT, Attribute.Type.INT);
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.LONG, Attribute.Type.LONG);
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.FLOAT, Attribute.Type.FLOAT);
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.DOUBLE, Attribute.Type.DOUBLE);
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.BOOL, Attribute.Type.BOOL);
+        _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.OBJECT, Attribute.Type.OBJECT);
+
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.STRING, String.class);
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.INT, Integer.class);
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.LONG, Long.class);
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.FLOAT, Float.class);
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.DOUBLE, Double.class);
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.BOOL, Boolean.class);
+        _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.OBJECT, Object.class);
+
+        _JAVA_EAGLE_TYPE_MAPPING.put(String.class, StreamColumn.Type.STRING);
+        _JAVA_EAGLE_TYPE_MAPPING.put(Integer.class, StreamColumn.Type.INT);
+        _JAVA_EAGLE_TYPE_MAPPING.put(Long.class, StreamColumn.Type.LONG);
+        _JAVA_EAGLE_TYPE_MAPPING.put(Float.class, StreamColumn.Type.FLOAT);
+        _JAVA_EAGLE_TYPE_MAPPING.put(Double.class, StreamColumn.Type.DOUBLE);
+        _JAVA_EAGLE_TYPE_MAPPING.put(Boolean.class, StreamColumn.Type.BOOL);
+        _JAVA_EAGLE_TYPE_MAPPING.put(Object.class, StreamColumn.Type.OBJECT);
+
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.STRING, StreamColumn.Type.STRING);
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.INT, StreamColumn.Type.INT);
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.LONG, StreamColumn.Type.LONG);
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.FLOAT, StreamColumn.Type.FLOAT);
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.DOUBLE, StreamColumn.Type.DOUBLE);
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.BOOL, StreamColumn.Type.BOOL);
+        _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.OBJECT, StreamColumn.Type.OBJECT);
+    }
+
+    public static StreamDefinition convertFromSiddiDefinition(AbstractDefinition siddhiDefinition) {
+        StreamDefinition streamDefinition = new StreamDefinition();
+        streamDefinition.setStreamId(siddhiDefinition.getId());
+        List<StreamColumn> columns = new ArrayList<>(siddhiDefinition.getAttributeNameArray().length);
+        for (Attribute attribute : siddhiDefinition.getAttributeList()) {
+            StreamColumn column = new StreamColumn();
+            column.setType(convertFromSiddhiAttributeType(attribute.getType()));
+            column.setName(attribute.getName());
+            columns.add(column);
+        }
+        streamDefinition.setColumns(columns);
+        
streamDefinition.setTimeseries(true); + streamDefinition.setDescription("Auto-generated stream schema from siddhi for " + siddhiDefinition.getId()); + return streamDefinition; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java new file mode 100755 index 0000000000..f74148de0a --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java @@ -0,0 +1,115 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
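Concretely, for a stream with columns (name string, value double) and a one-line filter rule, the two-argument buildSiddhiExecutionPlan above yields plan text of the shape sketched below (the stream id and the rule are invented for illustration; java.util imports are assumed):

    // Sketch: schema plus rule text in, Siddhi execution plan text out.
    StreamDefinition sd = new StreamDefinition();
    sd.setStreamId("sampleStream_1");
    sd.setColumns(Arrays.asList(
        new StreamColumn.Builder().name("name").type(StreamColumn.Type.STRING).build(),
        new StreamColumn.Builder().name("value").type(StreamColumn.Type.DOUBLE).build()));
    String plan = SiddhiDefinitionAdapter.buildSiddhiExecutionPlan(
        "from sampleStream_1[value > 100.0] select * insert into outputStream;",
        Collections.singletonMap("sampleStream_1", sd));
    // plan now contains:
    // define stream sampleStream_1 ( name string,value double );
    // from sampleStream_1[value > 100.0] select * insert into outputStream;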

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.wso2.siddhi.core.ExecutionPlanRuntime;
+import org.wso2.siddhi.core.SiddhiManager;
+import org.wso2.siddhi.core.stream.input.InputHandler;
+
+import java.util.List;
+import java.util.Map;
+
+public class SiddhiPolicyHandler implements PolicyStreamHandler {
+    private static final Logger LOG = LoggerFactory.getLogger(SiddhiPolicyHandler.class);
+    private ExecutionPlanRuntime executionRuntime;
+    private SiddhiManager siddhiManager;
+    private Map<String, StreamDefinition> sds;
+    private PolicyDefinition policy;
+    private PolicyHandlerContext context;
+
+    private int currentIndex = 0; // the index of the current definition statement inside the policy definition
+
+    public SiddhiPolicyHandler(Map<String, StreamDefinition> sds, int index) {
+        this.sds = sds;
+        this.currentIndex = index;
+    }
+
+    protected String generateExecutionPlan(PolicyDefinition policyDefinition, Map<String, StreamDefinition> sds) throws StreamNotDefinedException {
+        return SiddhiDefinitionAdapter.buildSiddhiExecutionPlan(policyDefinition, sds);
+    }
+
+    @Override
+    public void prepare(final Collector collector, PolicyHandlerContext context) throws Exception {
+        LOG.info("Initializing handler for policy {}", context.getPolicyDefinition());
+        this.policy = context.getPolicyDefinition();
+        this.siddhiManager = new SiddhiManager();
+        String plan = generateExecutionPlan(policy, sds);
+        try {
+            this.executionRuntime = siddhiManager.createExecutionPlanRuntime(plan);
+            LOG.info("Created siddhi runtime {}", executionRuntime.getName());
+        } catch (Exception parserException) {
+            LOG.error("Failed to create siddhi runtime for policy: {}, siddhi plan: \n\n{}\n", context.getPolicyDefinition().getName(), plan, parserException);
+            throw parserException;
+        }
+
+        // add output stream callback
+        List<String> outputStreams = getOutputStreams(policy);
+        for (final String outputStream : outputStreams) {
+            if (executionRuntime.getStreamDefinitionMap().containsKey(outputStream)) {
+                StreamDefinition streamDefinition = SiddhiDefinitionAdapter.convertFromSiddiDefinition(executionRuntime.getStreamDefinitionMap().get(outputStream));
+                this.executionRuntime.addCallback(outputStream,
+                    new AlertStreamCallback(outputStream, streamDefinition,
+                        collector, context, currentIndex));
+            } else {
+                throw new IllegalStateException("Undefined output stream " + outputStream);
+            }
+        }
+        this.executionRuntime.start();
+        this.context = context;
+        LOG.info("Initialized policy handler for policy: {}", policy.getName());
+    }
+
+    protected List<String> getOutputStreams(PolicyDefinition policy) {
+        return policy.getOutputStreams().isEmpty() ? 
policy.getDefinition().getOutputStreams() : policy.getOutputStreams(); + } + + public void send(StreamEvent event) throws Exception { + context.getPolicyCounter().incr(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "receive_count")); + String streamId = event.getStreamId(); + InputHandler inputHandler = executionRuntime.getInputHandler(streamId); + if (inputHandler != null) { + context.getPolicyCounter().incr(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "eval_count")); + inputHandler.send(event.getTimestamp(), event.getData()); + + if (LOG.isDebugEnabled()) { + LOG.debug("sent event to siddhi stream {} ", streamId); + } + } else { + context.getPolicyCounter().incr(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "drop_count")); + LOG.warn("No input handler found for stream {}", streamId); + } + } + + public void close() throws Exception { + LOG.info("Closing handler for policy {}", this.policy.getName()); + this.executionRuntime.shutdown(); + LOG.info("Shutdown siddhi runtime {}", this.executionRuntime.getName()); + this.siddhiManager.shutdown(); + LOG.info("Shutdown siddhi manager {}", this.siddhiManager); + LOG.info("Closed handler for policy {}", this.policy.getName()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("SiddhiPolicyHandler for policy: "); + sb.append(this.policy == null ? "" : this.policy.getName()); + return sb.toString(); + } + +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyStateHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyStateHandler.java new file mode 100644 index 0000000000..3d67e55a69 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyStateHandler.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
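End to end, the handler above compiles the policy into a Siddhi runtime in prepare, routes each event to the runtime's InputHandler in send, and tears the runtime down in close. A driving sketch (sds, collector, handlerContext and streamEvent are assumed to exist):

    // Sketch of driving SiddhiPolicyHandler directly, stage index 0.
    SiddhiPolicyHandler handler = new SiddhiPolicyHandler(sds, 0);
    handler.prepare(collector, handlerContext); // compiles the plan, registers alert callbacks
    handler.send(streamEvent);                  // dispatched by streamId to the InputHandler
    handler.close();                            // shuts down the runtime and the SiddhiManager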

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created on 7/27/16.
+ */
+public class SiddhiPolicyStateHandler extends SiddhiPolicyHandler {
+
+    private static final Logger LOG = LoggerFactory.getLogger(SiddhiPolicyStateHandler.class);
+
+    public SiddhiPolicyStateHandler(Map<String, StreamDefinition> sds, int index) {
+        super(sds, index);
+    }
+
+    @Override
+    protected String generateExecutionPlan(PolicyDefinition policyDefinition, Map<String, StreamDefinition> sds) throws StreamNotDefinedException {
+        StringBuilder builder = new StringBuilder();
+        PolicyDefinition.Definition stateDefinition = policyDefinition.getStateDefinition();
+        List<String> inputStreams = stateDefinition.getInputStreams();
+        for (String inputStream : inputStreams) { // the state stream follows the output stream of the policy definition
+            builder.append(SiddhiDefinitionAdapter.buildStreamDefinition(sds.get(inputStream)));
+            builder.append("\n");
+        }
+        builder.append(stateDefinition.value);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Generated siddhi state execution plan: {} from definition: {}", builder.toString(), stateDefinition);
+        }
+        return builder.toString();
+    }
+
+    @Override
+    protected List<String> getOutputStreams(PolicyDefinition policy) {
+        return policy.getStateDefinition().getOutputStreams();
+    }
+
+    // more validation on prepare
+
+}
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java
new file mode 100644
index 0000000000..c0733b0cf8
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.apache.eagle.flink; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import javax.xml.bind.annotation.adapters.XmlAdapter; +import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; +import java.io.IOException; +import java.io.Serializable; +import java.util.HashMap; +import java.util.Objects; + +public class StreamColumn implements Serializable { + + private static final long serialVersionUID = -5457861313624389106L; + private String name; + private Type type; + private Object defaultValue; + private boolean required = true; + private String description; + private String nodataExpression; + + public String toString() { + return String.format("StreamColumn=name[%s], type=[%s], defaultValue=[%s], required=[%s], nodataExpression=[%s]", + name, type, defaultValue, required, nodataExpression); + } + + @Override + public int hashCode() { + return new HashCodeBuilder() + .append(this.name) + .append(this.type) + .append(this.defaultValue) + .append(this.required) + .append(this.description) + .append(this.nodataExpression) + .build(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof StreamColumn)) { + return false; + } + return Objects.equals(this.name, ((StreamColumn) obj).name) + && Objects.equals(this.type, ((StreamColumn) obj).type) + && Objects.equals(this.defaultValue, ((StreamColumn) obj).defaultValue) + && Objects.equals(this.required, ((StreamColumn) obj).required) + && Objects.equals(this.description, ((StreamColumn) obj).description) + && Objects.equals(this.nodataExpression, ((StreamColumn) obj).nodataExpression); + } + + public String getNodataExpression() { + return nodataExpression; + } + + public void setNodataExpression(String nodataExpression) { + this.nodataExpression = nodataExpression; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @XmlJavaTypeAdapter(StreamColumnTypeAdapter.class) + public Type getType() { + return type; + } + + public void setType(Type type) { + this.type = type; + } + + @XmlJavaTypeAdapter(value = DefaultValueAdapter.class) + public Object getDefaultValue() { + return defaultValue; + } + + private void ensureDefaultValueType() { + if (this.getDefaultValue() != null && (this.getDefaultValue() instanceof String) && this.getType() != Type.STRING) { + switch (this.getType()) { + case INT: + this.setDefaultValue(Integer.valueOf((String) this.getDefaultValue())); + break; + case LONG: + this.setDefaultValue(Long.valueOf((String) this.getDefaultValue())); + break; + case FLOAT: + this.setDefaultValue(Float.valueOf((String) this.getDefaultValue())); + break; + case DOUBLE: + this.setDefaultValue(Double.valueOf((String) this.getDefaultValue())); + break; + case BOOL: + this.setDefaultValue(Boolean.valueOf((String) this.getDefaultValue())); + break; + case OBJECT: + try { + this.setDefaultValue(new ObjectMapper().readValue((String) this.getDefaultValue(), HashMap.class)); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } + break; + default: + throw new IllegalArgumentException("Illegal type: " + this.getType()); + } + } + } + + public void setDefaultValue(Object defaultValue) { + this.defaultValue = defaultValue; + ensureDefaultValueType(); + } + + public boolean isRequired() { + return required; + } + + public void setRequired(boolean required) { + this.required = required; + } + + public String getDescription() { 
+ return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public enum Type implements Serializable { + STRING("string"), INT("int"), LONG("long"), FLOAT("float"), DOUBLE("double"), BOOL("bool"), OBJECT("object"); + + private final String name; + + Type(String name) { + this.name = name; + } + + @Override + public String toString() { + return name; + } + + @com.fasterxml.jackson.annotation.JsonCreator + public static Type getEnumFromValue(String value) { + for (Type testEnum : values()) { + if (testEnum.name.equalsIgnoreCase(value)) { + return testEnum; + } + } + throw new IllegalArgumentException(); + } + } + + public static class StreamColumnTypeAdapter extends XmlAdapter { + + @Override + public Type unmarshal(String v) throws Exception { + return Type.getEnumFromValue(v); + } + + @Override + public String marshal(Type v) throws Exception { + return v.name; + } + } + + public static class DefaultValueAdapter extends XmlAdapter { + @Override + public Object unmarshal(String v) throws Exception { + return v; + } + + @Override + public String marshal(Object v) throws Exception { + return v.toString(); + } + } + + public static class Builder { + private StreamColumn column; + + public Builder() { + column = new StreamColumn(); + } + + public Builder name(String name) { + column.setName(name); + return this; + } + + public Builder type(Type type) { + column.setType(type); + return this; + } + + public Builder defaultValue(Object defaultValue) { + column.setDefaultValue(defaultValue); + return this; + } + + public Builder required(boolean required) { + column.setRequired(required); + return this; + } + + public StreamColumn build() { + return column; + } + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContext.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContext.java new file mode 100644 index 0000000000..e1afc20a10 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContext.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import com.typesafe.config.Config; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
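The Builder at the end of StreamColumn gives a compact way to declare schema columns; note that setDefaultValue coerces a String default to the column's declared type, so the type should be set before the default. For example (a sketch):

    // Sketch: a required double column whose String default is coerced to Double.
    StreamColumn valueColumn = new StreamColumn.Builder()
        .name("value")
        .type(StreamColumn.Type.DOUBLE)
        .defaultValue("0.0") // ensureDefaultValueType() turns this into Double 0.0
        .required(true)
        .build();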

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +public interface StreamContext { + StreamCounter counter(); + + Config config(); +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java new file mode 100644 index 0000000000..14aec4af4d --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import backtype.storm.metric.api.MultiCountMetric; +import backtype.storm.task.TopologyContext; +import com.typesafe.config.Config; + +public class StreamContextImpl implements StreamContext { + private final Config config; + private final StreamCounter counter; + + public StreamContextImpl(Config config, MultiCountMetric counter, TopologyContext context) { + this.counter = new StormMultiCountMetric(counter); + this.config = config; + } + + @Override + public StreamCounter counter() { + return this.counter; + } + + @Override + public Config config() { + return this.config; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamCounter.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamCounter.java new file mode 100644 index 0000000000..94497e9813 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamCounter.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.eagle.flink; + +public interface StreamCounter { + void incr(String scopeName); + + void incrBy(String scopeName, int length); + + void scope(String scopeName); +} diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java new file mode 100644 index 0000000000..b7589f7a0c --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlElementWrapper; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * This is actually a data source schema. + * + * @since Apr 5, 2016 + */ +public class StreamDefinition implements Serializable { + private static final long serialVersionUID = 2352202882328931825L; + + // Stream unique ID + private String streamId; + + // Stream description + private String description; + + // Is validateable or not + private boolean validate = true; + + // Is timeseries-based stream or not + private boolean timeseries; + + // TODO: Decouple dataSource and siteId from stream definition + + // Stream data source ID + private String dataSource; + + private String group = "global"; + + // + private String streamSource; + + // Tenant (Site) ID + private String siteId; + + private List columns = new ArrayList<>(); + + public String toString() { + return String.format("StreamDefinition[group=%s, streamId=%s, dataSource=%s, description=%s, validate=%s, timeseries=%s, columns=%s", + group, + streamId, + dataSource, + description, + validate, + timeseries, + columns + ); + } + + @Override + public int hashCode() { + return new HashCodeBuilder() + .append(this.streamId) + .append(this.group) + .append(this.description) + .append(this.validate) + .append(this.timeseries) + .append(this.dataSource) + .append(streamSource) + .append(this.siteId) + .append(this.columns) + .build(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof StreamDefinition)) { + return false; + } + StreamDefinition streamDefinition = (StreamDefinition) obj; + return Objects.equals(this.streamId, streamDefinition.streamId) + && Objects.equals(this.group, streamDefinition.group) + && Objects.equals(this.description, streamDefinition.description) + && Objects.equals(this.validate, streamDefinition.validate) + && Objects.equals(this.timeseries, streamDefinition.timeseries) + && 
Objects.equals(this.dataSource, streamDefinition.dataSource) + && Objects.equals(this.streamSource, streamDefinition.streamSource) + && Objects.equals(this.siteId, streamDefinition.siteId) + && CollectionUtils.isEqualCollection(this.columns, streamDefinition.columns); + } + + public String getStreamId() { + return streamId; + } + + public void setStreamId(String streamId) { + this.streamId = streamId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + @Deprecated + public boolean isValidate() { + return validate; + } + + public void setValidate(boolean validate) { + this.validate = validate; + } + + public boolean isTimeseries() { + return timeseries; + } + + public void setTimeseries(boolean timeseries) { + this.timeseries = timeseries; + } + + @XmlElementWrapper(name = "columns") + @XmlElement(name = "column") + public List getColumns() { + return columns; + } + + public void setColumns(List columns) { + this.columns = columns; + } + + public String getDataSource() { + return dataSource; + } + + public void setDataSource(String dataSource) { + this.dataSource = dataSource; + } + + public int getColumnIndex(String column) { + int i = 0; + for (StreamColumn col : this.getColumns()) { + if (col.getName().equals(column)) { + return i; + } + i++; + } + return -1; + } + + public String getSiteId() { + return siteId; + } + + public void setSiteId(String siteId) { + this.siteId = siteId; + } + + public String getStreamSource() { + return streamSource; + } + + public void setStreamSource(String streamSource) { + this.streamSource = streamSource; + } + + public StreamDefinition copy() { + StreamDefinition copied = new StreamDefinition(); + copied.setColumns(this.getColumns()); + copied.setDataSource(this.getDataSource()); + copied.setDescription(this.getDescription()); + copied.setSiteId(this.getSiteId()); + copied.setStreamId(this.getStreamId()); + copied.setGroup(this.getGroup()); + copied.setTimeseries(this.isTimeseries()); + copied.setValidate(this.isValidate()); + return copied; + } + + public String getGroup() { + return group; + } + + public void setGroup(String group) { + this.group = group; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java new file mode 100644 index 0000000000..2461d858f8 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.eagle.flink; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * @since Apr 5, 2016. + */ +public class StreamEvent implements Serializable { + private static final long serialVersionUID = 2765116509856609763L; + + protected String streamId; + protected Object[] data; + protected long timestamp; + protected String metaVersion; + + public StreamEvent() { + } + + public StreamEvent(String streamId, long timestamp, Object[] data) { + this.setStreamId(streamId); + this.setTimestamp(timestamp); + this.setData(data); + } + + public StreamEvent(String streamId, long timestamp, Object[] data, String metaVersion) { + this.setStreamId(streamId); + this.setTimestamp(timestamp); + this.setData(data); + this.setMetaVersion(metaVersion); + } + + public String getStreamId() { + return streamId; + } + + public void setStreamId(String streamId) { + this.streamId = streamId; + } + + public long getKey(){ + return 1; + } + + public void setData(Object[] data) { + this.data = data; + } + + public long getTimestamp() { + return timestamp; + } + + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + + public String getMetaVersion() { + return metaVersion; + } + + public void setMetaVersion(String metaVersion) { + this.metaVersion = metaVersion; + } + + @Override + public int hashCode() { + return new HashCodeBuilder().append(streamId).append(timestamp).append(data).append(metaVersion).build(); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj instanceof StreamEvent) { + StreamEvent another = (StreamEvent) obj; + return Objects.equals(this.streamId, another.streamId) && this.timestamp == another.timestamp + && Arrays.deepEquals(this.data, another.data) && Objects.equals(this.metaVersion, another.metaVersion); + } + return false; + } + + @Override + public String toString() { + List dataStrings = new ArrayList<>(); + if (this.getData() != null) { + for (Object obj : this.getData()) { + if (obj != null) { + dataStrings.add(obj.toString()); + } else { + dataStrings.add(null); + } + } + } + return String.format("StreamEvent[stream=%S,timestamp=%s,data=[%s],metaVersion=%s]", + this.getStreamId(), + DateTimeUtil.millisecondsToHumanDateWithMilliseconds(this.getTimestamp()), + StringUtils.join(dataStrings, ","), + this.getMetaVersion()); + } + + public static StreamEventBuilder builder() { + return new StreamEventBuilder(); + } + + /** + * @return cloned new event object. 
+ */ + public StreamEvent copy() { + StreamEvent newEvent = new StreamEvent(); + newEvent.setTimestamp(this.getTimestamp()); + newEvent.setData(this.getData()); + newEvent.setStreamId(this.getStreamId()); + newEvent.setMetaVersion(this.getMetaVersion()); + return newEvent; + } + + public void copyFrom(StreamEvent event) { + this.setTimestamp(event.getTimestamp()); + this.setData(event.getData()); + this.setStreamId(event.getStreamId()); + this.setMetaVersion(event.getMetaVersion()); + } + + public Object[] getData() { + return data; + } + + public Object[] getData(StreamDefinition streamDefinition, List column) { + ArrayList result = new ArrayList<>(column.size()); + for (String colName : column) { + result.add(this.getData()[streamDefinition.getColumnIndex(colName)]); + } + return result.toArray(); + } + + public Object[] getData(StreamDefinition streamDefinition, String... column) { + ArrayList result = new ArrayList<>(column.length); + for (String colName : column) { + result.add(this.getData()[streamDefinition.getColumnIndex(colName)]); + } + return result.toArray(); + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventBuilder.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventBuilder.java new file mode 100644 index 0000000000..5c14eff104 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventBuilder.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.eagle.flink;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class StreamEventBuilder {
+    private static final Logger LOG = LoggerFactory.getLogger(StreamEventBuilder.class);
+
+    private StreamEvent instance;
+    private StreamDefinition streamDefinition;
+
+    public StreamEventBuilder() {
+        instance = new StreamEvent();
+    }
+
+    public StreamEventBuilder schema(StreamDefinition streamDefinition) {
+        this.streamDefinition = streamDefinition;
+        if (instance.getStreamId() == null) {
+            instance.setStreamId(streamDefinition.getStreamId());
+        }
+        return this;
+    }
+
+    public StreamEventBuilder streamId(String streamId) {
+        instance.setStreamId(streamId);
+        return this;
+    }
+
+    public StreamEventBuilder attributes(Map<String, Object> data, StreamDefinition streamDefinition) {
+        this.schema(streamDefinition);
+        List<StreamColumn> columnList = streamDefinition.getColumns();
+        if (columnList != null && columnList.size() > 0) {
+            List<Object> values = new ArrayList<>(columnList.size());
+            for (StreamColumn column : columnList) {
+                values.add(data.getOrDefault(column.getName(), column.getDefaultValue()));
+            }
+            instance.setData(values.toArray());
+        } else {
+            LOG.warn("All data [{}] ignored because no columns are defined in schema {}", data, streamDefinition);
+        }
+        return this;
+    }
+
+    public StreamEventBuilder attributes(Map<String, Object> data) {
+        return attributes(data, this.streamDefinition);
+    }
+
+    public StreamEventBuilder attributes(Object... data) {
+        instance.setData(data);
+        return this;
+    }
+
+    public StreamEventBuilder timestamep(long timestamp) {
+        instance.setTimestamp(timestamp);
+        return this;
+    }
+
+    public StreamEventBuilder metaVersion(String metaVersion) {
+        instance.setMetaVersion(metaVersion);
+        return this;
+    }
+
+    public StreamEvent build() {
+        if (instance.getStreamId() == null) {
+            throw new IllegalArgumentException("streamId is null for event: " + instance);
+        }
+        return instance;
+    }
+
+    public StreamEventBuilder copyFrom(StreamEvent event) {
+        this.instance.copyFrom(event);
+        return this;
+    }
+}
\ No newline at end of file
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java
new file mode 100644
index 0000000000..c256ee6cd6
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eagle.flink;
+
+import java.io.Serializable;
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * An iterator over a fixed list of sample StreamEvent records.
+ */
+final class StreamEventIterator implements Iterator<StreamEvent>, Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final Timestamp INITIAL_TIMESTAMP = Timestamp.valueOf("2019-01-01 00:00:00");
+
+    private static final long SIX_MINUTES = 6 * 60 * 1000;
+
+    private final boolean bounded;
+
+    private int index = 0;
+
+    private long timestamp;
+
+    static StreamEventIterator bounded() {
+        return new StreamEventIterator(true);
+    }
+
+    static StreamEventIterator unbounded() {
+        return new StreamEventIterator(false);
+    }
+
+    private StreamEventIterator(boolean bounded) {
+        this.bounded = bounded;
+        this.timestamp = INITIAL_TIMESTAMP.getTime();
+    }
+
+    @Override
+    public boolean hasNext() {
+        if (index < data.size()) {
+            return true;
+        } else if (!bounded) {
+            // unbounded mode wraps around and replays the sample data forever
+            index = 0;
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    @Override
+    public StreamEvent next() {
+        StreamEvent event = data.get(index++);
+        event.setTimestamp(timestamp);
+        timestamp += SIX_MINUTES;
+        return event;
+    }
+
+    private static List<StreamEvent> data = Arrays.asList(
+        new StreamEvent("testStream_1", 0L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 10L, new Object[]{100}),
+        new StreamEvent("testStream_1", 10L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 20L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 20L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 20L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 30L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 40L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 50L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 70L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 90L, new Object[]{100}),
+        new StreamEvent("testStream_1", 100L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 200L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 210L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 220L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 230L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 250L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 260L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 270L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 300L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 400L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 600L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 1000L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 12000L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 12001L, new Object[]{188.23}),
+        new StreamEvent("testStream_1", 12002L, new Object[]{188.23})
+    );
+}
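(A minimal usage sketch for the StreamEventBuilder above, kept outside the diff. The single-column schema is illustrative only, and note the builder's timestamp method is spelled timestamep throughout this series.)

    StreamColumn valueColumn = new StreamColumn.Builder()
            .name("value").type(StreamColumn.Type.DOUBLE).build();
    StreamDefinition schema = new StreamDefinition();
    schema.setStreamId("testStream_1");
    schema.setColumns(Arrays.asList(valueColumn));

    Map<String, Object> attrs = new HashMap<>();
    attrs.put("value", 188.23);

    StreamEvent event = StreamEvent.builder()
            .schema(schema)                          // also sets streamId from the schema
            .timestamep(System.currentTimeMillis())  // builder method name as defined above
            .attributes(attrs)                       // resolved against schema columns, with defaults
            .build();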
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java
new file mode 100644
index 0000000000..7f099369b6
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java
@@ -0,0 +1,46 @@
+package org.apache.eagle.flink;
+
+import org.apache.flink.annotation.Public;
+import org.apache.flink.streaming.api.functions.source.FromIteratorFunction;
+
+import java.io.Serializable;
+import java.util.Iterator;
+
+/**
+ * A Flink source of sample StreamEvent records, throttled to one event per 100 ms.
+ */
+@Public
+public class StreamEventSource extends FromIteratorFunction<StreamEvent> {
+
+    private static final long serialVersionUID = 1L;
+
+    public StreamEventSource() {
+        super(new RateLimitedIterator<>(StreamEventIterator.unbounded()));
+    }
+
+    private static class RateLimitedIterator<T> implements Iterator<T>, Serializable {
+
+        private static final long serialVersionUID = 1L;
+
+        private final Iterator<T> inner;
+
+        private RateLimitedIterator(Iterator<T> inner) {
+            this.inner = inner;
+        }
+
+        @Override
+        public boolean hasNext() {
+            return inner.hasNext();
+        }
+
+        @Override
+        public T next() {
+            try {
+                Thread.sleep(100);
+            } catch (InterruptedException e) {
+                throw new RuntimeException(e);
+            }
+            return inner.next();
+        }
+    }
+}
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamNotDefinedException.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamNotDefinedException.java
new file mode 100644
index 0000000000..07bafd10f8
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamNotDefinedException.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import java.io.IOException;
+
+public class StreamNotDefinedException extends IOException {
+    private static final long serialVersionUID = 6027811718016485808L;
+
+    public StreamNotDefinedException() {
+    }
+
+    public StreamNotDefinedException(String streamId) {
+        super("Stream definition not found: " + streamId);
+    }
+
+    public StreamNotDefinedException(String streamName, String specVersion) {
+        super(String.format("Stream '%s' not found in spec version '%s'; metadata may not be loaded yet, or the upstream and alert bolts disagree on the metadata version.", streamName, specVersion));
+    }
+
+    public StreamNotDefinedException(String streamName, String streamMetaVersion, String specVersion) {
+        super(String.format("Stream '%s' has meta version '%s', which differs from the current spec version '%s'.", streamName, streamMetaVersion, specVersion));
+    }
+}
\ No newline at end of file
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamPartition.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamPartition.java
new file mode 100644
index 0000000000..ee54620eca
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamPartition.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+import java.io.Serializable;
+import java.util.*;
+
+/**
+ * StreamPartition defines how a data stream is partitioned:
+ * streamId distinguishes streams spawned from the same data source,
+ * type selects the partitioning scheme (GLOBAL, GROUPBY or SHUFFLE),
+ * and columns are the fields the stream is grouped by.
+ */
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class StreamPartition implements Serializable {
+    private static final long serialVersionUID = -3361648309136926040L;
+
+    private String streamId;
+    private Type type;
+    private List<String> columns = new ArrayList<>();
+
+    public StreamPartition() {
+    }
+
+    public StreamPartition(StreamPartition o) {
+        this.streamId = o.streamId;
+        this.type = o.type;
+        this.columns = new ArrayList<>(o.columns);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (other == this) {
+            return true;
+        }
+        if (!(other instanceof StreamPartition)) {
+            return false;
+        }
+        StreamPartition sp = (StreamPartition) other;
+        return Objects.equals(streamId, sp.streamId) && Objects.equals(type, sp.type)
+            && CollectionUtils.isEqualCollection(columns, sp.columns);
+    }
+
+    @Override
+    public int hashCode() {
+        return new HashCodeBuilder().append(streamId).append(type).append(columns).build();
+    }
+
+    public void setType(Type type) {
+        this.type = type;
+    }
+
+    public Type getType() {
+        return this.type;
+    }
+
+    public enum Type {
+        GLOBAL("GLOBAL", 0), GROUPBY("GROUPBY", 1), SHUFFLE("SHUFFLE", 2);
+
+        private final String name;
+        private final int index;
+
+        Type(String name, int index) {
+            this.name = name;
+            this.index = index;
+        }
+
+        @Override
+        public String toString() {
+            return this.name;
+        }
+
+        public static Type locate(String type) {
+            Type t = NAME_TYPE.get(type.toUpperCase());
+            if (t == null) {
+                throw new IllegalStateException("Illegal type name: " + type);
+            }
+            return t;
+        }
+
+        public static Type locate(int index) {
+            Type t = INDEX_TYPE.get(index);
+            if (t == null) {
+                throw new IllegalStateException("Illegal type index: " + index);
+            }
+            return t;
+        }
+
+        private static final Map<String, Type> NAME_TYPE = new HashMap<>();
+        private static final Map<Integer, Type> INDEX_TYPE = new TreeMap<>();
+
+        static {
+            NAME_TYPE.put(GLOBAL.name, GLOBAL);
+            NAME_TYPE.put(GROUPBY.name, GROUPBY);
+            NAME_TYPE.put(SHUFFLE.name, SHUFFLE);
+
+            INDEX_TYPE.put(GLOBAL.index, GLOBAL);
+            INDEX_TYPE.put(GROUPBY.index, GROUPBY);
+            INDEX_TYPE.put(SHUFFLE.index, SHUFFLE);
+        }
+    }
+
+    public List<String> getColumns() {
+        return columns;
+    }
+
+    public void setColumns(List<String> columns) {
+        this.columns = columns;
+    }
+
+    public String getStreamId() {
+        return streamId;
+    }
+
+    public void setStreamId(String streamId) {
+        this.streamId = streamId;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("StreamPartition[streamId=%s,type=%s,columns=[%s]]", this.getStreamId(), this.getType(), StringUtils.join(this.getColumns(), ","));
+    }
+}
\ No newline at end of file
diff --git a/eagle-flink/src/main/resources/log4j.properties b/eagle-flink/src/main/resources/log4j.properties
new file mode 100644
index 0000000000..09edea3850
--- /dev/null
+++ b/eagle-flink/src/main/resources/log4j.properties
@@ -0,0 +1,19 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +log4j.rootLogger=INFO, stdout, DRFA +# standard output +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n \ No newline at end of file diff --git a/pom.xml b/pom.xml index 6e9da486fc..8161ac6bfb 100755 --- a/pom.xml +++ b/pom.xml @@ -143,6 +143,7 @@ eagle-jpm eagle-assembly eagle-topology-check + eagle-flink From 7fb58909f45b730530579448a839d6f870b8c4bc Mon Sep 17 00:00:00 2001 From: yonzhang Date: Sun, 19 Apr 2020 13:33:04 -0700 Subject: [PATCH 4/6] EAGLE-1104, make EagleFlinkStreamApp runnable for simple test --- eagle-flink/pom.xml | 5 + .../org/apache/eagle/flink/AlertSeverity.java | 4 +- .../org/apache/eagle/flink/AlertSink.java | 4 +- .../eagle/flink/AlertStreamCallback.java | 35 ++-- .../apache/eagle/flink/AlertStreamEvent.java | 13 +- .../eagle/flink/CompositePolicyHandler.java | 10 +- .../eagle/flink/EagleFlinkStreamApp.java | 2 +- .../flink/MockSampleMetadataFactory.java | 193 ++++++++++++++++++ .../eagle/flink/MockStreamCollector.java | 50 +++++ .../flink/MockStreamMetadataService.java | 39 ++++ .../apache/eagle/flink/MyStreamCounter.java | 18 ++ .../apache/eagle/flink/PartitionedEvent.java | 2 +- .../eagle/flink/PolicyChangeListener.java | 10 +- .../eagle/flink/PolicyGroupEvaluatorImpl.java | 48 ++--- .../eagle/flink/PolicyStreamHandler.java | 4 +- .../org/apache/eagle/flink/SiddhiCEPOp.java | 47 ++++- .../eagle/flink/SiddhiPolicyHandler.java | 12 +- .../apache/eagle/flink/StreamContextImpl.java | 6 +- .../org/apache/eagle/flink/StreamEvent.java | 2 +- .../eagle/flink/StreamEventIterator.java | 60 +++--- .../test/SiddhiCEPPolicyEventHandlerTest.java | 150 ++++++++++++++ 21 files changed, 607 insertions(+), 107 deletions(-) create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java create mode 100755 eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java create mode 100755 eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java diff --git a/eagle-flink/pom.xml b/eagle-flink/pom.xml index 6c8a52fc15..04928c3a6a 100644 --- a/eagle-flink/pom.xml +++ b/eagle-flink/pom.xml @@ -40,5 +40,10 @@ org.wso2.siddhi siddhi-core + + junit + junit + test + \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java index 7d4769c228..45452b86b1 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSeverity.java @@ -16,6 +16,8 @@ */ package org.apache.eagle.flink; -public enum AlertSeverity { +import java.io.Serializable; + +public enum AlertSeverity{ UNKNOWN, OK, WARNING, CRITICAL, FATAL } \ No newline at end of file diff --git 
a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java index 1e5b0186b1..267ae67ee4 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertSink.java @@ -4,14 +4,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class AlertSink implements SinkFunction { +public class AlertSink implements SinkFunction { private static final long serialVersionUID = 1L; private static final Logger LOG = LoggerFactory.getLogger(AlertSink.class); @Override - public void invoke(AlertPublishEvent value, Context context) { + public void invoke(AlertStreamEvent value, Context context) { LOG.info(value.toString()); } } \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java index 9dd193f03f..903ee9fa19 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java @@ -28,7 +28,6 @@ public class AlertStreamCallback extends StreamCallback { private static final Logger LOG = LoggerFactory.getLogger(AlertStreamCallback.class); private final String outputStream; - private final Collector collector; private final PolicyHandlerContext context; private final StreamDefinition definition; @@ -36,11 +35,9 @@ public class AlertStreamCallback extends StreamCallback { public AlertStreamCallback(String outputStream, StreamDefinition streamDefinition, - Collector collector, PolicyHandlerContext context, int currentIndex) { this.outputStream = outputStream; - this.collector = collector; this.context = context; this.definition = streamDefinition; this.currentIndex = currentIndex; @@ -54,10 +51,9 @@ public void receive(Event[] events) { String policyName = context.getPolicyDefinition().getName(); String siteId = context.getPolicyDefinition().getSiteId(); CompositePolicyHandler handler = ((PolicyGroupEvaluatorImpl) context.getPolicyEvaluator()).getPolicyHandler(policyName); - if (LOG.isDebugEnabled()) { - LOG.debug("Generated {} alerts from policy '{}' in {}, index of definiton {} ", events.length, policyName, context.getPolicyEvaluatorId(), currentIndex); - } + LOG.info("Generated {} alerts from policy '{}' in {}, index of definiton {} ", events.length, policyName, context.getPolicyEvaluatorId(), currentIndex); for (Event e : events) { + org.apache.eagle.flink.Collector eagleCollector = (org.apache.eagle.flink.Collector)e.getData()[0]; AlertStreamEvent event = new AlertStreamEvent(); event.setSiteId(siteId); event.setTimestamp(e.getTimestamp()); @@ -73,19 +69,20 @@ public void receive(Event[] events) { if (LOG.isDebugEnabled()) { LOG.debug("Generate new alert event: {}", event); } - try { - if (handler == null) { - // extreme case: the handler is removed from the evaluator. Just emit. - if (LOG.isDebugEnabled()) { - LOG.debug(" handler not found when callback received event, directly emit. policy removed? "); - } - collector.emit(event); - } else { - handler.send(event, currentIndex + 1); - } - } catch (Exception ex) { - LOG.error(String.format("send event %s to index %d failed with exception. ", event, currentIndex), ex); - } + eagleCollector.emit(event); +// try { +// if (handler == null) { +// // extreme case: the handler is removed from the evaluator. Just emit. 
+// if (LOG.isDebugEnabled()) { +// LOG.debug(" handler not found when callback received event, directly emit. policy removed? "); +// } +// eagleCollector.emit(event); +// } else { +// handler.send(event, currentIndex + 1); +// } +// } catch (Exception ex) { +// LOG.error(String.format("send event %s to index %d failed with exception. ", event, currentIndex), ex); +// } } context.getPolicyCounter().incrBy(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "alert_count"), events.length); } diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java index 2a6c36057b..a117dfdbe0 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java @@ -31,7 +31,6 @@ public class AlertStreamEvent extends StreamEvent { private String createdBy; private long createdTime; private String category; - private AlertSeverity severity = AlertSeverity.WARNING; private boolean duplicationChecked = false; // ---------------------- @@ -136,6 +135,10 @@ public String getAlertId() { return alertId; } + public void setAlertId(String alertId){ + this.alertId = alertId; + } + public void ensureAlertId() { if (this.alertId == null) { this.alertId = UUID.randomUUID().toString(); @@ -166,14 +169,6 @@ public void setCategory(String category) { this.category = category; } - public AlertSeverity getSeverity() { - return severity; - } - - public void setSeverity(AlertSeverity severity) { - this.severity = severity; - } - public String getSiteId() { return siteId; } diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java index 50ae079bd9..032a8ec2b6 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/CompositePolicyHandler.java @@ -42,22 +42,22 @@ public CompositePolicyHandler(Map sds) { } @Override - public void prepare(Collector collector, PolicyHandlerContext context) throws Exception { + public void prepare(PolicyHandlerContext context) throws Exception { this.collector = collector; // TODO: create two handlers policyHandler = PolicyStreamHandlers.createHandler(context.getPolicyDefinition().getDefinition(), sds); - policyHandler.prepare(collector, context); +// policyHandler.prepare(collector, context); handlers.add(policyHandler); if (context.getPolicyDefinition().getStateDefinition() != null) { stateHandler = PolicyStreamHandlers.createStateHandler(context.getPolicyDefinition().getStateDefinition().type, sds); - stateHandler.prepare(collector, context); +// stateHandler.prepare(collector, context); handlers.add(stateHandler); } } @Override - public void send(StreamEvent event) throws Exception { + public void send(StreamEvent event, Collector collector) throws Exception { // policyHandler.send(event); send(event, 0); } @@ -65,7 +65,7 @@ public void send(StreamEvent event) throws Exception { // send event to index of stream handler public void send(StreamEvent event, int idx) throws Exception { if (handlers.size() > idx) { - handlers.get(idx).send(event); + handlers.get(idx).send(event, null); } else if (event instanceof AlertStreamEvent) { if (LOG.isDebugEnabled()) { LOG.debug("Emit new alert event: {}", event); diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java 
b/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java index 76a317f393..590d96bfc3 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java @@ -11,7 +11,7 @@ public static void main(String[] args) throws Exception { .addSource(new StreamEventSource()) .name("eagle-events"); - DataStream alerts = events + DataStream alerts = events .keyBy(StreamEvent::getKey) .process(new SiddhiCEPOp()) .name("eagle-alert-engine"); diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java b/eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java new file mode 100644 index 0000000000..0d17f7677f --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java @@ -0,0 +1,193 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + + +import java.util.*; +import java.util.concurrent.ThreadLocalRandom; + +@SuppressWarnings("serial") +public class MockSampleMetadataFactory { + private static MockStreamMetadataService mockStreamMetadataServiceInstance = null; + + public static MockStreamMetadataService createSingletonMetadataServiceWithSample() { + if (mockStreamMetadataServiceInstance != null) { + return mockStreamMetadataServiceInstance; + } + mockStreamMetadataServiceInstance = new MockStreamMetadataService(); + mockStreamMetadataServiceInstance.registerStream("sampleStream", createSampleStreamDefinition("sampleStream")); + mockStreamMetadataServiceInstance.registerStream("sampleStream_1", createSampleStreamDefinition("sampleStream_1")); + mockStreamMetadataServiceInstance.registerStream("sampleStream_2", createSampleStreamDefinition("sampleStream_2")); + mockStreamMetadataServiceInstance.registerStream("sampleStream_3", createSampleStreamDefinition("sampleStream_3")); + mockStreamMetadataServiceInstance.registerStream("sampleStream_4", createSampleStreamDefinition("sampleStream_4")); + return mockStreamMetadataServiceInstance; + } + + public static StreamDefinition createSampleStreamDefinition(String streamId) { + StreamDefinition sampleStreamDefinition = new StreamDefinition(); + sampleStreamDefinition.setStreamId(streamId); + sampleStreamDefinition.setTimeseries(true); + sampleStreamDefinition.setValidate(true); + sampleStreamDefinition.setDescription("Schema for " + streamId); + List streamColumns = new ArrayList<>(); + + streamColumns.add(new StreamColumn.Builder().name("__collector__").type(StreamColumn.Type.OBJECT).build()); + streamColumns.add(new StreamColumn.Builder().name("name").type(StreamColumn.Type.STRING).build()); + streamColumns.add(new StreamColumn.Builder().name("host").type(StreamColumn.Type.STRING).build()); + streamColumns.add(new StreamColumn.Builder().name("flag").type(StreamColumn.Type.BOOL).build()); + streamColumns.add(new StreamColumn.Builder().name("timestamp").type(StreamColumn.Type.LONG).build()); + streamColumns.add(new StreamColumn.Builder().name("value").type(StreamColumn.Type.DOUBLE).build()); + sampleStreamDefinition.setColumns(streamColumns); + return sampleStreamDefinition; + } + + + /** + * Policy: from sampleStream_1[name == "cpu" and value > 50.0] select name, host, flag, value insert into outputStream; + * + * @return PolicyDefinition[from sampleStream_1[name == "cpu" and value > 50.0] select name, host, flag, value insert into outputStream;] + */ + public static PolicyDefinition createSingleMetricSamplePolicy() { + String definePolicy = "from sampleStream_1[name == \"cpu\" and value > 50.0] select __collector__, name, host, flag, value insert into outputStream;"; + PolicyDefinition policyDefinition = new PolicyDefinition(); + policyDefinition.setName("SamplePolicyForTest"); + policyDefinition.setInputStreams(Arrays.asList("sampleStream_1")); + policyDefinition.setOutputStreams(Arrays.asList("outputStream")); + policyDefinition.setDefinition(new PolicyDefinition.Definition( + PolicyStreamHandlers.SIDDHI_ENGINE, + definePolicy + )); + 
policyDefinition.setPartitionSpec(Arrays.asList(createSampleStreamGroupbyPartition("sampleStream_1", Arrays.asList("name")))); + return policyDefinition; + } + + public static StreamPartition createSampleStreamGroupbyPartition(String streamId, List groupByField) { + StreamPartition streamPartition = new StreamPartition(); + streamPartition.setStreamId(streamId); + streamPartition.setColumns(new ArrayList<>(groupByField)); + streamPartition.setType(StreamPartition.Type.GROUPBY); + return streamPartition; + } + + public static PartitionedEvent createSimpleStreamEvent() { + StreamEvent event = null; + try { + event = StreamEvent.builder() + .schema(MockSampleMetadataFactory.createSingletonMetadataServiceWithSample().getStreamDefinition("sampleStream_1")) + .streamId("sampleStream_1") + .timestamep(System.currentTimeMillis()) + .attributes(new HashMap() {{ + put("name", "cpu"); + put("value", 60.0); + put("unknown", "unknown column value"); + }}).build(); + } catch (StreamNotDefinedException e) { + e.printStackTrace(); + } + PartitionedEvent pEvent = new PartitionedEvent(); + pEvent.setEvent(event); + return pEvent; + } + + private final static String[] SAMPLE_STREAM_NAME_OPTIONS = new String[] { + "cpu", "memory", "disk", "network" + }; + + private final static String[] SAMPLE_STREAM_HOST_OPTIONS = new String[] { + "localhost_1", "localhost_2", "localhost_3", "localhost_4" + }; + + private final static Boolean[] SAMPLE_STREAM_FLAG_OPTIONS = new Boolean[] { + true, false + }; + + private final static Double[] SAMPLE_STREAM_VALUE_OPTIONS = new Double[] { + -0.20, 40.4, 50.5, 60.6, 10000.1 + }; + private final static String[] SAMPLE_STREAM_ID_OPTIONS = new String[] { + "sampleStream_1", "sampleStream_2", "sampleStream_3", "sampleStream_4", + }; + private final static Random RANDOM = ThreadLocalRandom.current(); + + public static StreamEvent createRandomStreamEvent() { + return createRandomStreamEvent(SAMPLE_STREAM_ID_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_ID_OPTIONS.length)]); + } + + public static StreamEvent createRandomStreamEvent(String streamId) { + return createRandomStreamEvent(streamId, System.currentTimeMillis()); + } + + private final static Long[] TIME_DELTA_OPTIONS = new Long[] { + -30000L, -10000L, -5000L, -1000L, 0L, 1000L, 5000L, 10000L, 30000L + }; + + public static StreamEvent createRandomOutOfTimeOrderStreamEvent(String streamId) { + StreamEvent event = createRandomStreamEvent(streamId); + event.setTimestamp(System.currentTimeMillis() + TIME_DELTA_OPTIONS[RANDOM.nextInt(TIME_DELTA_OPTIONS.length)]); + return event; + } + + + public static PartitionedEvent createRandomOutOfTimeOrderEventGroupedByName(String streamId) { + StreamEvent event = createRandomStreamEvent(streamId); + event.setTimestamp(System.currentTimeMillis() + TIME_DELTA_OPTIONS[RANDOM.nextInt(TIME_DELTA_OPTIONS.length)]); + return new PartitionedEvent(event, createSampleStreamGroupbyPartition(streamId, Arrays.asList("name")), event.getData()[0].hashCode()); + } + + public static PartitionedEvent createPartitionedEventGroupedByName(String streamId, long timestamp) { + StreamEvent event = createRandomStreamEvent(streamId); + event.setTimestamp(timestamp); + return new PartitionedEvent(event, createSampleStreamGroupbyPartition(streamId, Arrays.asList("name")), event.getData()[0].hashCode()); + } + + public static PartitionedEvent createRandomSortedEventGroupedByName(String streamId) { + StreamEvent event = createRandomStreamEvent(streamId); + event.setTimestamp(System.currentTimeMillis()); + return new 
PartitionedEvent(event, createSampleStreamGroupbyPartition(streamId, Arrays.asList("name")), event.getData()[0].hashCode()); + } + + public static StreamEvent createRandomStreamEvent(String streamId, long timestamp) { + StreamEvent event; + try { + event = StreamEvent.builder() + .schema(MockSampleMetadataFactory.createSingletonMetadataServiceWithSample().getStreamDefinition(streamId)) + .streamId(streamId) + .timestamep(timestamp) + .attributes(new HashMap() {{ + put("name", SAMPLE_STREAM_NAME_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_NAME_OPTIONS.length)]); + put("value", SAMPLE_STREAM_VALUE_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_VALUE_OPTIONS.length)]); + put("host", SAMPLE_STREAM_HOST_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_HOST_OPTIONS.length)]); + put("flag", SAMPLE_STREAM_FLAG_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_FLAG_OPTIONS.length)]); +// put("value1", SAMPLE_STREAM_VALUE_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_VALUE_OPTIONS.length)]); +// put("value2", SAMPLE_STREAM_VALUE_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_VALUE_OPTIONS.length)]); +// put("value3", SAMPLE_STREAM_VALUE_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_VALUE_OPTIONS.length)]); +// put("value4", SAMPLE_STREAM_VALUE_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_VALUE_OPTIONS.length)]); +// put("value5", SAMPLE_STREAM_VALUE_OPTIONS[RANDOM.nextInt(SAMPLE_STREAM_VALUE_OPTIONS.length)]); + put("unknown", "unknown column value"); + }}).build(); + } catch (StreamNotDefinedException e) { + throw new IllegalStateException(e.getMessage(), e); + } + return event; + } + + public static PartitionedEvent createRandomPartitionedEvent(String streamId, long timestamp) { + StreamEvent event = createRandomStreamEvent(streamId, timestamp); + PartitionedEvent partitionedEvent = new PartitionedEvent(event, createSampleStreamGroupbyPartition(streamId, Arrays.asList("name")), event.getData()[0].hashCode()); + return partitionedEvent; + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java b/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java new file mode 100755 index 0000000000..313ccdaef7 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java @@ -0,0 +1,50 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.LinkedList; +import java.util.List; + +public class MockStreamCollector implements Collector { + @SuppressWarnings("unused") + private final static Logger LOG = LoggerFactory.getLogger(MockStreamCollector.class); + private List cache; + + public MockStreamCollector() { + cache = new LinkedList<>(); + } + + public void emit(AlertStreamEvent event) { + cache.add(event); + LOG.info("AlertStreamEvent received: {}",event); + } + + public void clear() { + cache.clear(); + } + + public List get() { + return cache; + } + + public int size() { + return cache.size(); + } +} \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java b/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java new file mode 100644 index 0000000000..becbef6a37 --- /dev/null +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.flink;
+
+import org.apache.eagle.flink.StreamDefinition;
+import org.apache.eagle.flink.StreamNotDefinedException;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class MockStreamMetadataService {
+    private final Map<String, StreamDefinition> streamSchemaMap = new HashMap<>();
+
+    public StreamDefinition getStreamDefinition(String streamId) throws StreamNotDefinedException {
+        if (streamSchemaMap.containsKey(streamId)) {
+            return streamSchemaMap.get(streamId);
+        } else {
+            throw new StreamNotDefinedException(streamId);
+        }
+    }
+
+    public void registerStream(String streamId, StreamDefinition schema) {
+        streamSchemaMap.put(streamId, schema);
+    }
+}
\ No newline at end of file
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java b/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java
new file mode 100644
index 0000000000..4da0f0b43c
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java
@@ -0,0 +1,19 @@
+package org.apache.eagle.flink;
+
+// No-op StreamCounter, used where no metrics backend is wired in (see SiddhiCEPOp.open()).
+public class MyStreamCounter implements StreamCounter {
+    @Override
+    public void incr(String scopeName) {
+    }
+
+    @Override
+    public void incrBy(String scopeName, int length) {
+    }
+
+    @Override
+    public void scope(String scopeName) {
+    }
+}
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java
index fd6d736bf7..f26b4215ce 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PartitionedEvent.java
@@ -113,7 +113,7 @@ public Object[] getData() {
     }
 
     public boolean isSortRequired() {
-        return isPartitionRequired() && this.getPartition().getSortSpec() != null;
+        return isPartitionRequired();
     }
 
     public boolean isPartitionRequired() {
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java
index 11b2ae803b..2a1d26d61e 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyChangeListener.java
@@ -19,6 +19,10 @@
 import java.util.Collection;
 import java.util.List;
+import java.util.Map;
 
 public interface PolicyChangeListener {
-    void onPolicyChange(List<PolicyDefinition> allPolicies, Collection<PolicyDefinition> addedPolicies, Collection<PolicyDefinition> removedPolicies, Collection<PolicyDefinition> modifiedPolicies);
-}
+    void onPolicyChange(String version,
+                        List<PolicyDefinition> added,
+                        List<PolicyDefinition> removed,
+                        List<PolicyDefinition> modified, Map<String, StreamDefinition> sds);
+}
\ No newline at end of file
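(To make the revised callback contract concrete, a listener that merely logs the metadata delta could look like the sketch below. This class is illustrative only and is not part of the patch; PolicyGroupEvaluatorImpl in the next diff is the real implementation.)

    import java.util.List;
    import java.util.Map;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingPolicyChangeListener implements PolicyChangeListener {
        private static final Logger LOG = LoggerFactory.getLogger(LoggingPolicyChangeListener.class);

        @Override
        public void onPolicyChange(String version,
                                   List<PolicyDefinition> added,
                                   List<PolicyDefinition> removed,
                                   List<PolicyDefinition> modified,
                                   Map<String, StreamDefinition> sds) {
            // log the delta only; a real listener would rebuild its policy handlers here
            LOG.info("Policy metadata updated to version {}: added={}, removed={}, modified={}, streams={}",
                    version, added.size(), removed.size(), modified.size(), sds.keySet());
        }
    }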
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java
index 66bbb3ab27..2fb643f207 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyGroupEvaluatorImpl.java
@@ -80,7 +80,7 @@ private void dispatch(PartitionedEvent partitionedEvent) {
         try {
             handled = true;
             this.context.counter().incr("eval_count");
-            policyStreamHandler.getValue().send(partitionedEvent.getEvent());
+            // no collector is wired through this legacy path yet
+            policyStreamHandler.getValue().send(partitionedEvent.getEvent(), null);
         } catch (Exception e) {
             this.context.counter().incr("fail_count");
             LOG.error("{} failed to handle {}", policyStreamHandler.getValue(), partitionedEvent.getEvent(), e);
@@ -101,33 +101,29 @@ private static boolean isAcceptedByPolicy(PartitionedEvent event, PolicyDefinition policy) {
             || policy.getDefinition().getInputStreams().contains(event.getEvent().getStreamId()));
     }
 
+    @Override
-    public void onPolicyChange(List<PolicyDefinition> allPolicies, Collection<PolicyDefinition> addedPolicies, Collection<PolicyDefinition> removedPolicies, Collection<PolicyDefinition> modifiedPolicies) {
+    public void onPolicyChange(String version, List<PolicyDefinition> added, List<PolicyDefinition> removed, List<PolicyDefinition> modified, Map<String, StreamDefinition> sds) {
+        Map<String, PolicyDefinition> copyPolicies = new HashMap<>(policyDefinitionMap);
+        Map<String, CompositePolicyHandler> copyHandlers = new HashMap<>(policyStreamHandlerMap);
+        for (PolicyDefinition pd : added) {
+            inplaceAdd(copyPolicies, copyHandlers, pd, sds);
+        }
+        for (PolicyDefinition pd : removed) {
+            inplaceRemove(copyPolicies, copyHandlers, pd);
+        }
+        for (PolicyDefinition pd : modified) {
+            inplaceRemove(copyPolicies, copyHandlers, pd);
+            inplaceAdd(copyPolicies, copyHandlers, pd, sds);
+        }
-    }
 
+        // logging
+        LOG.info("{}: policy metadata updated to version {}: added={}, removed={}, modified={}", policyEvaluatorId, version, added, removed, modified);
 
-//    @Override
-//    public void onPolicyChange(String version, List<PolicyDefinition> added, List<PolicyDefinition> removed, List<PolicyDefinition> modified, Map<String, StreamDefinition> sds) {
-//        Map<String, PolicyDefinition> copyPolicies = new HashMap<>(policyDefinitionMap);
-//        Map<String, CompositePolicyHandler> copyHandlers = new HashMap<>(policyStreamHandlerMap);
-//        for (PolicyDefinition pd : added) {
-//            inplaceAdd(copyPolicies, copyHandlers, pd, sds);
-//        }
-//        for (PolicyDefinition pd : removed) {
-//            inplaceRemove(copyPolicies, copyHandlers, pd);
-//        }
-//        for (PolicyDefinition pd : modified) {
-//            inplaceRemove(copyPolicies, copyHandlers, pd);
-//            inplaceAdd(copyPolicies, copyHandlers, pd, sds);
-//        }
-//
-//        // logging
-//        LOG.info("{} with {} Policy metadata updated with added={}, removed={}, modified={}", policyEvaluatorId, version, added, removed, modified);
-//
-//        // switch reference
-//        this.policyDefinitionMap = copyPolicies;
-//        this.policyStreamHandlerMap = copyHandlers;
-//    }
+        // switch reference
+        this.policyDefinitionMap = copyPolicies;
+        this.policyStreamHandlerMap = copyHandlers;
+    }
 
     private void inplaceAdd(Map<String, PolicyDefinition> policies, Map<String, CompositePolicyHandler> handlers, PolicyDefinition policy, Map<String, StreamDefinition> sds) {
         if (handlers.containsKey(policy.getName())) {
@@ -142,7 +138,7 @@ private void inplaceAdd(Map<String, PolicyDefinition> policies, Map<String, CompositePolicyHandler> handlers, PolicyDefinition policy, Map<String, StreamDefinition> sds) {
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/PolicyStreamHandler.java
-    void prepare(Collector<AlertStreamEvent> collector, PolicyHandlerContext context) throws Exception;
+    void prepare(PolicyHandlerContext context) throws Exception;
 
-    void send(StreamEvent event) throws Exception;
+    void send(StreamEvent event, Collector<AlertStreamEvent> collector) throws Exception;
 
     void close() throws Exception;
 }
\ No newline at end of file
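(A minimal sketch of a handler against the revised PolicyStreamHandler contract above: the collector now arrives per send() call instead of being fixed at prepare() time. The alert-on-every-event behavior is purely illustrative and not part of the patch.)

    public class PassThroughPolicyHandler implements PolicyStreamHandler {
        private PolicyHandlerContext context;

        @Override
        public void prepare(PolicyHandlerContext context) {
            this.context = context;
        }

        @Override
        public void send(StreamEvent event, Collector<AlertStreamEvent> collector) {
            context.getPolicyCounter().incr(context.getPolicyDefinition().getName() + ".receive_count");
            // a real handler would evaluate the policy here; this one alerts on every event
            AlertStreamEvent alert = new AlertStreamEvent();
            alert.setTimestamp(event.getTimestamp());
            alert.ensureAlertId();
            collector.emit(alert);
        }

        @Override
        public void close() {
        }
    }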
private static final Logger LOG = LoggerFactory.getLogger(SiddhiCEPOp.class); + private transient SiddhiPolicyHandler handler; + + @Override + public void open(Configuration parameters) throws Exception{ + handler = new SiddhiPolicyHandler(createDefinition("sampleStream_1"), 0); + PolicyDefinition policyDefinition = MockSampleMetadataFactory.createSingleMetricSamplePolicy(); + PolicyHandlerContext context = new PolicyHandlerContext(); + context.setPolicyDefinition(policyDefinition); + context.setPolicyCounter(new MyStreamCounter()); + context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId")); + handler.prepare(context); + } -public class SiddhiCEPOp extends KeyedProcessFunction { @Override - public void processElement(StreamEvent value, Context ctx, Collector out) throws Exception { - if(value.data[0].equals(100)) { - AlertPublishEvent event = new AlertPublishEvent(); - out.collect(event); + public void processElement(StreamEvent value, Context ctx, Collector out) throws Exception { + handler.send(value, new org.apache.eagle.flink.Collector(){ + @Override + public void emit(AlertStreamEvent o) { + out.collect(o); + } + }); + } + + private Map createDefinition(String... streamIds) { + Map sds = new HashMap<>(); + for (String streamId : streamIds) { + // construct StreamDefinition + StreamDefinition sd = MockSampleMetadataFactory.createSampleStreamDefinition(streamId); + sds.put(streamId, sd); } + return sds; + } + + @Override + public void close() throws Exception { + handler.close(); } } diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java index f74148de0a..53281a54be 100755 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyHandler.java @@ -45,11 +45,12 @@ protected String generateExecutionPlan(PolicyDefinition policyDefinition, Map collector, PolicyHandlerContext context) throws Exception { + public void prepare(PolicyHandlerContext context) throws Exception { LOG.info("Initializing handler for policy {}", context.getPolicyDefinition()); this.policy = context.getPolicyDefinition(); this.siddhiManager = new SiddhiManager(); String plan = generateExecutionPlan(policy, sds); + LOG.info("Siddhi execution plan: {}", plan); try { this.executionRuntime = siddhiManager.createExecutionPlanRuntime(plan); LOG.info("Created siddhi runtime {}", executionRuntime.getName()); @@ -65,7 +66,7 @@ public void prepare(final Collector collector, PolicyHandlerCo StreamDefinition streamDefinition = SiddhiDefinitionAdapter.convertFromSiddiDefinition(executionRuntime.getStreamDefinitionMap().get(outputStream)); this.executionRuntime.addCallback(outputStream, new AlertStreamCallback(outputStream, streamDefinition, - collector, context, currentIndex)); + context, currentIndex)); } else { throw new IllegalStateException("Undefined output stream " + outputStream); } @@ -79,12 +80,17 @@ protected List getOutputStreams(PolicyDefinition policy) { return policy.getOutputStreams().isEmpty() ? 
policy.getDefinition().getOutputStreams() : policy.getOutputStreams(); } - public void send(StreamEvent event) throws Exception { + public void send(StreamEvent event, Collector collector) throws Exception { context.getPolicyCounter().incr(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "receive_count")); String streamId = event.getStreamId(); InputHandler inputHandler = executionRuntime.getInputHandler(streamId); if (inputHandler != null) { context.getPolicyCounter().incr(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "eval_count")); + // wrap event with collector +// Object[] wrapper = new Object[event.getData().length + 1]; +// wrapper[0] = collector; +// System.arraycopy(event.getData(), 0, wrapper, 1, event.getData().length); + event.getData()[0] = collector; inputHandler.send(event.getTimestamp(), event.getData()); if (LOG.isDebugEnabled()) { diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java index 14aec4af4d..010f3a4c32 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamContextImpl.java @@ -16,16 +16,14 @@ */ package org.apache.eagle.flink; -import backtype.storm.metric.api.MultiCountMetric; -import backtype.storm.task.TopologyContext; import com.typesafe.config.Config; public class StreamContextImpl implements StreamContext { private final Config config; private final StreamCounter counter; - public StreamContextImpl(Config config, MultiCountMetric counter, TopologyContext context) { - this.counter = new StormMultiCountMetric(counter); + public StreamContextImpl(Config config, MyStreamCounter counter) { + this.counter = counter; this.config = config; } diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java index 2461d858f8..2e3c314a16 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java @@ -61,7 +61,7 @@ public void setStreamId(String streamId) { } public long getKey(){ - return 1; + return timestamp; } public void setData(Object[] data) { diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java index c256ee6cd6..9778838d6a 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java @@ -21,6 +21,7 @@ import java.io.Serializable; import java.sql.Timestamp; import java.util.Arrays; +import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -74,32 +75,39 @@ public StreamEvent next() { return StreamEvent; } + private static StreamEvent createSampleEvent(long timestamp, String name, double value) { + return StreamEvent.builder() + .schema(MockSampleMetadataFactory.createSampleStreamDefinition("sampleStream_1")) + .streamId("sampleStream_1") + .timestamep(start + timestamp) + .attributes(new HashMap() {{ + put("name", name); + put("value", value); + }}).build(); + } + + private static long start = System.currentTimeMillis(); private static List data = Arrays.asList( - new StreamEvent("testStream_1", 0L, new Object[]{188.23}), - new StreamEvent("testStream_1", 10L, new Object[]{100}), - new StreamEvent("testStream_1", 10L, new 
Object[]{188.23}), - new StreamEvent("testStream_1", 20L, new Object[]{188.23}), - new StreamEvent("testStream_1", 20L, new Object[]{188.23}), - new StreamEvent("testStream_1", 20L, new Object[]{188.23}), - new StreamEvent("testStream_1", 30L, new Object[]{188.23}), - new StreamEvent("testStream_1", 40L, new Object[]{188.23}), - new StreamEvent("testStream_1", 50L, new Object[]{188.23}), - new StreamEvent("testStream_1", 70L, new Object[]{188.23}), - new StreamEvent("testStream_1", 90L, new Object[]{100}), - new StreamEvent("testStream_1", 100L, new Object[]{188.23}), - new StreamEvent("testStream_1", 200L, new Object[]{188.23}), - new StreamEvent("testStream_1", 210L, new Object[]{188.23}), - new StreamEvent("testStream_1", 220L, new Object[]{188.23}), - new StreamEvent("testStream_1", 230L, new Object[]{188.23}), - new StreamEvent("testStream_1", 250L, new Object[]{188.23}), - new StreamEvent("testStream_1", 260L, new Object[]{188.23}), - new StreamEvent("testStream_1", 270L, new Object[]{188.23}), - new StreamEvent("testStream_1", 300L, new Object[]{188.23}), - new StreamEvent("testStream_1", 400L, new Object[]{188.23}), - new StreamEvent("testStream_1", 600L, new Object[]{188.23}), - new StreamEvent("testStream_1", 1000L, new Object[]{188.23}), - new StreamEvent("testStream_1", 12000L, new Object[]{188.23}), - new StreamEvent("testStream_1", 12001L, new Object[]{188.23}), - new StreamEvent("testStream_1", 12002L, new Object[]{188.23}) + createSampleEvent(0, "cpu", 60.0), + createSampleEvent(1, "nic", 10.0), + createSampleEvent(1, "nic", 20.0), + createSampleEvent(10, "cpu", 60.0), + createSampleEvent(10, "cpu", 60.0), + createSampleEvent(10, "cpu", 60.0), + createSampleEvent(20, "cpu", 60.0), + createSampleEvent(30, "cpu", 60.0), + createSampleEvent(50, "cpu", 60.0), + createSampleEvent(100, "cpu", 60.0), + createSampleEvent(120, "cpu", 60.0), + createSampleEvent(120, "cpu", 60.0), + createSampleEvent(130, "cpu", 60.0), + createSampleEvent(160, "cpu", 60.0), + createSampleEvent(2000, "door", 100.0), + createSampleEvent(2000, "cpu", 60.0), + createSampleEvent(2200, "cpu", 60.0), + createSampleEvent(2500, "cpu", 60.0), + createSampleEvent(2500, "cpu", 60.0), + createSampleEvent(2500, "cpu", 60.0), + createSampleEvent(3000, "cpu", 60.0) ); } diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java new file mode 100755 index 0000000000..4e925984f6 --- /dev/null +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java @@ -0,0 +1,150 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.eagle.flink.test; + +import org.apache.eagle.flink.*; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +public class SiddhiCEPPolicyEventHandlerTest { + private final static Logger LOG = LoggerFactory.getLogger(SiddhiCEPPolicyEventHandlerTest.class); + + private Map createDefinition(String... streamIds) { + Map sds = new HashMap<>(); + for (String streamId : streamIds) { + // construct StreamDefinition + StreamDefinition sd = MockSampleMetadataFactory.createSampleStreamDefinition(streamId); + sds.put(streamId, sd); + } + return sds; + } + + @SuppressWarnings("serial") + @Test + public void testBySendSimpleEvent() throws Exception { + SiddhiPolicyHandler handler; + MockStreamCollector collector; + + handler = new SiddhiPolicyHandler(createDefinition("sampleStream_1"), 0); + collector = new MockStreamCollector(); + PolicyDefinition policyDefinition = MockSampleMetadataFactory.createSingleMetricSamplePolicy(); + PolicyHandlerContext context = new PolicyHandlerContext(); + context.setPolicyDefinition(policyDefinition); + context.setPolicyCounter(new MyStreamCounter()); + context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId")); + handler.prepare(context); + StreamEvent event = StreamEvent.builder() + .schema(MockSampleMetadataFactory.createSampleStreamDefinition("sampleStream_1")) + .streamId("sampleStream_1") + .timestamep(System.currentTimeMillis()) + .attributes(new HashMap() {{ + put("name", "cpu"); + put("value", 60.0); + put("bad", "bad column value"); + }}).build(); + handler.send(event, collector); + handler.close(); + } + + @SuppressWarnings("serial") + @Test + public void testWithTwoStreamJoinPolicy() throws Exception { + Map ssd = createDefinition("sampleStream_1", "sampleStream_2"); + + PolicyDefinition policyDefinition = new PolicyDefinition(); + policyDefinition.setName("SampleJoinPolicyForTest"); + policyDefinition.setInputStreams(Arrays.asList("sampleStream_1", "sampleStream_2")); + policyDefinition.setOutputStreams(Collections.singletonList("joinedStream")); + policyDefinition.setDefinition(new PolicyDefinition.Definition(PolicyStreamHandlers.SIDDHI_ENGINE, + "from sampleStream_1#window.length(10) as left " + + "join sampleStream_2#window.length(10) as right " + + "on left.name == right.name and left.value == right.value " + + "select left.timestamp,left.name,left.value " + + "insert into joinedStream")); + policyDefinition.setPartitionSpec(Collections.singletonList(MockSampleMetadataFactory.createSampleStreamGroupbyPartition("sampleStream_1", Collections.singletonList("name")))); + SiddhiPolicyHandler handler; + Semaphore mutex = new Semaphore(0); + List alerts = new ArrayList<>(0); + Collector collector = (event) -> { + LOG.info("Collected {}", event); + Assert.assertTrue(event != null); + alerts.add(event); + mutex.release(); + }; + + handler = new SiddhiPolicyHandler(ssd, 0); + PolicyHandlerContext context = new PolicyHandlerContext(); + context.setPolicyDefinition(policyDefinition); + context.setPolicyCounter(new MyStreamCounter()); + 
context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId")); + handler.prepare(context); + + + long ts_1 = System.currentTimeMillis(); + long ts_2 = System.currentTimeMillis() + 1; + + handler.send(StreamEvent.builder() + .schema(ssd.get("sampleStream_1")) + .streamId("sampleStream_1") + .timestamep(ts_1) + .attributes(new HashMap() {{ + put("name", "cpu"); + put("value", 60.0); + put("bad", "bad column value"); + }}).build(), collector); + + handler.send(StreamEvent.builder() + .schema(ssd.get("sampleStream_2")) + .streamId("sampleStream_2") + .timestamep(ts_2) + .attributes(new HashMap() {{ + put("name", "cpu"); + put("value", 61.0); + }}).build(), collector); + + handler.send(StreamEvent.builder() + .schema(ssd.get("sampleStream_2")) + .streamId("sampleStream_2") + .timestamep(ts_2) + .attributes(new HashMap() {{ + put("name", "disk"); + put("value", 60.0); + }}).build(), collector); + + handler.send(StreamEvent.builder() + .schema(ssd.get("sampleStream_2")) + .streamId("sampleStream_2") + .timestamep(ts_2) + .attributes(new HashMap() {{ + put("name", "cpu"); + put("value", 60.0); + }}).build(), collector); + + handler.close(); + + Assert.assertTrue("Should get result in 5 s", mutex.tryAcquire(5, TimeUnit.SECONDS)); + Assert.assertEquals(1, alerts.size()); + Assert.assertEquals("joinedStream", alerts.get(0).getStreamId()); + Assert.assertEquals("cpu", alerts.get(0).getData()[1]); + } +} \ No newline at end of file From aeefcfda93bb6c5dcd808f52351952ad9f02433f Mon Sep 17 00:00:00 2001 From: yonzhang Date: Sun, 16 Aug 2020 21:40:09 -0700 Subject: [PATCH 5/6] create siddhi policy evaluator as Flink processor --- eagle-flink/README.md | 22 +++++ eagle-flink/pom.xml | 6 ++ .../eagle/flink/AlertStreamCallback.java | 49 ++-------- .../apache/eagle/flink/MyStreamCounter.java | 2 + .../flink/SiddhiPolicyFlinkProcessor.java | 95 +++++++++++++++++++ .../org/apache/eagle/flink/StreamEvent.java | 12 +++ .../flink/test}/EagleFlinkStreamApp.java | 9 +- .../test}/MockSampleMetadataFactory.java | 4 +- .../flink/test}/MockStreamCollector.java | 6 +- .../test}/MockStreamMetadataService.java | 2 +- .../eagle/flink/test/SampleSiddhiCEPOp.java} | 10 +- .../test/SiddhiCEPPolicyEventHandlerTest.java | 14 ++- .../flink/test}/StreamEventIterator.java | 4 +- .../eagle/flink/test}/StreamEventSource.java | 3 +- 14 files changed, 185 insertions(+), 53 deletions(-) create mode 100644 eagle-flink/README.md create mode 100644 eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java rename eagle-flink/src/{main/java/org/apache/eagle/flink => test/java/org/apache/eagle/flink/test}/EagleFlinkStreamApp.java (75%) rename eagle-flink/src/{main/java/org/apache/eagle/flink => test/java/org/apache/eagle/flink/test}/MockSampleMetadataFactory.java (99%) rename eagle-flink/src/{main/java/org/apache/eagle/flink => test/java/org/apache/eagle/flink/test}/MockStreamCollector.java (89%) rename eagle-flink/src/{main/java/org/apache/eagle/flink => test/java/org/apache/eagle/flink/test}/MockStreamMetadataService.java (97%) rename eagle-flink/src/{main/java/org/apache/eagle/flink/SiddhiCEPOp.java => test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java} (82%) rename eagle-flink/src/{main/java/org/apache/eagle/flink => test/java/org/apache/eagle/flink/test}/StreamEventIterator.java (97%) rename eagle-flink/src/{main/java/org/apache/eagle/flink => test/java/org/apache/eagle/flink/test}/StreamEventSource.java (93%) diff --git a/eagle-flink/README.md b/eagle-flink/README.md new file mode 100644 index 
0000000000..4e1595f1d9
--- /dev/null
+++ b/eagle-flink/README.md
@@ -0,0 +1,22 @@
+## Design goals
+
+### 1. execute rules on one or multiple streams
+
+### 2. dynamically inject new rules on existing streams
+
+### 3. reuse streams as much as possible
+
+## Primitive operations
+
+### 1. rules on single stream keyed by some fields
+    avg(cpu) > 0.8 [1m] group by host
+
+    sum(failed_requests) > 60 [1m] group by host
+
+    avg(failure_ratio) > 0.1 [1m] group by host
+
+### 2. rules on multiple streams joined by some fields
+
+
+
diff --git a/eagle-flink/pom.xml b/eagle-flink/pom.xml
index 04928c3a6a..6f6db82b8c 100644
--- a/eagle-flink/pom.xml
+++ b/eagle-flink/pom.xml
@@ -15,6 +15,12 @@
         <scala.binary.version>2.11</scala.binary.version>
     </properties>
     <dependencies>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>1.16.20</version>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java
index 903ee9fa19..f76415be16 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java
@@ -16,32 +16,18 @@
  */
 package org.apache.eagle.flink;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 import org.wso2.siddhi.core.event.Event;
 import org.wso2.siddhi.core.stream.output.StreamCallback;
 
-/**
- * Created on 8/2/16.
- */
+@RequiredArgsConstructor
+@Slf4j
 public class AlertStreamCallback extends StreamCallback {
-
-    private static final Logger LOG = LoggerFactory.getLogger(AlertStreamCallback.class);
     private final String outputStream;
-    private final PolicyHandlerContext context;
     private final StreamDefinition definition;
-
-    private int currentIndex;
-
-    public AlertStreamCallback(String outputStream,
-                               StreamDefinition streamDefinition,
-                               PolicyHandlerContext context,
-                               int currentIndex) {
-        this.outputStream = outputStream;
-        this.context = context;
-        this.definition = streamDefinition;
-        this.currentIndex = currentIndex;
-    }
+    private final PolicyHandlerContext context;
+    private final int currentIndex;
 
     /**
      * Possibly more than one event will be triggered for alerting.
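Outside the patch itself, the Siddhi 3.x wiring this callback participates in can be exercised standalone. A minimal sketch (the stream definition and filter rule are illustrative, not taken from this patch; the API calls are the ones the module already uses):

    import org.wso2.siddhi.core.ExecutionPlanRuntime;
    import org.wso2.siddhi.core.SiddhiManager;
    import org.wso2.siddhi.core.event.Event;
    import org.wso2.siddhi.core.stream.input.InputHandler;
    import org.wso2.siddhi.core.stream.output.StreamCallback;

    import java.util.Arrays;

    public class SiddhiCallbackSketch {
        public static void main(String[] args) throws Exception {
            SiddhiManager manager = new SiddhiManager();
            // Same shape as the plans generated in this module: one input stream,
            // one filter rule, one output stream.
            String plan = "define stream sampleStream_1 (name string, value double); "
                + "from sampleStream_1[value > 50.0] select name, value insert into outputStream;";
            ExecutionPlanRuntime runtime = manager.createExecutionPlanRuntime(plan);
            runtime.addCallback("outputStream", new StreamCallback() {
                @Override
                public void receive(Event[] events) {
                    // Siddhi may batch several matches into one callback invocation,
                    // which is why AlertStreamCallback.receive() loops over events.
                    for (Event e : events) {
                        System.out.println("alert: " + Arrays.toString(e.getData()));
                    }
                }
            });
            runtime.start();
            InputHandler input = runtime.getInputHandler("sampleStream_1");
            input.send(System.currentTimeMillis(), new Object[]{"cpu", 60.0});
            runtime.shutdown();
            manager.shutdown();
        }
    }

The Event[] parameter is the detail to note: one matched input event can yield a callback with several output events, so the receive() implementation below always iterates.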
@@ -50,10 +36,10 @@ public AlertStreamCallback,
     public void receive(Event[] events) {
         String policyName = context.getPolicyDefinition().getName();
         String siteId = context.getPolicyDefinition().getSiteId();
-        CompositePolicyHandler handler = ((PolicyGroupEvaluatorImpl) context.getPolicyEvaluator()).getPolicyHandler(policyName);
-        LOG.info("Generated {} alerts from policy '{}' in {}, index of definition {} ", events.length, policyName, context.getPolicyEvaluatorId(), currentIndex);
+        log.info("Generated {} alerts from policy '{}' in {}, index of definition {} ",
+                events.length, policyName, context.getPolicyEvaluatorId(), currentIndex);
         for (Event e : events) {
-            org.apache.eagle.flink.Collector eagleCollector = (org.apache.eagle.flink.Collector) e.getData()[0];
+            org.apache.eagle.flink.Collector eagleCollector = (org.apache.eagle.flink.Collector) e.getData()[0];
             AlertStreamEvent event = new AlertStreamEvent();
             event.setSiteId(siteId);
             event.setTimestamp(e.getTimestamp());
@@ -66,23 +52,8 @@ public void receive(Event[] events) {
             event.setCreatedTime(System.currentTimeMillis());
             event.setSchema(definition);
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Generate new alert event: {}", event);
-            }
+            log.debug("Generate new alert event: {}", event);
             eagleCollector.emit(event);
-//            try {
-//                if (handler == null) {
-//                    // extreme case: the handler is removed from the evaluator. Just emit.
-//                    if (LOG.isDebugEnabled()) {
-//                        LOG.debug(" handler not found when callback received event, directly emit. policy removed? ");
-//                    }
-//                    eagleCollector.emit(event);
-//                } else {
-//                    handler.send(event, currentIndex + 1);
-//                }
-//            } catch (Exception ex) {
-//                LOG.error(String.format("send event %s to index %d failed with exception. ", event, currentIndex), ex);
-//            }
         }
         context.getPolicyCounter().incrBy(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "alert_count"), events.length);
     }
 }
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java b/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java
index 4da0f0b43c..a92345e036 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/MyStreamCounter.java
@@ -1,5 +1,7 @@
 package org.apache.eagle.flink;
 
+import org.apache.eagle.flink.StreamCounter;
+
 public class MyStreamCounter implements StreamCounter {
     @Override
     public void incr(String scopeName) {
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java
new file mode 100644
index 0000000000..f3ee1928a0
--- /dev/null
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java
@@ -0,0 +1,95 @@
+package org.apache.eagle.flink;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
+import org.apache.flink.util.Collector;
+import org.wso2.siddhi.core.ExecutionPlanRuntime;
+import org.wso2.siddhi.core.SiddhiManager;
+import org.wso2.siddhi.core.stream.input.InputHandler;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A Flink processor to evaluate Siddhi policy.
+ *
+ * The steps to evaluate Siddhi policy should follow Flink's lifecycle methods.
+ * Note: this processor must be serializable, as the Flink runtime may persist it for failover.
+ * 1. setup phase: initialize the Siddhi runtime in open()
+ * 2. event process phase: in the Siddhi stream callback, invoke the Flink collector
+ * 3. cleanup phase
+ */
+@RequiredArgsConstructor
+@Slf4j
+public class SiddhiPolicyFlinkProcessor extends KeyedProcessFunction<Long, StreamEvent, AlertStreamEvent> {
+    private final Map<String, StreamDefinition> streamDefs;
+    private final PolicyDefinition policyDefinition;
+
+    private volatile SiddhiManager siddhiManager;
+    private ExecutionPlanRuntime executionRuntime;
+
+    /**
+     * setup phase
+     */
+    @Override
+    public void open(Configuration parameters) throws Exception {
+        this.siddhiManager = new SiddhiManager();
+        String plan = SiddhiDefinitionAdapter.buildSiddhiExecutionPlan(policyDefinition, streamDefs);
+        log.info("Siddhi execution plan: {}", plan);
+        try {
+            this.executionRuntime = siddhiManager.createExecutionPlanRuntime(plan);
+            log.info("Created siddhi runtime {}", executionRuntime.getName());
+        } catch (Exception parserException) {
+            log.error("Failed to create siddhi runtime for policy: {}, siddhi plan: \n\n{}\n",
+                this.policyDefinition.getName(), plan, parserException);
+            throw parserException;
+        }
+
+        // fixme what to set up for PolicyHandlerContext
+        PolicyHandlerContext context = new PolicyHandlerContext();
+
+        // add output stream callback
+        List<String> outputStreams = this.policyDefinition.getOutputStreams();
+        for (final String outputStream : outputStreams) {
+            if (executionRuntime.getStreamDefinitionMap().containsKey(outputStream)) {
+                StreamDefinition streamDefinition = SiddhiDefinitionAdapter.convertFromSiddiDefinition(executionRuntime.getStreamDefinitionMap().get(outputStream));
+                this.executionRuntime.addCallback(outputStream,
+                    new AlertStreamCallback(outputStream, streamDefinition,
+                        context, 0));
+            } else {
+                throw new IllegalStateException("Undefined output stream " + outputStream);
+            }
+        }
+        this.executionRuntime.start();
+    }
+
+    /**
+     * event process phase:
+     * input StreamEvent, output AlertStreamEvent.
+     * Note: for the Siddhi runtime's callback to be able to collect output through {@code out},
+     * {@code out} has to travel with {@code value}: the first element of Object[] data is
+     * reserved for the collector, so the callback can recover it from the original event.
+     */
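Condensed, the collector round trip described above looks as follows. This is a fragment, not a complete program: value, out, and inputHandler are the locals of processElement() below, while e and alertEvent stand for the Siddhi event and the alert built from it inside the callback (patch 6 later settles on org.apache.flink.util.Collector on the callback side as well):

    // producer side, inside processElement(): carry the Flink collector in slot 0
    value.attachFlinkCollector(out);                           // data[0] = out
    inputHandler.send(value.getTimestamp(), value.getData());

    // consumer side, inside the Siddhi StreamCallback: recover it from the event
    org.apache.flink.util.Collector<AlertStreamEvent> collector =
            (org.apache.flink.util.Collector<AlertStreamEvent>) e.getData()[0];
    collector.collect(alertEvent);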
+    @Override
+    public void processElement(StreamEvent value, Context ctx, Collector<AlertStreamEvent> out) throws Exception {
+        String streamId = value.getStreamId();
+        InputHandler inputHandler = executionRuntime.getInputHandler(streamId);
+        if (inputHandler != null) {
+            value.attachFlinkCollector(out);
+            inputHandler.send(value.getTimestamp(), value.getData());
+            log.debug("sent event to siddhi stream {} ", streamId);
+        } else {
+            log.warn("No input handler found for stream {}", streamId);
+        }
+    }
+
+    /**
+     * cleanup phase
+     */
+    @Override
+    public void close() throws Exception {
+        this.executionRuntime.shutdown();
+    }
+}
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java
index 2e3c314a16..3fba396d78 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java
@@ -18,6 +18,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.flink.util.Collector;
 
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -52,6 +53,17 @@ public StreamEvent(String streamId, long timestamp, Object[] data, String metaVe
         this.setMetaVersion(metaVersion);
     }
 
+    /**
+     * Attach the Flink collector to the first element of Object[] data.
+     */
+    public void attachFlinkCollector(Collector<AlertStreamEvent> out) {
+        data[0] = out;
+    }
+
+    public Collector<AlertStreamEvent> getFlinkCollector() {
+        return (Collector<AlertStreamEvent>) (data[0]);
+    }
+
     public String getStreamId() {
         return streamId;
     }
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamApp.java
similarity index 75%
rename from eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java
rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamApp.java
index 590d96bfc3..f3026fa350 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/EagleFlinkStreamApp.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamApp.java
@@ -1,5 +1,6 @@
-package org.apache.eagle.flink;
+package org.apache.eagle.flink.test;
 
+import org.apache.eagle.flink.*;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
@@ -7,13 +8,13 @@ public class EagleFlinkStreamApp {
     public static void main(String[] args) throws Exception {
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
 
-        DataStream<StreamEvent> events = env
+        DataStream<StreamEvent> source = env
                 .addSource(new StreamEventSource())
                 .name("eagle-events");
 
-        DataStream<AlertStreamEvent> alerts = events
+        DataStream<AlertStreamEvent> alerts = source
                 .keyBy(StreamEvent::getKey)
-                .process(new SiddhiCEPOp())
+                .process(new SampleSiddhiCEPOp())
                 .name("eagle-alert-engine");
 
         alerts.addSink(new AlertSink())
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java
similarity index 99%
rename from eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java
rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java
index 0d17f7677f..895f92d4cd 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/MockSampleMetadataFactory.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java
@@ -14,9 +14,11 @@ * See the
License for the specific language governing permissions and * limitations under the License. */ -package org.apache.eagle.flink; +package org.apache.eagle.flink.test; +import org.apache.eagle.flink.*; + import java.util.*; import java.util.concurrent.ThreadLocalRandom; diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockStreamCollector.java similarity index 89% rename from eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/MockStreamCollector.java index 313ccdaef7..78485e9844 100755 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamCollector.java +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockStreamCollector.java @@ -14,8 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.eagle.flink; +package org.apache.eagle.flink.test; +import org.apache.eagle.flink.AlertStreamEvent; +import org.apache.eagle.flink.Collector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,7 +35,7 @@ public MockStreamCollector() { public void emit(AlertStreamEvent event) { cache.add(event); - LOG.info("AlertStreamEvent received: {}",event); + LOG.info("AlertStreamEvent received: {}",event); } public void clear() { diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockStreamMetadataService.java similarity index 97% rename from eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/MockStreamMetadataService.java index becbef6a37..49d38ce4d7 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/MockStreamMetadataService.java +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockStreamMetadataService.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.eagle.flink; +package org.apache.eagle.flink.test; import org.apache.eagle.flink.StreamDefinition; import org.apache.eagle.flink.StreamNotDefinedException; diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java similarity index 82% rename from eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java index 148a57427b..52e6aa39f0 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiCEPOp.java +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java @@ -1,5 +1,6 @@ -package org.apache.eagle.flink; +package org.apache.eagle.flink.test; +import org.apache.eagle.flink.*; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.KeyedProcessFunction; import org.apache.flink.util.Collector; @@ -9,8 +10,8 @@ import java.util.HashMap; import java.util.Map; -public class SiddhiCEPOp extends KeyedProcessFunction { - private static final Logger LOG = LoggerFactory.getLogger(SiddhiCEPOp.class); +public class SampleSiddhiCEPOp extends KeyedProcessFunction { + private static final Logger LOG = LoggerFactory.getLogger(SampleSiddhiCEPOp.class); private transient SiddhiPolicyHandler handler; @Override @@ -24,6 +25,9 @@ public void open(Configuration parameters) throws Exception{ handler.prepare(context); } + /** + * Collector is not defined in prepare stage, that is why handler.send is used for collect output here + */ @Override public void processElement(StreamEvent value, Context ctx, Collector out) throws Exception { handler.send(value, new org.apache.eagle.flink.Collector(){ diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java index 4e925984f6..868f49f763 100755 --- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java @@ -39,7 +39,17 @@ private Map createDefinition(String... streamIds) { return sds; } - @SuppressWarnings("serial") + /** + * use Siddhi API to go through + * 1) stream definition + * 2) policy definition + * 3) output collector + */ + @Test + public void testSimpleSiddhiPolicyEvaluation(){ + + } + @Test public void testBySendSimpleEvent() throws Exception { SiddhiPolicyHandler handler; @@ -64,6 +74,8 @@ public void testBySendSimpleEvent() throws Exception { }}).build(); handler.send(event, collector); handler.close(); + + Assert.assertTrue(collector.size() == 1); } @SuppressWarnings("serial") diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java similarity index 97% rename from eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java index 9778838d6a..1034fb7ce5 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventIterator.java +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java @@ -16,7 +16,9 @@ * limitations under the License. 
*/ -package org.apache.eagle.flink; +package org.apache.eagle.flink.test; + +import org.apache.eagle.flink.StreamEvent; import java.io.Serializable; import java.sql.Timestamp; diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java similarity index 93% rename from eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java index 7f099369b6..20febb1f28 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEventSource.java +++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java @@ -1,5 +1,6 @@ -package org.apache.eagle.flink; +package org.apache.eagle.flink.test; +import org.apache.eagle.flink.StreamEvent; import org.apache.flink.annotation.Public; import org.apache.flink.streaming.api.functions.source.FromIteratorFunction; From 674459d1e81b6a3a4bc7967b02284b7122cd29b9 Mon Sep 17 00:00:00 2001 From: yonzhang Date: Sat, 22 Aug 2020 10:39:03 -0700 Subject: [PATCH 6/6] single event handling in flink siddhi processor --- .../eagle/flink/AlertStreamCallback.java | 50 ++-- .../apache/eagle/flink/AlertStreamEvent.java | 3 + .../eagle/flink/SiddhiDefinitionAdapter.java | 172 +++++++---- .../flink/SiddhiPolicyFlinkProcessor.java | 34 +-- .../org/apache/eagle/flink/StreamColumn.java | 221 +------------- .../apache/eagle/flink/StreamDefinition.java | 161 +--------- .../org/apache/eagle/flink/StreamEvent.java | 7 - ...p.java => EagleFlinkStreamExampleApp.java} | 13 +- .../flink/test/MockSampleMetadataFactory.java | 58 ++-- .../eagle/flink/test/SampleSiddhiCEPOp.java | 55 ---- .../test/SiddhiCEPPolicyEventHandlerTest.java | 279 ++++++++++-------- .../eagle/flink/test/StreamEventIterator.java | 2 +- .../eagle/flink/test/StreamEventSource.java | 6 +- 13 files changed, 397 insertions(+), 664 deletions(-) rename eagle-flink/src/test/java/org/apache/eagle/flink/test/{EagleFlinkStreamApp.java => EagleFlinkStreamExampleApp.java} (61%) delete mode 100644 eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java index f76415be16..7aaf947f3b 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamCallback.java @@ -16,45 +16,61 @@ */ package org.apache.eagle.flink; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.wso2.siddhi.core.event.Event; import org.wso2.siddhi.core.stream.output.StreamCallback; -@RequiredArgsConstructor -@Slf4j +/** + * Get called back by Siddhi runtime when policy is evaluated to be true, then + * it will forward the alert to next processor in Flink + */ public class AlertStreamCallback extends StreamCallback { + + private static final Logger LOG = LoggerFactory.getLogger(AlertStreamCallback.class); private final String outputStream; - private final StreamDefinition definition; private final PolicyHandlerContext context; - private final int currentIndex; + private final StreamDefinition definition; + + private int currentIndex; + + public AlertStreamCallback(String outputStream, + StreamDefinition streamDefinition, + PolicyHandlerContext context, + int currentIndex) { + 
this.outputStream = outputStream; + this.context = context; + this.definition = streamDefinition; + this.currentIndex = currentIndex; + } /** * Possibly more than one event will be triggered for alerting. */ @Override public void receive(Event[] events) { - String policyName = context.getPolicyDefinition().getName(); - String siteId = context.getPolicyDefinition().getSiteId(); - log.info("Generated {} alerts from policy '{}' in {}, index of definiton {} ", - events.length, policyName, context.getPolicyEvaluatorId(), currentIndex); + LOG.info("Generated {} alerts in {}, index of definiton {} ", events.length, context.getPolicyEvaluatorId(), currentIndex); for (Event e : events) { - org.apache.eagle.flink.Collector eagleCollector = (org.apache.eagle.flink.Collector)e.getData()[0]; + org.apache.flink.util.Collector eagleCollector = (org.apache.flink.util.Collector) e.getData()[0]; AlertStreamEvent event = new AlertStreamEvent(); - event.setSiteId(siteId); + event.setSiteId(""); event.setTimestamp(e.getTimestamp()); - event.setData(e.getData()); + // remove collector from event + Object[] payload = new Object[e.getData().length - 1]; + System.arraycopy(e.getData(), 1, payload, 0, e.getData().length - 1); + event.setData(payload); event.setStreamId(outputStream); - event.setPolicyId(context.getPolicyDefinition().getName()); + event.setPolicyId(""); if (this.context.getPolicyEvaluator() != null) { event.setCreatedBy(context.getPolicyEvaluator().getName()); } event.setCreatedTime(System.currentTimeMillis()); event.setSchema(definition); - log.debug("Generate new alert event: {}", event); - eagleCollector.emit(event); + if (LOG.isDebugEnabled()) { + LOG.debug("Generate new alert event: {}", event); + } + eagleCollector.collect(event); } - context.getPolicyCounter().incrBy(String.format("%s.%s", this.context.getPolicyDefinition().getName(), "alert_count"), events.length); } } diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java index a117dfdbe0..ca9c0f8051 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/AlertStreamEvent.java @@ -70,6 +70,9 @@ public String getPolicyId() { @Override public String toString() { + if(this.getData() == null){ + return ""; + } List dataStrings = new ArrayList<>(this.getData().length); for (Object obj : this.getData()) { if (obj != null) { diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java index b26eefe7b9..c404c0e254 100644 --- a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java +++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiDefinitionAdapter.java @@ -17,6 +17,7 @@ package org.apache.eagle.flink; import com.google.common.base.Preconditions; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,9 +29,27 @@ import java.util.List; import java.util.Map; +@Slf4j public class SiddhiDefinitionAdapter { - private static final Logger LOG = LoggerFactory.getLogger(SiddhiDefinitionAdapter.class); - public static final String DEFINE_STREAM_TEMPLATE = "define stream %s ( %s );"; + private static final String DEFINE_STREAM_TEMPLATE = "define stream %s ( %s );"; + private static final String OUTPUT_STREAM_TEMPLATE = "select %s insert into %s;"; + 
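For reference, a small sketch of what the two clause builders introduced here produce. The column set is illustrative, and the expected strings follow mechanically from the two templates above (the method name is spelled buildSiddhiOutpuStreamClause as committed):

    import java.util.Arrays;
    import java.util.List;

    public class ClauseBuilderSketch {
        public static void main(String[] args) {
            List<StreamColumn> cols = Arrays.asList(
                    StreamColumn.builder().name("name").type(StreamColumn.ColumnType.STRING).build(),
                    StreamColumn.builder().name("value").type(StreamColumn.ColumnType.DOUBLE).build());

            // -> "define stream sampleStream_1 ( name string,value double );"
            System.out.println(SiddhiDefinitionAdapter.buildSiddhiDefineStreamClause("sampleStream_1", cols));

            // -> "select name,value insert into outputStream;"
            System.out.println(SiddhiDefinitionAdapter.buildSiddhiOutpuStreamClause("outputStream", cols));
        }
    }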
+ public static String buildSiddhiOutpuStreamClause(String outStreamId, List columns){ + List siddhiColumns = new ArrayList<>(); + for (StreamColumn column : columns) { + siddhiColumns.add(column.getName()); + } + return String.format(OUTPUT_STREAM_TEMPLATE, StringUtils.join(siddhiColumns, ","), outStreamId); + } + + public static String buildSiddhiDefineStreamClause(String inStreamId, List columns){ + List siddhiColumns = new ArrayList<>(); + for (StreamColumn column : columns) { + siddhiColumns.add(String.format("%s %s", column.getName(), + convertToSiddhiAttributeType(column.getType()).toString().toLowerCase())); + } + return String.format(DEFINE_STREAM_TEMPLATE, inStreamId, StringUtils.join(siddhiColumns, ",")); + } public static String buildStreamDefinition(StreamDefinition streamDefinition) { List columns = new ArrayList<>(); @@ -40,12 +59,12 @@ public static String buildStreamDefinition(StreamDefinition streamDefinition) { columns.add(String.format("%s %s", column.getName(), convertToSiddhiAttributeType(column.getType()).toString().toLowerCase())); } } else { - LOG.warn("No columns found for stream {}" + streamDefinition.getStreamId()); + log.warn("No columns found for stream {}" + streamDefinition.getStreamId()); } return String.format(DEFINE_STREAM_TEMPLATE, streamDefinition.getStreamId(), StringUtils.join(columns, ",")); } - public static Attribute.Type convertToSiddhiAttributeType(StreamColumn.Type type) { + public static Attribute.Type convertToSiddhiAttributeType(StreamColumn.ColumnType type) { if (_EAGLE_SIDDHI_TYPE_MAPPING.containsKey(type)) { return _EAGLE_SIDDHI_TYPE_MAPPING.get(type); } @@ -53,7 +72,7 @@ public static Attribute.Type convertToSiddhiAttributeType(StreamColumn.Type type throw new IllegalArgumentException("Unknown stream type: " + type); } - public static Class convertToJavaAttributeType(StreamColumn.Type type) { + public static Class convertToJavaAttributeType(StreamColumn.ColumnType type) { if (_EAGLE_JAVA_TYPE_MAPPING.containsKey(type)) { return _EAGLE_JAVA_TYPE_MAPPING.get(type); } @@ -61,7 +80,7 @@ public static Class convertToJavaAttributeType(StreamColumn.Type type) { throw new IllegalArgumentException("Unknown stream type: " + type); } - public static StreamColumn.Type convertFromJavaAttributeType(Class type) { + public static StreamColumn.ColumnType convertFromJavaAttributeType(Class type) { if (_JAVA_EAGLE_TYPE_MAPPING.containsKey(type)) { return _JAVA_EAGLE_TYPE_MAPPING.get(type); } @@ -69,7 +88,7 @@ public static StreamColumn.Type convertFromJavaAttributeType(Class type) { throw new IllegalArgumentException("Unknown stream type: " + type); } - public static StreamColumn.Type convertFromSiddhiAttributeType(Attribute.Type type) { + public static StreamColumn.ColumnType convertFromSiddhiAttributeType(Attribute.Type type) { if (_SIDDHI_EAGLE_TYPE_MAPPING.containsKey(type)) { return _SIDDHI_EAGLE_TYPE_MAPPING.get(type); } @@ -77,6 +96,39 @@ public static StreamColumn.Type convertFromSiddhiAttributeType(Attribute.Type ty throw new IllegalArgumentException("Unknown siddhi type: " + type); } + /** + * Build Siddhi execution plan off a single input stream and output stream + * A Siddhi execution plan consists of three parts: input stream definitions, output stream definitions and policy + * So the evaluation flow is: + * input stream -> policy evaluation -> output stream + */ + public static String buildSiddhiExecutionPlan(StreamDefinition inStreamDef, + String policy, StreamDefinition outStreamDef) { + StringBuilder builder = new StringBuilder(); + List 
modifiedIn = new ArrayList<>(inStreamDef.getColumns()); + StreamColumn iCollectorCol = StreamColumn.builder().name("__collector__"). + type(StreamColumn.ColumnType.OBJECT).build(); + modifiedIn.add(0, iCollectorCol); + builder.append(SiddhiDefinitionAdapter.buildSiddhiDefineStreamClause(inStreamDef.getStreamId(), modifiedIn)); + builder.append("\n"); + + // concatenate policy and output stream definition + // ex: "from sampleStream_1[name == \"cpu\" and value > 50.0] select __collector__, name, host, flag, value insert into outputStream;" + builder.append("from "); + builder.append(inStreamDef.getStreamId()); + builder.append(" ["); + builder.append(policy); + builder.append("] "); + + List modifiedOut = new ArrayList<>(inStreamDef.getColumns()); + StreamColumn oCollectorCol = StreamColumn.builder().name("__collector__"). + type(StreamColumn.ColumnType.OBJECT).build(); + modifiedOut.add(0, oCollectorCol); + builder.append(SiddhiDefinitionAdapter.buildSiddhiOutpuStreamClause(outStreamDef.getStreamId(), modifiedOut)); + log.debug("Generated siddhi execution plan: {}", builder.toString()); + return builder.toString(); + } + public static String buildSiddhiExecutionPlan(PolicyDefinition policyDefinition, Map sds) { StringBuilder builder = new StringBuilder(); PolicyDefinition.Definition coreDefinition = policyDefinition.getDefinition(); @@ -87,13 +139,17 @@ public static String buildSiddhiExecutionPlan(PolicyDefinition policyDefinition, } for (String inputStream : inputStreams) { - builder.append(SiddhiDefinitionAdapter.buildStreamDefinition(sds.get(inputStream))); + StreamDefinition sd = sds.get(inputStream); + List columns = sd.getColumns(); + columns = new ArrayList<>(columns); + StreamColumn collectorCol = StreamColumn.builder().name("__collector__"). 
+ type(StreamColumn.ColumnType.OBJECT).build(); + columns.add(0, collectorCol); + builder.append(SiddhiDefinitionAdapter.buildSiddhiDefineStreamClause(sd.getStreamId(), columns)); builder.append("\n"); } builder.append(coreDefinition.value); - if (LOG.isDebugEnabled()) { - LOG.debug("Generated siddhi execution plan: {} from definition: {}", builder.toString(), coreDefinition); - } + log.debug("Generated siddhi execution plan: {} from definition: {}", builder.toString(), coreDefinition); return builder.toString(); } @@ -104,9 +160,7 @@ public static String buildSiddhiExecutionPlan(String policyDefinition, Map _EAGLE_SIDDHI_TYPE_MAPPING = new HashMap<>(); - private static final Map> _EAGLE_JAVA_TYPE_MAPPING = new HashMap<>(); - private static final Map, StreamColumn.Type> _JAVA_EAGLE_TYPE_MAPPING = new HashMap<>(); - private static final Map _SIDDHI_EAGLE_TYPE_MAPPING = new HashMap<>(); + private static final Map _EAGLE_SIDDHI_TYPE_MAPPING = new HashMap<>(); + private static final Map> _EAGLE_JAVA_TYPE_MAPPING = new HashMap<>(); + private static final Map, StreamColumn.ColumnType> _JAVA_EAGLE_TYPE_MAPPING = new HashMap<>(); + private static final Map _SIDDHI_EAGLE_TYPE_MAPPING = new HashMap<>(); static { - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.STRING, Attribute.Type.STRING); - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.INT, Attribute.Type.INT); - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.LONG, Attribute.Type.LONG); - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.FLOAT, Attribute.Type.FLOAT); - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.DOUBLE, Attribute.Type.DOUBLE); - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.BOOL, Attribute.Type.BOOL); - _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.Type.OBJECT, Attribute.Type.OBJECT); - - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.STRING, String.class); - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.INT, Integer.class); - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.LONG, Long.class); - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.FLOAT, Float.class); - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.DOUBLE, Double.class); - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.BOOL, Boolean.class); - _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.Type.OBJECT, Object.class); - - _JAVA_EAGLE_TYPE_MAPPING.put(String.class, StreamColumn.Type.STRING); - _JAVA_EAGLE_TYPE_MAPPING.put(Integer.class, StreamColumn.Type.INT); - _JAVA_EAGLE_TYPE_MAPPING.put(Long.class, StreamColumn.Type.LONG); - _JAVA_EAGLE_TYPE_MAPPING.put(Float.class, StreamColumn.Type.FLOAT); - _JAVA_EAGLE_TYPE_MAPPING.put(Double.class, StreamColumn.Type.DOUBLE); - _JAVA_EAGLE_TYPE_MAPPING.put(Boolean.class, StreamColumn.Type.BOOL); - _JAVA_EAGLE_TYPE_MAPPING.put(Object.class, StreamColumn.Type.OBJECT); - - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.STRING, StreamColumn.Type.STRING); - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.INT, StreamColumn.Type.INT); - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.LONG, StreamColumn.Type.LONG); - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.FLOAT, StreamColumn.Type.FLOAT); - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.DOUBLE, StreamColumn.Type.DOUBLE); - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.BOOL, StreamColumn.Type.BOOL); - _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.OBJECT, StreamColumn.Type.OBJECT); + _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.STRING, Attribute.Type.STRING); + _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.INT, Attribute.Type.INT); + 
_EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.LONG, Attribute.Type.LONG); + _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.FLOAT, Attribute.Type.FLOAT); + _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.DOUBLE, Attribute.Type.DOUBLE); + _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.BOOL, Attribute.Type.BOOL); + _EAGLE_SIDDHI_TYPE_MAPPING.put(StreamColumn.ColumnType.OBJECT, Attribute.Type.OBJECT); + + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.STRING, String.class); + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.INT, Integer.class); + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.LONG, Long.class); + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.FLOAT, Float.class); + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.DOUBLE, Double.class); + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.BOOL, Boolean.class); + _EAGLE_JAVA_TYPE_MAPPING.put(StreamColumn.ColumnType.OBJECT, Object.class); + + _JAVA_EAGLE_TYPE_MAPPING.put(String.class, StreamColumn.ColumnType.STRING); + _JAVA_EAGLE_TYPE_MAPPING.put(Integer.class, StreamColumn.ColumnType.INT); + _JAVA_EAGLE_TYPE_MAPPING.put(Long.class, StreamColumn.ColumnType.LONG); + _JAVA_EAGLE_TYPE_MAPPING.put(Float.class, StreamColumn.ColumnType.FLOAT); + _JAVA_EAGLE_TYPE_MAPPING.put(Double.class, StreamColumn.ColumnType.DOUBLE); + _JAVA_EAGLE_TYPE_MAPPING.put(Boolean.class, StreamColumn.ColumnType.BOOL); + _JAVA_EAGLE_TYPE_MAPPING.put(Object.class, StreamColumn.ColumnType.OBJECT); + + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.STRING, StreamColumn.ColumnType.STRING); + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.INT, StreamColumn.ColumnType.INT); + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.LONG, StreamColumn.ColumnType.LONG); + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.FLOAT, StreamColumn.ColumnType.FLOAT); + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.DOUBLE, StreamColumn.ColumnType.DOUBLE); + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.BOOL, StreamColumn.ColumnType.BOOL); + _SIDDHI_EAGLE_TYPE_MAPPING.put(Attribute.Type.OBJECT, StreamColumn.ColumnType.OBJECT); } public static StreamDefinition convertFromSiddiDefinition(AbstractDefinition siddhiDefinition) { - StreamDefinition streamDefinition = new StreamDefinition(); - streamDefinition.setStreamId(siddhiDefinition.getId()); + StreamDefinition.StreamDefinitionBuilder builder = StreamDefinition.builder(); + builder.streamId(siddhiDefinition.getId()); List columns = new ArrayList<>(siddhiDefinition.getAttributeNameArray().length); for (Attribute attribute : siddhiDefinition.getAttributeList()) { - StreamColumn column = new StreamColumn(); - column.setType(convertFromSiddhiAttributeType(attribute.getType())); - column.setName(attribute.getName()); - columns.add(column); + StreamColumn.StreamColumnBuilder colBuilder = StreamColumn.builder(); + colBuilder.type(convertFromSiddhiAttributeType(attribute.getType())); + colBuilder.name(attribute.getName()); + columns.add(colBuilder.build()); } - streamDefinition.setColumns(columns); - streamDefinition.setTimeseries(true); - streamDefinition.setDescription("Auto-generated stream schema from siddhi for " + siddhiDefinition.getId()); - return streamDefinition; + builder.columns(columns); + builder.timeseries(true); + builder.description("Auto-generated stream schema from siddhi for " + siddhiDefinition.getId()); + return builder.build(); } } \ No newline at end of file diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java 
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java
index f3ee1928a0..d04836082c 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/SiddhiPolicyFlinkProcessor.java
@@ -25,43 +25,38 @@
 @RequiredArgsConstructor
 @Slf4j
 public class SiddhiPolicyFlinkProcessor extends KeyedProcessFunction {
-    private final Map streamDefs;
-    private final PolicyDefinition policyDefinition;
+    private final StreamDefinition inStreamDef;
+    private final String policy;
+    private final StreamDefinition outStreamDef;
     private volatile SiddhiManager siddhiManager;
     private ExecutionPlanRuntime executionRuntime;
+
     /**
      * setup phase
      */
     @Override
    public void open(Configuration parameters) throws Exception{
         this.siddhiManager = new SiddhiManager();
-        String plan = SiddhiDefinitionAdapter.buildSiddhiExecutionPlan(policyDefinition, streamDefs);
+        String plan = SiddhiDefinitionAdapter.buildSiddhiExecutionPlan(inStreamDef, policy, outStreamDef);
         log.info("Siddhi execution plan: {}", plan);
         try {
             this.executionRuntime = siddhiManager.createExecutionPlanRuntime(plan);
             log.info("Created siddhi runtime {}", executionRuntime.getName());
         } catch (Exception parserException) {
-            log.error("Failed to create siddhi runtime for policy: {}, siddhi plan: \n\n{}\n",
-                this.policyDefinition.getName(), plan, parserException);
+            log.error("Failed to create siddhi runtime for input stream: {}, output stream: {}, siddhi plan: \n\n{}\n",
+                inStreamDef.getStreamId(), outStreamDef.getStreamId(), plan, parserException);
             throw parserException;
         }
 
         // fixme what to set up for PolicyHandlerContext
         PolicyHandlerContext context = new PolicyHandlerContext();
+        context.setPolicyDefinition(null);
+        context.setPolicyCounter(new MyStreamCounter());
 
         // add output stream callback
-        List outputStreams = this.policyDefinition.getOutputStreams();
-        for (final String outputStream : outputStreams) {
-            if (executionRuntime.getStreamDefinitionMap().containsKey(outputStream)) {
-                StreamDefinition streamDefinition = SiddhiDefinitionAdapter.convertFromSiddiDefinition(executionRuntime.getStreamDefinitionMap().get(outputStream));
-                this.executionRuntime.addCallback(outputStream,
-                    new AlertStreamCallback(outputStream, streamDefinition,
-                        context, 0));
-            } else {
-                throw new IllegalStateException("Undefined output stream " + outputStream);
-            }
-        }
+        this.executionRuntime.addCallback(outStreamDef.getStreamId(),
+            new AlertStreamCallback(outStreamDef.getStreamId(), outStreamDef, context, 0));
         this.executionRuntime.start();
     }
@@ -77,8 +72,11 @@ public void processElement(StreamEvent value, Context ctx, Collector {
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamColumn.java
-
-        @Override
-        public Type unmarshal(String v) throws Exception {
-            return Type.getEnumFromValue(v);
-        }
-
-        @Override
-        public String marshal(Type v) throws Exception {
-            return v.name;
-        }
-    }
-
-    public static class DefaultValueAdapter extends XmlAdapter {
-        @Override
-        public Object unmarshal(String v) throws Exception {
-            return v;
-        }
-
-        @Override
-        public String marshal(Object v) throws Exception {
-            return v.toString();
-        }
-    }
-
-    public static class Builder {
-        private StreamColumn column;
-
-        public Builder() {
-            column = new StreamColumn();
-        }
-
-        public Builder name(String name) {
-            column.setName(name);
-            return this;
-        }
-
-        public Builder type(Type type) {
-            column.setType(type);
-            return this;
-        }
-
-        public Builder defaultValue(Object defaultValue) {
-            column.setDefaultValue(defaultValue);
-            return this;
-        }
-
-        public Builder required(boolean required) {
-            column.setRequired(required);
-            return this;
-        }
-
-        public StreamColumn build() {
-            return column;
-        }
+    public enum ColumnType {
+        STRING,
+        INT,
+        LONG,
+        FLOAT,
+        DOUBLE,
+        BOOL,
+        OBJECT
     }
 }
\ No newline at end of file
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java
index b7589f7a0c..6872685743 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamDefinition.java
@@ -16,24 +16,14 @@
  */
 package org.apache.eagle.flink;
 
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
-
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlElementWrapper;
+import lombok.Builder;
+import lombok.Data;
+
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Objects;
 
-/**
- * This is actually a data source schema.
- *
- * @since Apr 5, 2016
- */
+@Builder(toBuilder = true)
+@Data
 public class StreamDefinition implements Serializable {
-    private static final long serialVersionUID = 2352202882328931825L;
-
     // Stream unique ID
     private String streamId;
 
@@ -41,123 +31,22 @@ public class StreamDefinition implements Serializable {
     private String description;
 
     // Is validateable or not
-    private boolean validate = true;
+    private boolean validate;
 
     // Is timeseries-based stream or not
     private boolean timeseries;
 
-    // TODO: Decouple dataSource and siteId from stream definition
-
     // Stream data source ID
     private String dataSource;
 
-    private String group = "global";
+    private String group;
 
-    // private String streamSource;
     // Tenant (Site) ID
     private String siteId;
 
-    private List columns = new ArrayList<>();
-
-    public String toString() {
-        return String.format("StreamDefinition[group=%s, streamId=%s, dataSource=%s, description=%s, validate=%s, timeseries=%s, columns=%s",
-            group,
-            streamId,
-            dataSource,
-            description,
-            validate,
-            timeseries,
-            columns
-        );
-    }
-
-    @Override
-    public int hashCode() {
-        return new HashCodeBuilder()
-            .append(this.streamId)
-            .append(this.group)
-            .append(this.description)
-            .append(this.validate)
-            .append(this.timeseries)
-            .append(this.dataSource)
-            .append(streamSource)
-            .append(this.siteId)
-            .append(this.columns)
-            .build();
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (!(obj instanceof StreamDefinition)) {
-            return false;
-        }
-        StreamDefinition streamDefinition = (StreamDefinition) obj;
-        return Objects.equals(this.streamId, streamDefinition.streamId)
-            && Objects.equals(this.group, streamDefinition.group)
-            && Objects.equals(this.description, streamDefinition.description)
-            && Objects.equals(this.validate, streamDefinition.validate)
-            && Objects.equals(this.timeseries, streamDefinition.timeseries)
-            && Objects.equals(this.dataSource, streamDefinition.dataSource)
-            && Objects.equals(this.streamSource, streamDefinition.streamSource)
-            && Objects.equals(this.siteId, streamDefinition.siteId)
-            && CollectionUtils.isEqualCollection(this.columns, streamDefinition.columns);
-    }
-
-    public String getStreamId() {
-        return streamId;
-    }
-
-    public void setStreamId(String streamId) {
-        this.streamId = streamId;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
-    }
-
-    @Deprecated
-    public boolean isValidate() {
-        return validate;
-    }
-
-    public void setValidate(boolean validate) {
-        this.validate = validate;
-    }
-
-    public boolean isTimeseries() {
-        return timeseries;
-    }
-
-    public void setTimeseries(boolean timeseries) {
-        this.timeseries = timeseries;
-    }
-
-    @XmlElementWrapper(name = "columns")
-    @XmlElement(name = "column")
-    public List getColumns() {
-        return columns;
-    }
-
-    public void setColumns(List columns) {
-        this.columns = columns;
-    }
-
-    public String getDataSource() {
-        return dataSource;
-    }
-
-    public void setDataSource(String dataSource) {
-        this.dataSource = dataSource;
-    }
+    private List columns;
 
     public int getColumnIndex(String column) {
         int i = 0;
@@ -170,40 +59,4 @@ public int getColumnIndex(String column) {
         return -1;
     }
 
-    public String getSiteId() {
-        return siteId;
-    }
-
-    public void setSiteId(String siteId) {
-        this.siteId = siteId;
-    }
-
-    public String getStreamSource() {
-        return streamSource;
-    }
-
-    public void setStreamSource(String streamSource) {
-        this.streamSource = streamSource;
-    }
-
-    public StreamDefinition copy() {
-        StreamDefinition copied = new StreamDefinition();
-        copied.setColumns(this.getColumns());
-        copied.setDataSource(this.getDataSource());
-        copied.setDescription(this.getDescription());
-        copied.setSiteId(this.getSiteId());
-        copied.setStreamId(this.getStreamId());
-        copied.setGroup(this.getGroup());
-        copied.setTimeseries(this.isTimeseries());
-        copied.setValidate(this.isValidate());
-        return copied;
-    }
-
-    public String getGroup() {
-        return group;
-    }
-
-    public void setGroup(String group) {
-        this.group = group;
-    }
 }
\ No newline at end of file
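[Note: with @Builder(toBuilder = true) and @Data now generating the accessors, equals/hashCode, and builder, the deleted copy() method has a presumable one-line replacement; a sketch under that assumption:

    // Sketch only: toBuilder() seeds a new builder with every field value,
    // so build() yields a shallow copy equivalent to the removed copy() method.
    StreamDefinition copied = original.toBuilder().build();
]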
diff --git a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java
index 3fba396d78..33ccbdfaf5 100644
--- a/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java
+++ b/eagle-flink/src/main/java/org/apache/eagle/flink/StreamEvent.java
@@ -53,13 +53,6 @@ public StreamEvent(String streamId, long timestamp, Object[] data, String metaVersion
         this.setMetaVersion(metaVersion);
     }
 
-    /**
-     * Attach flink collector to the first element of Object[] data
-     */
-    public void attachFlinkCollector(Collector out){
-        data[0] = out;
-    }
-
     public Collector getFlinkCollector(){
         return (Collector)(data[0]);
     }
diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamApp.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamExampleApp.java
similarity index 61%
rename from eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamApp.java
rename to eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamExampleApp.java
index f3026fa350..3b15b87f66 100644
--- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamApp.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/EagleFlinkStreamExampleApp.java
@@ -4,22 +4,31 @@
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
-public class EagleFlinkStreamApp {
+import java.util.HashMap;
+import java.util.Map;
+
+public class EagleFlinkStreamExampleApp {
     public static void main(String[] args) throws Exception {
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
 
+        // prepare stream definition and policy
+        StreamDefinition inStreamDef = MockSampleMetadataFactory.createInStreamDef("sampleStream_1");
+        String policy = MockSampleMetadataFactory.createPolicy();
+        StreamDefinition outStreamDef = MockSampleMetadataFactory.createOutStreamDef("outputStream");
+
         DataStream source = env
             .addSource(new StreamEventSource())
             .name("eagle-events");
 
         DataStream alerts = source
             .keyBy(StreamEvent::getKey)
-            .process(new SampleSiddhiCEPOp())
+            .process(new SiddhiPolicyFlinkProcessor(inStreamDef, policy, outStreamDef))
             .name("eagle-alert-engine");
 
         alerts.addSink(new AlertSink())
             .name("eagle-alert-publisher");
 
         env.execute("Eagle Alert Engine");
+
     }
 }
diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java
index 895f92d4cd..2fd435ee65 100644
--- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/MockSampleMetadataFactory.java
@@ -31,32 +31,52 @@ public static MockStreamMetadataService createSingletonMetadataServiceWithSample
             return mockStreamMetadataServiceInstance;
         }
         mockStreamMetadataServiceInstance = new MockStreamMetadataService();
-        mockStreamMetadataServiceInstance.registerStream("sampleStream", createSampleStreamDefinition("sampleStream"));
-        mockStreamMetadataServiceInstance.registerStream("sampleStream_1", createSampleStreamDefinition("sampleStream_1"));
-        mockStreamMetadataServiceInstance.registerStream("sampleStream_2", createSampleStreamDefinition("sampleStream_2"));
-        mockStreamMetadataServiceInstance.registerStream("sampleStream_3", createSampleStreamDefinition("sampleStream_3"));
-        mockStreamMetadataServiceInstance.registerStream("sampleStream_4", createSampleStreamDefinition("sampleStream_4"));
+        mockStreamMetadataServiceInstance.registerStream("sampleStream", createInStreamDef("sampleStream"));
+        mockStreamMetadataServiceInstance.registerStream("sampleStream_1", createInStreamDef("sampleStream_1"));
+        mockStreamMetadataServiceInstance.registerStream("sampleStream_2", createInStreamDef("sampleStream_2"));
+        mockStreamMetadataServiceInstance.registerStream("sampleStream_3", createInStreamDef("sampleStream_3"));
+        mockStreamMetadataServiceInstance.registerStream("sampleStream_4", createInStreamDef("sampleStream_4"));
         return mockStreamMetadataServiceInstance;
     }
 
-    public static StreamDefinition createSampleStreamDefinition(String streamId) {
-        StreamDefinition sampleStreamDefinition = new StreamDefinition();
-        sampleStreamDefinition.setStreamId(streamId);
-        sampleStreamDefinition.setTimeseries(true);
-        sampleStreamDefinition.setValidate(true);
-        sampleStreamDefinition.setDescription("Schema for " + streamId);
+    public static StreamDefinition createInStreamDef(String streamId) {
+        StreamDefinition.StreamDefinitionBuilder builder = StreamDefinition.builder();
+        builder.streamId(streamId);
+        builder.timeseries(true);
+        builder.validate(true);
+        builder.description("Schema for " + streamId);
         List streamColumns = new ArrayList<>();
 
-        streamColumns.add(new StreamColumn.Builder().name("__collector__").type(StreamColumn.Type.OBJECT).build());
-        streamColumns.add(new StreamColumn.Builder().name("name").type(StreamColumn.Type.STRING).build());
-        streamColumns.add(new StreamColumn.Builder().name("host").type(StreamColumn.Type.STRING).build());
-        streamColumns.add(new StreamColumn.Builder().name("flag").type(StreamColumn.Type.BOOL).build());
-        streamColumns.add(new StreamColumn.Builder().name("timestamp").type(StreamColumn.Type.LONG).build());
-        streamColumns.add(new StreamColumn.Builder().name("value").type(StreamColumn.Type.DOUBLE).build());
-        sampleStreamDefinition.setColumns(streamColumns);
-        return sampleStreamDefinition;
+        streamColumns.add(StreamColumn.builder().name("name").type(StreamColumn.ColumnType.STRING).build());
+        streamColumns.add(StreamColumn.builder().name("host").type(StreamColumn.ColumnType.STRING).build());
+        streamColumns.add(StreamColumn.builder().name("flag").type(StreamColumn.ColumnType.BOOL).build());
+        streamColumns.add(StreamColumn.builder().name("timestamp").type(StreamColumn.ColumnType.LONG).build());
+        streamColumns.add(StreamColumn.builder().name("value").type(StreamColumn.ColumnType.DOUBLE).build());
+        builder.columns(streamColumns);
+        return builder.build();
     }
 
+    public static StreamDefinition createOutStreamDef(String streamId) {
+        StreamDefinition.StreamDefinitionBuilder builder = StreamDefinition.builder();
+        builder.streamId(streamId);
+        builder.timeseries(true);
+        builder.validate(true);
+        builder.description("Schema for " + streamId);
+        List streamColumns = new ArrayList<>();
+
+        streamColumns.add(StreamColumn.builder().name("name").type(StreamColumn.ColumnType.STRING).build());
+        streamColumns.add(StreamColumn.builder().name("host").type(StreamColumn.ColumnType.STRING).build());
+        streamColumns.add(StreamColumn.builder().name("flag").type(StreamColumn.ColumnType.BOOL).build());
+        streamColumns.add(StreamColumn.builder().name("timestamp").type(StreamColumn.ColumnType.LONG).build());
+        streamColumns.add(StreamColumn.builder().name("value").type(StreamColumn.ColumnType.DOUBLE).build());
+        builder.columns(streamColumns);
+        return builder.build();
+    }
+
+
+    public static String createPolicy(){
+        return "name == \"cpu\" and value > 50.0";
+    }
 
     /**
      * Policy: from sampleStream_1[name == "cpu" and value > 50.0] select name, host, flag, value insert into outputStream;
diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java
deleted file mode 100644
index 52e6aa39f0..0000000000
--- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SampleSiddhiCEPOp.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package org.apache.eagle.flink.test;
-
-import org.apache.eagle.flink.*;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
-import org.apache.flink.util.Collector;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class SampleSiddhiCEPOp extends KeyedProcessFunction {
-    private static final Logger LOG = LoggerFactory.getLogger(SampleSiddhiCEPOp.class);
-    private transient SiddhiPolicyHandler handler;
-
-    @Override
-    public void open(Configuration parameters) throws Exception{
-        handler = new SiddhiPolicyHandler(createDefinition("sampleStream_1"), 0);
-        PolicyDefinition policyDefinition = MockSampleMetadataFactory.createSingleMetricSamplePolicy();
-        PolicyHandlerContext context = new PolicyHandlerContext();
-        context.setPolicyDefinition(policyDefinition);
-        context.setPolicyCounter(new MyStreamCounter());
-        context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId"));
-        handler.prepare(context);
-    }
-
-    /**
-     * Collector is not defined in prepare stage, that is why handler.send is used for collect output here
-     */
-    @Override
-    public void processElement(StreamEvent value, Context ctx, Collector out) throws Exception {
-        handler.send(value, new org.apache.eagle.flink.Collector(){
-            @Override
-            public void emit(AlertStreamEvent o) {
-                out.collect(o);
-            }
-        });
-    }
-
-    private Map createDefinition(String... streamIds) {
-        Map sds = new HashMap<>();
-        for (String streamId : streamIds) {
-            // construct StreamDefinition
-            StreamDefinition sd = MockSampleMetadataFactory.createSampleStreamDefinition(streamId);
-            sds.put(streamId, sd);
-        }
-        return sds;
-    }
-
-    @Override
-    public void close() throws Exception {
-        handler.close();
-    }
-}
diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java
index 868f49f763..2f06b9a89b 100755
--- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/SiddhiCEPPolicyEventHandlerTest.java
@@ -16,147 +16,178 @@
  */
 package org.apache.eagle.flink.test;
 
+import lombok.extern.slf4j.Slf4j;
 import org.apache.eagle.flink.*;
+import org.apache.flink.api.common.functions.FilterFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.DataStreamUtils;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.*;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.TimeUnit;
 
+import static org.junit.Assert.fail;
+
+@Slf4j
 public class SiddhiCEPPolicyEventHandlerTest {
     private final static Logger LOG = LoggerFactory.getLogger(SiddhiCEPPolicyEventHandlerTest.class);
 
-    private Map createDefinition(String... streamIds) {
-        Map sds = new HashMap<>();
-        for (String streamId : streamIds) {
-            // construct StreamDefinition
-            StreamDefinition sd = MockSampleMetadataFactory.createSampleStreamDefinition(streamId);
-            sds.put(streamId, sd);
-        }
-        return sds;
-    }
-
-    /**
-     * use Siddhi API to go through
-     * 1) stream definition
-     * 2) policy definition
-     * 3) output collector
-     */
     @Test
-    public void testSimpleSiddhiPolicyEvaluation(){
-
-    }
+    public void easyTest() throws Exception {
+        ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
 
-    @Test
-    public void testBySendSimpleEvent() throws Exception {
-        SiddhiPolicyHandler handler;
-        MockStreamCollector collector;
+        DataSet data = env.readTextFile("file:///tmp/eagle_flink_input");
 
-        handler = new SiddhiPolicyHandler(createDefinition("sampleStream_1"), 0);
-        collector = new MockStreamCollector();
-        PolicyDefinition policyDefinition = MockSampleMetadataFactory.createSingleMetricSamplePolicy();
-        PolicyHandlerContext context = new PolicyHandlerContext();
-        context.setPolicyDefinition(policyDefinition);
-        context.setPolicyCounter(new MyStreamCounter());
-        context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId"));
-        handler.prepare(context);
-        StreamEvent event = StreamEvent.builder()
-            .schema(MockSampleMetadataFactory.createSampleStreamDefinition("sampleStream_1"))
-            .streamId("sampleStream_1")
-            .timestamep(System.currentTimeMillis())
-            .attributes(new HashMap() {{
-                put("name", "cpu");
-                put("value", 60.0);
-                put("bad", "bad column value");
-            }}).build();
-        handler.send(event, collector);
-        handler.close();
+        data.filter(new FilterFunction() {
+            public boolean filter(String value) {
+                return value.startsWith("x");
+            }
+        })
+            .writeAsText("file:///tmp/eagle_flink_output");
 
-        Assert.assertTrue(collector.size() == 1);
+        env.execute();
     }
 
-    @SuppressWarnings("serial")
     @Test
-    public void testWithTwoStreamJoinPolicy() throws Exception {
-        Map ssd = createDefinition("sampleStream_1", "sampleStream_2");
-
-        PolicyDefinition policyDefinition = new PolicyDefinition();
-        policyDefinition.setName("SampleJoinPolicyForTest");
-        policyDefinition.setInputStreams(Arrays.asList("sampleStream_1", "sampleStream_2"));
-        policyDefinition.setOutputStreams(Collections.singletonList("joinedStream"));
-        policyDefinition.setDefinition(new PolicyDefinition.Definition(PolicyStreamHandlers.SIDDHI_ENGINE,
-            "from sampleStream_1#window.length(10) as left " +
-                "join sampleStream_2#window.length(10) as right " +
-                "on left.name == right.name and left.value == right.value " +
-                "select left.timestamp,left.name,left.value " +
-                "insert into joinedStream"));
-        policyDefinition.setPartitionSpec(Collections.singletonList(MockSampleMetadataFactory.createSampleStreamGroupbyPartition("sampleStream_1", Collections.singletonList("name"))));
-        SiddhiPolicyHandler handler;
-        Semaphore mutex = new Semaphore(0);
-        List alerts = new ArrayList<>(0);
-        Collector collector = (event) -> {
-            LOG.info("Collected {}", event);
-            Assert.assertTrue(event != null);
-            alerts.add(event);
-            mutex.release();
-        };
-
-        handler = new SiddhiPolicyHandler(ssd, 0);
-        PolicyHandlerContext context = new PolicyHandlerContext();
-        context.setPolicyDefinition(policyDefinition);
-        context.setPolicyCounter(new MyStreamCounter());
-        context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId"));
-        handler.prepare(context);
-
-
-        long ts_1 = System.currentTimeMillis();
-        long ts_2 = System.currentTimeMillis() + 1;
-
-        handler.send(StreamEvent.builder()
-            .schema(ssd.get("sampleStream_1"))
-            .streamId("sampleStream_1")
-            .timestamep(ts_1)
-            .attributes(new HashMap() {{
-                put("name", "cpu");
-                put("value", 60.0);
-                put("bad", "bad column value");
-            }}).build(), collector);
-
-        handler.send(StreamEvent.builder()
-            .schema(ssd.get("sampleStream_2"))
-            .streamId("sampleStream_2")
-            .timestamep(ts_2)
-            .attributes(new HashMap() {{
-                put("name", "cpu");
-                put("value", 61.0);
-            }}).build(), collector);
-
-        handler.send(StreamEvent.builder()
-            .schema(ssd.get("sampleStream_2"))
-            .streamId("sampleStream_2")
-            .timestamep(ts_2)
-            .attributes(new HashMap() {{
-                put("name", "disk");
-                put("value", 60.0);
-            }}).build(), collector);
-
-        handler.send(StreamEvent.builder()
-            .schema(ssd.get("sampleStream_2"))
-            .streamId("sampleStream_2")
-            .timestamep(ts_2)
-            .attributes(new HashMap() {{
-                put("name", "cpu");
-                put("value", 60.0);
-            }}).build(), collector);
-
-        handler.close();
-
-        Assert.assertTrue("Should get result in 5 s", mutex.tryAcquire(5, TimeUnit.SECONDS));
-        Assert.assertEquals(1, alerts.size());
-        Assert.assertEquals("joinedStream", alerts.get(0).getStreamId());
-        Assert.assertEquals("cpu", alerts.get(0).getData()[1]);
+    public void testSingleEventAndAlert(){
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
+
+        // prepare stream definition and policy
+        StreamDefinition inStreamDef = MockSampleMetadataFactory.createInStreamDef("sampleStream_1");
+        String policy = MockSampleMetadataFactory.createPolicy();
+        StreamDefinition outStreamDef = MockSampleMetadataFactory.createOutStreamDef("outputStream");
+
+        // bounded stream events
+        DataStream source = env
+            .addSource(new StreamEventSource(false))
+            .name("eagle-events");
+
+        DataStream alerts = source
+            .keyBy(StreamEvent::getKey)
+            .process(new SiddhiPolicyFlinkProcessor(inStreamDef, policy, outStreamDef))
+            .name("eagle-alert-engine");
+        try {
+            Iterator myOutput = DataStreamUtils.collect(alerts);
+            Assert.assertTrue(myOutput.hasNext());
+            Assert.assertTrue(myOutput.next().getDataMap().get("name").equals("cpu"));
+        } catch (Exception ex) {
+            log.error("", ex);
+            fail("Should not have thrown any exception, but thrown: " + ex.getMessage());
+        }
     }
+
+//    @Test
+//    public void testBySendSimpleEvent() throws Exception {
+//        SiddhiPolicyHandler handler;
+//        MockStreamCollector collector;
+//
+//        handler = new SiddhiPolicyHandler(createDefinition("sampleStream_1"), 0);
+//        collector = new MockStreamCollector();
+//        PolicyDefinition policyDefinition = MockSampleMetadataFactory.createSingleMetricSamplePolicy();
+//        PolicyHandlerContext context = new PolicyHandlerContext();
+//        context.setPolicyDefinition(policyDefinition);
+//        context.setPolicyCounter(new MyStreamCounter());
+//        context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId"));
+//        handler.prepare(context);
+//        StreamEvent event = StreamEvent.builder()
+//            .schema(MockSampleMetadataFactory.createInStreamDef("sampleStream_1"))
+//            .streamId("sampleStream_1")
+//            .timestamep(System.currentTimeMillis())
+//            .attributes(new HashMap() {{
+//                put("name", "cpu");
+//                put("value", 60.0);
+//                put("bad", "bad column value");
+//            }}).build();
+//        handler.send(event, collector);
+//        handler.close();
+//
+//        Assert.assertTrue(collector.size() == 1);
+//    }
+//
+//    @SuppressWarnings("serial")
+//    @Test
+//    public void testWithTwoStreamJoinPolicy() throws Exception {
+//        Map ssd = createDefinition("sampleStream_1", "sampleStream_2");
+//
+//        PolicyDefinition policyDefinition = new PolicyDefinition();
+//        policyDefinition.setName("SampleJoinPolicyForTest");
+//        policyDefinition.setInputStreams(Arrays.asList("sampleStream_1", "sampleStream_2"));
+//        policyDefinition.setOutputStreams(Collections.singletonList("joinedStream"));
+//        policyDefinition.setDefinition(new PolicyDefinition.Definition(PolicyStreamHandlers.SIDDHI_ENGINE,
+//            "from sampleStream_1#window.length(10) as left " +
+//                "join sampleStream_2#window.length(10) as right " +
+//                "on left.name == right.name and left.value == right.value " +
+//                "select left.timestamp,left.name,left.value " +
+//                "insert into joinedStream"));
+//        policyDefinition.setPartitionSpec(Collections.singletonList(MockSampleMetadataFactory.createSampleStreamGroupbyPartition("sampleStream_1", Collections.singletonList("name"))));
+//        SiddhiPolicyHandler handler;
+//        Semaphore mutex = new Semaphore(0);
+//        List alerts = new ArrayList<>(0);
+//        Collector collector = (event) -> {
+//            LOG.info("Collected {}", event);
+//            Assert.assertTrue(event != null);
+//            alerts.add(event);
+//            mutex.release();
+//        };
+//
+//        handler = new SiddhiPolicyHandler(ssd, 0);
+//        PolicyHandlerContext context = new PolicyHandlerContext();
+//        context.setPolicyDefinition(policyDefinition);
+//        context.setPolicyCounter(new MyStreamCounter());
+//        context.setPolicyEvaluator(new PolicyGroupEvaluatorImpl("evalutorId"));
+//        handler.prepare(context);
+//
+//
+//        long ts_1 = System.currentTimeMillis();
+//        long ts_2 = System.currentTimeMillis() + 1;
+//
+//        handler.send(StreamEvent.builder()
+//            .schema(ssd.get("sampleStream_1"))
+//            .streamId("sampleStream_1")
+//            .timestamep(ts_1)
+//            .attributes(new HashMap() {{
+//                put("name", "cpu");
+//                put("value", 60.0);
+//                put("bad", "bad column value");
+//            }}).build(), collector);
+//
+//        handler.send(StreamEvent.builder()
+//            .schema(ssd.get("sampleStream_2"))
+//            .streamId("sampleStream_2")
+//            .timestamep(ts_2)
+//            .attributes(new HashMap() {{
+//                put("name", "cpu");
+//                put("value", 61.0);
+//            }}).build(), collector);
+//
+//        handler.send(StreamEvent.builder()
+//            .schema(ssd.get("sampleStream_2"))
+//            .streamId("sampleStream_2")
+//            .timestamep(ts_2)
+//            .attributes(new HashMap() {{
+//                put("name", "disk");
+//                put("value", 60.0);
+//            }}).build(), collector);
+//
+//        handler.send(StreamEvent.builder()
+//            .schema(ssd.get("sampleStream_2"))
+//            .streamId("sampleStream_2")
+//            .timestamep(ts_2)
+//            .attributes(new HashMap() {{
+//                put("name", "cpu");
+//                put("value", 60.0);
+//            }}).build(), collector);
+//
+//        handler.close();
+//
+//        Assert.assertTrue("Should get result in 5 s", mutex.tryAcquire(5, TimeUnit.SECONDS));
+//        Assert.assertEquals(1, alerts.size());
+//        Assert.assertEquals("joinedStream", alerts.get(0).getStreamId());
+//        Assert.assertEquals("cpu", alerts.get(0).getData()[1]);
+//    }
 }
\ No newline at end of file
diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java
index 1034fb7ce5..f8e7190600 100644
--- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventIterator.java
@@ -79,7 +79,7 @@ public StreamEvent next() {
 
     private static StreamEvent createSampleEvent(long timestamp, String name, double value) {
         return StreamEvent.builder()
-            .schema(MockSampleMetadataFactory.createSampleStreamDefinition("sampleStream_1"))
+            .schema(MockSampleMetadataFactory.createInStreamDef("sampleStream_1"))
             .streamId("sampleStream_1")
             .timestamep(start + timestamp)
             .attributes(new HashMap() {{
diff --git a/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java
index 20febb1f28..d0ed90771b 100644
--- a/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java
+++ b/eagle-flink/src/test/java/org/apache/eagle/flink/test/StreamEventSource.java
@@ -15,8 +15,12 @@ public class StreamEventSource extends FromIteratorFunction {
 
     private static final long serialVersionUID = 1L;
 
+    public StreamEventSource(boolean unbounded) {
+        super(new RateLimitedIterator<>(unbounded ? StreamEventIterator.unbounded() : StreamEventIterator.bounded()));
+    }
+
     public StreamEventSource() {
-        super(new RateLimitedIterator<>(StreamEventIterator.unbounded()));
+        this(true);
     }
 
     private static class RateLimitedIterator implements Iterator, Serializable {