bigdl.json: Zeppelin notebook "bigdl" (id 2CNY9TFMD), loading BigDL into the Spark interpreter and training the BigDL text-classification example. Each paragraph below shows its source and the recorded run status/result from the export.
{"paragraphs":[{"text":"%dep\n//z.load(\"/home/osboxes/Desktop/dist-spark-1.6.0-scala-2.10.5-linux64-0.1.0-dist/lib/bigdl-0.1.0-jar-with-dependencies.jar\") // dowloaded from bigdl website\nz.load(\"/home/osboxes/Desktop/BigDL-master/dist/lib/bigdl-0.2.0-SNAPSHOT-jar-with-dependencies.jar\") // build using source code","dateUpdated":"2017-07-11T12:58:26-0400","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala"},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1499718696200_-220166775","id":"20170710-163136_604089172","result":{"code":"SUCCESS","type":"TEXT","msg":"res0: org.apache.zeppelin.dep.Dependency = org.apache.zeppelin.dep.Dependency@831f419\n"},"dateCreated":"2017-07-10T04:31:36-0400","dateStarted":"2017-07-11T12:58:26-0400","dateFinished":"2017-07-11T12:58:33-0400","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:2375"},{"text":"import com.intel.analytics.bigdl.example.utils._\nimport com.intel.analytics.bigdl.nn.{ClassNLLCriterion, _}\nimport com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, T}\nimport org.apache.log4j.{Level => Levle4j, Logger => Logger4j}\nimport org.slf4j.{Logger, LoggerFactory}\nimport scopt.OptionParser","dateUpdated":"2017-07-11T12:58:35-0400","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1499718731549_-1356268981","id":"20170710-163211_730961508","result":{"code":"SUCCESS","type":"TEXT","msg":"import com.intel.analytics.bigdl.example.utils._\nimport com.intel.analytics.bigdl.nn.{ClassNLLCriterion, _}\nimport com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, T}\nimport org.apache.log4j.{Level=>Levle4j, Logger=>Logger4j}\nimport org.slf4j.{Logger, LoggerFactory}\nimport scopt.OptionParser\n"},"dateCreated":"2017-07-10T04:32:11-0400","dateStarted":"2017-07-11T12:58:35-0400","dateFinished":"2017-07-11T12:58:46-0400","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:2376"},{"text":"val log: Logger = LoggerFactory.getLogger(this.getClass)\n LoggerFilter.redirectSparkInfoLogs()\n Logger4j.getLogger(\"com.intel.analytics.bigdl.optim\").setLevel(Levle4j.INFO)","dateUpdated":"2017-07-11T12:58:51-0400","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala"},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1499719348369_-1475884850","id":"20170710-164228_1780828477","result":{"code":"SUCCESS","type":"TEXT","msg":"log: org.slf4j.Logger = org.slf4j.impl.Log4jLoggerAdapter($iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC)\n"},"dateCreated":"2017-07-10T04:42:28-0400","dateStarted":"2017-07-11T12:58:51-0400","dateFinished":"2017-07-11T12:58:51-0400","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:2377"},{"text":"sc.stop()\n\n\nval localParser = new OptionParser[TextClassificationParams](\"BigDL Example\") {\n opt[String]('b', \"baseDir\")\n .required()\n .text(\"Base dir containing the training and word2Vec data\")\n .action((x, c) => c.copy(baseDir = x))\n opt[String]('p', \"partitionNum\")\n .text(\"you may want to tune the partitionNum if run into spark mode\")\n .action((x, c) => c.copy(partitionNum = x.toInt))\n opt[String]('s', \"maxSequenceLength\")\n 
.text(\"maxSequenceLength\")\n .action((x, c) => c.copy(maxSequenceLength = x.toInt))\n opt[String]('w', \"maxWordsNum\")\n .text(\"maxWordsNum\")\n .action((x, c) => c.copy(maxWordsNum = x.toInt))\n opt[String]('l', \"trainingSplit\")\n .text(\"trainingSplit\")\n .action((x, c) => c.copy(trainingSplit = x.toDouble))\n opt[String]('z', \"batchSize\")\n .text(\"batchSize\")\n .action((x, c) => c.copy(batchSize = x.toInt))\n }\n\n\n val args = Array(\"--batchSize\", \"128\", \"--baseDir\", \"/home/osboxes/Desktop/gigaspaces-insightedge-1.0.0-premium/data\", \"--partitionNum\", \"4\")\n localParser.parse(args, TextClassificationParams()).map { param =>\n log.info(s\"Current parameters: $param\")\n val textClassification = new TextClassifier(param)\n textClassification.train()\n }","dateUpdated":"2017-07-11T12:58:56-0400","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala","lineNumbers":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1499719388981_-1398873773","id":"20170710-164308_737979928","result":{"code":"ERROR","type":"TEXT","msg":"localParser: scopt.OptionParser[com.intel.analytics.bigdl.example.utils.TextClassificationParams] = OptionParser(BigDL Example)\nargs: Array[String] = Array(--batchSize, 128, --baseDir, /home/osboxes/Desktop/gigaspaces-insightedge-1.0.0-premium/data, --partitionNum, 4)\norg.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 6.0 failed 1 times, most recent failure: Lost task 0.0 in stage 6.0 (TID 20, localhost): java.lang.OutOfMemoryError: Java heap space\n\tat java.lang.reflect.Array.newInstance(Array.java:75)\n\tat java.io.ObjectInputStream.readArray(ObjectInputStream.java:1678)\n\tat java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)\n\tat java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)\n\tat java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)\n\tat java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)\n\tat java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)\n\tat java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)\n\tat java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)\n\tat java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)\n\tat java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)\n\tat java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)\n\tat java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)\n\tat java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)\n\tat java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)\n\tat java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)\n\tat org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)\n\tat org.apache.spark.serializer.DeserializationStream.readValue(Serializer.scala:171)\n\tat org.apache.spark.serializer.DeserializationStream$$anon$2.getNext(Serializer.scala:201)\n\tat org.apache.spark.serializer.DeserializationStream$$anon$2.getNext(Serializer.scala:198)\n\tat org.apache.spark.util.NextIterator.hasNext(NextIterator.scala:73)\n\tat scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)\n\tat scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)\n\tat org.apache.spark.util.CompletionIterator.hasNext(CompletionIterator.scala:32)\n\tat 
Result (ERROR): the job dies with a heap-space OutOfMemoryError while a task deserializes its input.

    localParser: scopt.OptionParser[com.intel.analytics.bigdl.example.utils.TextClassificationParams] = OptionParser(BigDL Example)
    args: Array[String] = Array(--batchSize, 128, --baseDir, /home/osboxes/Desktop/gigaspaces-insightedge-1.0.0-premium/data, --partitionNum, 4)
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 6.0 failed 1 times, most recent failure: Lost task 0.0 in stage 6.0 (TID 20, localhost): java.lang.OutOfMemoryError: Java heap space
        at java.lang.reflect.Array.newInstance(Array.java:75)
        at java.io.ObjectInputStream.readArray(ObjectInputStream.java:1678)
        ... (java.io.ObjectInputStream frames)
        at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
        at org.apache.spark.serializer.DeserializationStream$$anon$2.getNext(Serializer.scala:201)
        ... (iterator frames)

    Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1431)
        ... (DAGScheduler/SparkContext frames)
        at org.apache.spark.rdd.RDD.count(RDD.scala:1143)
        at com.intel.analytics.bigdl.dataset.DistributedDataSet$$anon$5.cache(DataSet.scala:188)
        at com.intel.analytics.bigdl.optim.DistriOptimizer.prepareInput(DistriOptimizer.scala:707)
        at com.intel.analytics.bigdl.optim.DistriOptimizer.optimize(DistriOptimizer.scala:727)
        at com.intel.analytics.bigdl.example.utils.TextClassifier.train(TextClassifier.scala:243)
        ... (Zeppelin/Spark REPL wrapper frames)
    Caused by: java.lang.OutOfMemoryError: Java heap space
        ... (same deserialization frames as the task failure above)
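The failure happens while BigDL caches the training set (DistributedDataSet.cache driving RDD.count), so the stock interpreter heap is simply too small for this data. One remedy sketch, assuming a standard Zeppelin install where conf/zeppelin-env.sh is sourced at startup (the variable names are from the stock zeppelin-env.sh template; the sizes are illustrative):

    # conf/zeppelin-env.sh (restart Zeppelin after editing)
    export ZEPPELIN_INTP_MEM="-Xms1g -Xmx4g"                               # interpreter JVM heap
    export SPARK_SUBMIT_OPTIONS="--driver-memory 4g --executor-memory 4g"  # used when Zeppelin launches Spark via spark-submit

Lowering --batchSize or the data size is the in-notebook alternative if the heap cannot grow.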
Paragraph 5 (id 20170710-163804_868742217, status: ERROR)
Empty paragraph; the run fails because the connection to the remote interpreter process is gone, consistent with the interpreter JVM having died after the heap-space error above.

Error:
    org.apache.thrift.transport.TTransportException
        at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
        at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
        ... (thrift protocol frames)
        at org.apache.zeppelin.interpreter.remote.RemoteInterpreter.interpret(RemoteInterpreter.java:269)
        at org.apache.zeppelin.notebook.Paragraph.jobRun(Paragraph.java:279)
        ... (scheduler/executor frames)
Paragraph 6 (id 20170711-130142_672634945, status: READY)
Empty trailing paragraph; never run.
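Outside Zeppelin, the same example can be launched with spark-submit, which makes the memory flags explicit and sidesteps interpreter restarts. A sketch, assuming the runnable text-classification main class shipped in the BigDL source tree (the class name below is an assumption, not verified against this build):

    spark-submit --master local[4] \
      --driver-memory 4g --executor-memory 4g \
      --class com.intel.analytics.bigdl.example.textclassification.TextClassifier \
      /home/osboxes/Desktop/BigDL-master/dist/lib/bigdl-0.2.0-SNAPSHOT-jar-with-dependencies.jar \
      --baseDir /home/osboxes/Desktop/gigaspaces-insightedge-1.0.0-premium/data \
      --batchSize 128 --partitionNum 4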