diff --git a/assembly.sbt b/assembly.sbt
new file mode 100644
index 0000000..c08c87c
--- /dev/null
+++ b/assembly.sbt
@@ -0,0 +1,22 @@
+assemblyJarName in assembly := "PowerPanelKSO.jar"
+
+target in assembly := file("src/universal/PowerPanel")
+
+assemblyMergeStrategy in assembly := {
+  case PathList("javax", "servlet", xs @ _*) => MergeStrategy.last
+  case PathList("javax", "activation", xs @ _*) => MergeStrategy.last
+  case PathList("org", "apache", xs @ _*) => MergeStrategy.last
+  case PathList("com", "google", xs @ _*) => MergeStrategy.last
+  case PathList("com", "esotericsoftware", xs @ _*) => MergeStrategy.last
+  case PathList("com", "codahale", xs @ _*) => MergeStrategy.last
+  case PathList("com", "yammer", xs @ _*) => MergeStrategy.last
+  case "about.html" => MergeStrategy.rename
+  case "META-INF/ECLIPSEF.RSA" => MergeStrategy.last
+  case "META-INF/mailcap" => MergeStrategy.last
+  case "META-INF/mimetypes.default" => MergeStrategy.last
+  case "plugin.properties" => MergeStrategy.last
+  case "log4j.properties" => MergeStrategy.last
+  case x =>
+    val oldStrategy = (assemblyMergeStrategy in assembly).value
+    oldStrategy(x)
+}
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000..c26ab7e
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,34 @@
+name := "PowerPanel Data Processor"
+
+version := "0.0.1"
+
+scalaVersion := "2.11.12" // Spark 1.6.x and Kafka 0.8.x are not published for Scala 2.12
+
+enablePlugins(UniversalPlugin)
+
+packageZipTarball in Universal := {
+  val originalFileName = (packageZipTarball in Universal).value
+  val (base, ext) = originalFileName.baseAndExt
+  val newFileName = file(originalFileName.getParent) / (base + ".tar.gz")
+  IO.move(originalFileName, newFileName)
+  newFileName
+}
+
+libraryDependencies ++= Seq(
+  "org.apache.spark" %% "spark-core" % "1.6.0" % "provided",
+  "org.apache.spark" %% "spark-streaming" % "1.6.0" % "provided",
+  "org.apache.spark" %% "spark-streaming-kafka" % "1.6.0",
+  "org.apache.kafka" %% "kafka" % "0.8.2.2",
+  "org.apache.avro" % "avro" % "1.7.7",
+  "org.codehaus.jackson" % "jackson-mapper-asl" % "1.9.13",
+  "joda-time" % "joda-time" % "2.7",
+  "log4j" % "log4j" % "1.2.14",
+  "org.apache.httpcomponents" % "httpcore" % "4.2.5" % "provided",
+  "org.apache.httpcomponents" % "httpclient" % "4.2.5" % "provided"
+)
+
+commands += Command.command("packageApp") { state =>
+  "assembly" ::
+    "universal:packageZipTarball" ::
+    state
+}
diff --git a/project/assembly.sbt b/project/assembly.sbt
new file mode 100644
index 0000000..9c01471
--- /dev/null
+++ b/project/assembly.sbt
@@ -0,0 +1,3 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9")
+// sbt-native-packager provides the UniversalPlugin enabled in build.sbt
+addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.25")
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000..c0bab04
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1 @@
+sbt.version=1.2.8
diff --git a/src/universal/powerpanel/application.properties b/src/universal/powerpanel/application.properties
new file mode 100644
index 0000000..aabe4b4
--- /dev/null
+++ b/src/universal/powerpanel/application.properties
@@ -0,0 +1,9 @@
+kafka.brokers=${environment_kafka_brokers}
+kafka.zookeeper=${environment_kafka_zookeeper}
+app.checkpoint_path=${component_checkpoint_path}
+app.job_name=${component_job_name}
+app.processing_parallelism=${component_processing_parallelism}
+app.batch_size_seconds=${component_batch_size_seconds}
+kafka.topic=${component_input_topic}
+kafka.consume_from_beginning=${component_consume_from_beginning}
+opentsdb.ip=${environment_opentsdb}
diff --git a/src/universal/powerpanel/log4j.properties b/src/universal/powerpanel/log4j.properties
new file mode 100644
index 0000000..e0b077e
--- /dev/null
+++ b/src/universal/powerpanel/log4j.properties
@@ -0,0 +1,11 @@
+log4j.rootLogger=ERROR,rolling
+log4j.logger.com.cisco.pnda=${component_log_level},rolling
+log4j.additivity.com.cisco.pnda=false
+
+log4j.appender.rolling=org.apache.log4j.RollingFileAppender
+log4j.appender.rolling.layout=org.apache.log4j.PatternLayout
+log4j.appender.rolling.layout.conversionPattern=[%d] %p %m (%c)%n
+log4j.appender.rolling.maxFileSize=50MB
+log4j.appender.rolling.maxBackupIndex=1
+log4j.appender.rolling.file=${spark.yarn.app.container.log.dir}/spark.log
+log4j.appender.rolling.encoding=UTF-8
diff --git a/src/universal/powerpanel/opentsdb.json b/src/universal/powerpanel/opentsdb.json
new file mode 100644
index 0000000..c81e1bc
--- /dev/null
+++ b/src/universal/powerpanel/opentsdb.json
@@ -0,0 +1,131 @@
+[
+  {
+    "name": "l1.l2.voltage"
+  },
+  {
+    "name": "l2.l3.voltage"
+  },
+  {
+    "name": "l3.l1.voltage"
+  },
+  {
+    "name": "l1.l0.voltage"
+  },
+  {
+    "name": "l2.l0.voltage"
+  },
+  {
+    "name": "l3.l0.voltage"
+  },
+  {
+    "name": "l1.current"
+  },
+  {
+    "name": "l2.current"
+  },
+  {
+    "name": "l3.current"
+  },
+  {
+    "name": "frequency"
+  },
+  {
+    "name": "total.kw"
+  },
+  {
+    "name": "rate.kw"
+  },
+  {
+    "name": "total.pf"
+  },
+  {
+    "name": "l1.kw"
+  },
+  {
+    "name": "l1.pf"
+  },
+  {
+    "name": "l2.kw"
+  },
+  {
+    "name": "l2.pf"
+  },
+  {
+    "name": "l3.kw"
+  },
+  {
+    "name": "l3.pf"
+  },
+  {
+    "name": "total.kvar"
+  },
+  {
+    "name": "l1.kvar"
+  },
+  {
+    "name": "l2.kvar"
+  },
+  {
+    "name": "l3.kvar"
+  },
+  {
+    "name": "total.kva"
+  },
+  {
+    "name": "l1.kva"
+  },
+  {
+    "name": "l2.kva"
+  },
+  {
+    "name": "l3.kva"
+  },
+  {
+    "name": "oil.pressure"
+  },
+  {
+    "name": "coolant.temp"
+  },
+  {
+    "name": "engine.rpm"
+  },
+  {
+    "name": "battery.voltage"
+  },
+  {
+    "name": "fuel.pressure"
+  },
+  {
+    "name": "fuel.temp"
+  },
+  {
+    "name": "fuel.rate"
+  },
+  {
+    "name": "coolant.pressure"
+  },
+  {
+    "name": "coolant.level"
+  },
+  {
+    "name": "oil.temp"
+  },
+  {
+    "name": "oil.level"
+  },
+  {
+    "name": "crankcase.pressure"
+  },
+  {
+    "name": "ambient.temp"
+  },
+  {
+    "name": "ecm.battery.voltage"
+  },
+  {
+    "name": "intake.temp"
+  },
+  {
+    "name": "intake.pressure"
+  }
+]
diff --git a/src/universal/powerpanel/properties.json b/src/universal/powerpanel/properties.json
new file mode 100644
index 0000000..55cd1bc
--- /dev/null
+++ b/src/universal/powerpanel/properties.json
@@ -0,0 +1,11 @@
+{
+  "main_class": "com.powerpanel.kso.Consumer",
+  "main_jar": "PowerPanelKSO.jar",
+  "log_level": "INFO",
+  "batch_size_seconds": "2",
+  "processing_parallelism": "1",
+  "checkpoint_path": "",
+  "input_topic": "GeneratorData",
+  "consume_from_beginning": "false",
+  "spark_submit_args": ""
+}
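
Usage sketch: assuming an sbt launcher compatible with project/build.properties (sbt 1.2.8) is on the PATH, the whole package is produced by the custom command defined in build.sbt:

    $ sbt packageApp
    # "packageApp" queues "assembly" followed by "universal:packageZipTarball":
    # the fat jar lands in src/universal/PowerPanel/PowerPanelKSO.jar first, so
    # sbt-native-packager includes it when archiving src/universal, and the
    # packageZipTarball override then renames the resulting .tgz to .tar.gz.

The ${environment_*} and ${component_*} placeholders in application.properties and log4j.properties are left unresolved in the tarball; they are expected to be filled in at deployment time (e.g. by a PNDA-style deployment manager, which the com.cisco.pnda logger configuration suggests), not by sbt.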