diff --git a/.classpath b/.classpath
deleted file mode 100644
index baa990f..0000000
--- a/.classpath
+++ /dev/null
@@ -1,612 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index c6ce5a8..7eb27da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,76 @@
-# Created by https://www.gitignore.io/api/sbt,scala
-# Edit at https://www.gitignore.io/?templates=sbt,scala
+
+# Created by https://www.gitignore.io/api/sbt,scala,eclipse
+# Edit at https://www.gitignore.io/?templates=sbt,scala,eclipse
+
+### Eclipse ###
+
+.metadata
+bin/
+tmp/
+*.tmp
+*.bak
+*.swp
+*~.nib
+local.properties
+.settings/
+.loadpath
+.recommenders
+
+# External tool builders
+.externalToolBuilders/
+
+# Locally stored "Eclipse launch configurations"
+*.launch
+
+# PyDev specific (Python IDE for Eclipse)
+*.pydevproject
+
+# CDT-specific (C/C++ Development Tooling)
+.cproject
+
+# CDT- autotools
+.autotools
+
+# Java annotation processor (APT)
+.factorypath
+
+# PDT-specific (PHP Development Tools)
+.buildpath
+
+# sbteclipse plugin
+.target
+
+# Tern plugin
+.tern-project
+
+# TeXlipse plugin
+.texlipse
+
+# STS (Spring Tool Suite)
+.springBeans
+
+# Code Recommenders
+.recommenders/
+
+# Annotation Processing
+.apt_generated/
+
+# Scala IDE specific (Scala & Java development for Eclipse)
+.cache-main
+.scala_dependencies
+.worksheet
+
+### Eclipse Patch ###
+# Eclipse Core
+.project
+
+# JDT-specific (Eclipse Java Development Tools)
+.classpath
+
+# Annotation Processing
+.apt_generated
+
+.sts4-cache/
### SBT ###
# Simple Build Tool
@@ -19,4 +90,4 @@ project/plugins/project/
*.class
*.log
-# End of https://www.gitignore.io/api/sbt,scala
+# End of https://www.gitignore.io/api/sbt,scala,eclipse
\ No newline at end of file
diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs
deleted file mode 100644
index 284c145..0000000
--- a/.settings/org.eclipse.core.resources.prefs
+++ /dev/null
@@ -1,3 +0,0 @@
-#Generated by sbteclipse
-#Mon Mar 04 17:38:21 EST 2019
-encoding/=UTF-8
diff --git a/build.sbt b/build.sbt
index 9be4206..f3b3882 100644
--- a/build.sbt
+++ b/build.sbt
@@ -17,8 +17,8 @@ packageZipTarball in Universal := {
libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-core" % "2.4.0",
"org.apache.spark" %% "spark-streaming" % "2.4.0",
- "org.apache.spark" %% "spark-streaming-kafka" % "1.6.3",
- "org.apache.kafka" %% "kafka" % "0.9.0.1",
+ "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.4.0",
+ "org.apache.kafka" %% "kafka" % "2.0.0",
"org.apache.avro" % "avro" % "1.8.2",
"org.codehaus.jackson" % "jackson-mapper-asl" % "1.9.13",
"joda-time" % "joda-time" % "2.7",
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
new file mode 100644
index 0000000..316e68b
--- /dev/null
+++ b/src/main/resources/application.properties
@@ -0,0 +1,7 @@
+job_name="pp.kso.collectd"
+zookeeper_connect="sd5-data.engr.uconn.edu:2181"
+broker_list="sd5-data.engr.uconn.edu:9092"
+consumer_group="pp.kso.collectd"
+zookeeper_timeout=6000
+topic_list="GeneratorData"
+opentsdb="sd5-data.engr.uconn.edu:4242"
\ No newline at end of file
diff --git a/src/main/resources/datapackage.avsc b/src/main/resources/datapackage.avsc
new file mode 100644
index 0000000..6e98a07
--- /dev/null
+++ b/src/main/resources/datapackage.avsc
@@ -0,0 +1,68 @@
+{
+ "namespace": "com.powerpanel.kso.model",
+ "type": "record",
+ "name": "DataPackage",
+ "fields": [
+ {
+ "type": "int",
+ "name": "generator"
+ },
+ {
+ "name": "region",
+ "type": {
+ "name": "Region",
+ "type": "enum",
+ "symbols": ["NORTHEAST", "SOUTHEAST", "WEST", "MIDWEST"]
+ }
+ },
+ {
+ "name": "state",
+ "type": {
+ "name": "State",
+ "type": "enum",
+ "symbols": ["AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DE", "FL", "GA", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"]
+ }
+ },
+ {
+ "name": "data",
+ "type": {
+ "type": "array",
+ "items": {
+ "name": "MetricData",
+ "type": "record",
+ "fields": [
+ {
+ "name": "metric",
+ "type": "string"
+ },
+ {
+ "name": "datapoints",
+ "type": {
+ "type": "array",
+ "items": {
+ "name": "DataPoint",
+ "type": "record",
+ "fields": [
+ {
+ "name": "timestamp",
+ "type": "long"
+ },
+ {
+ "name": "value",
+ "type": ["long", "double"]
+ }
+ ]
+ }
+
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "rawdata",
+ "type": "bytes"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/main/scala/com/powerpanel/kso/Consumer.scala b/src/main/scala/com/powerpanel/kso/Consumer.scala
index a2acadb..38a2c32 100644
--- a/src/main/scala/com/powerpanel/kso/Consumer.scala
+++ b/src/main/scala/com/powerpanel/kso/Consumer.scala
@@ -4,7 +4,7 @@ import com.powerpanel.kso._
import org.apache.spark.streaming.StreamingContext
import org.apache.log4j.Logger;
-object PPConsumer {
+object Consumer {
private[this] val logger = Logger.getLogger(getClass().getName());
@@ -27,7 +27,7 @@ object PPConsumer {
}
logger.info("Starting spark streaming execution")
- logger.info("Logger url: " + loggerUrl)
+ // logger.info("Logger url: " + loggerUrl)
// ssc.addStreamingListener(new StatReporter(appName, loggerUrl))
ssc.start()
ssc.awaitTermination()
diff --git a/src/main/scala/com/powerpanel/kso/KafkaInput.scala b/src/main/scala/com/powerpanel/kso/KafkaInput.scala
index b25764a..84582bb 100644
--- a/src/main/scala/com/powerpanel/kso/KafkaInput.scala
+++ b/src/main/scala/com/powerpanel/kso/KafkaInput.scala
@@ -3,7 +3,9 @@ package com.powerpanel.kso;
import kafka.serializer.StringDecoder
import kafka.serializer.DefaultDecoder
import com.powerpanel.kso.model._
-import org.apache.spark.streaming.kafka.KafkaUtils
+import org.apache.spark.streaming.kafka010.KafkaUtils
+import org.apache.spark.streaming.kafka010.ConsumerStrategies
+import org.apache.spark.streaming.kafka010.LocationStrategies
import org.apache.spark.streaming.StreamingContext
class KafkaInput extends Serializable {
@@ -15,16 +17,21 @@ class KafkaInput extends Serializable {
{
kafkaParams.put("auto.offset.reset", "smallest");
}
- val messages = KafkaUtils.createDirectStream[String, Array[Byte], StringDecoder, DefaultDecoder](
- ssc, kafkaParams.toMap, topicsSet).repartition(Integer.parseInt(props.getProperty("app.processing_parallelism")));
+ val messages = KafkaUtils.createDirectStream[String, Array[Byte]](
+ ssc,
+ LocationStrategies.PreferConsistent,
+ ConsumerStrategies.Subscribe[String, Array[Byte]](topicsSet, kafkaParams)
+ );
+ //.repartition(Integer.parseInt(props.getProperty("app.processing_parallelism")));
// Decode avro container format
- val avroSchemaString = StaticHelpers.loadResourceFile("dataplatform-raw.avsc");
+ // val avroSchemaString = StaticHelpers.loadResourceFile("dataplatform-raw.avsc");
val rawMessages = messages.map(x => {
- val eventDecoder = new DataPlatformEventDecoder(avroSchemaString);
- val payload = x._2;
- val dataPlatformEvent = eventDecoder.decode(payload);
- dataPlatformEvent;
+ // val eventDecoder = new DataPlatformEventDecoder(avroSchemaString);
+ val dataPackageDecoder = DataPackage.getDecoder();
+ val payload = x.value();
+ val dataPackage = dataPackageDecoder.decode(payload);
+ dataPackage;
});
rawMessages;
};
diff --git a/src/main/scala/com/powerpanel/kso/KafkaPipeline.scala b/src/main/scala/com/powerpanel/kso/KafkaPipeline.scala
index 716619d..c959fd9 100644
--- a/src/main/scala/com/powerpanel/kso/KafkaPipeline.scala
+++ b/src/main/scala/com/powerpanel/kso/KafkaPipeline.scala
@@ -15,14 +15,15 @@ class KafkaPipeline extends Serializable {
}
def create() = {
-
- val parseMessages = (messagesDstream: DStream[DataPlatformEvent]) => {
- val parsedMessages = messagesDstream.flatMap(dataPlatformEvent => {
+ /*
+ val parseMessages = (messagesDstream: DStream[DataPackage]) => {
+ val parsedMessages = messagesDstream.flatMap(dataPackage => {
val parsed = dataPlatformEvent.getRawdata();
Some(parsed);
});
parsedMessages
}: DStream[String];
+ */
val props = AppConfig.loadProperties();
val checkpointDirectory = props.getProperty("app.checkpoint_path");
@@ -36,13 +37,13 @@ class KafkaPipeline extends Serializable {
ssc.checkpoint(checkpointDirectory);
}
- val inputStream = new KafkaInput().readFromKafka(ssc);
- val parsedStream = parseMessages(inputStream);
+ val dataStream = new KafkaInput().readFromKafka(ssc);
+ // val parsedStream = parseMessages(inputStream);
val writeCounts: DStream[Integer] =
new OpenTSDBOutput().putOpentsdb(
props.getProperty("opentsdb.ip"),
- parsedStream);
+ dataStream);
writeCounts.reduce(_ + _).print(1);
ssc;
diff --git a/src/main/scala/com/powerpanel/kso/OpenTSDBOutput.scala b/src/main/scala/com/powerpanel/kso/OpenTSDBOutput.scala
index bb289fb..f59e082 100644
--- a/src/main/scala/com/powerpanel/kso/OpenTSDBOutput.scala
+++ b/src/main/scala/com/powerpanel/kso/OpenTSDBOutput.scala
@@ -2,6 +2,7 @@ package com.powerpanel.kso;
import org.joda.time.DateTime
import scala.util.control.NonFatal
+import com.powerpanel.kso.model._;
import org.apache.log4j.Logger
import org.apache.spark.streaming.dstream.DStream
import org.apache.http.client.methods.HttpPost
@@ -10,17 +11,17 @@ import org.apache.http.impl.client.DefaultHttpClient
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.JsonDSL._
+import scala.collection.JavaConversions._
class OpenTSDBOutput extends Serializable {
- def putOpentsdb[T](opentsdbIP: String,
- stream: DStream[String]) = {
+ def putOpentsdb[T](opentsdbIP: String, stream: DStream[DataPackage]) = {
stream.mapPartitions(partition => {
var count = 0;
- partition.foreach(rowData =>
+ partition.foreach((generatorData: DataPackage) =>
{
-
+ /*
val json = parse(rowData.replace("'", "\""))
val host = compact(render((json \\ "host"))).replace("\"", "")
val timestampStr = compact(render((json \\ "timestamp"))).replace("\"", "")
@@ -35,7 +36,27 @@ class OpenTSDBOutput extends Serializable {
| "timestamp": $timestamp,
| "tags": {"host": "$host"}
|}""".stripMargin
-
+ */
+ var body: String = "[";
+ for(metricData <- generatorData.getData()) {
+ for(dataPoint <- metricData.getDatapoints()) {
+ val metric = metricData.getMetric();
+ val value = dataPoint.getValue();
+ val timestamp = dataPoint.getTimestamp();
+ val generator_id = generatorData.getGenerator();
+ val region = generatorData.getRegion();
+ val state = generatorData.getState;
+ val pstring = f"""{
+ | "metric": "$metric",
+ | "value": "$value",
+ | "timestamp": $timestamp,
+ | "tags": {"generator": "$generator_id", "state": "$state", "region": "$region"}
+ |},""";
+ body = body.concat(pstring);
+ }
+ }
+ body = (if (body.endsWith(",")) body.dropRight(1) else body) + "]";
+
var openTSDBUrl = "http://" + opentsdbIP + "/api/put"
try {
val httpClient = new DefaultHttpClient()
diff --git a/src/main/scala/com/powerpanel/kso/model/DataPackage.java b/src/main/scala/com/powerpanel/kso/model/DataPackage.java
new file mode 100644
index 0000000..9864d9c
--- /dev/null
+++ b/src/main/scala/com/powerpanel/kso/model/DataPackage.java
@@ -0,0 +1,517 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package com.powerpanel.kso.model;
+
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public class DataPackage extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+ private static final long serialVersionUID = -1480426181877249637L;
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"DataPackage\",\"namespace\":\"com.powerpanel.kso.model\",\"fields\":[{\"name\":\"generator\",\"type\":\"int\"},{\"name\":\"region\",\"type\":{\"type\":\"enum\",\"name\":\"Region\",\"symbols\":[\"NORTHEAST\",\"SOUTHEAST\",\"WEST\",\"MIDWEST\"]}},{\"name\":\"state\",\"type\":{\"type\":\"enum\",\"name\":\"State\",\"symbols\":[\"AL\",\"AK\",\"AZ\",\"AR\",\"CA\",\"CO\",\"CT\",\"DE\",\"FL\",\"GA\",\"HI\",\"ID\",\"IL\",\"IN\",\"IA\",\"KS\",\"KY\",\"LA\",\"ME\",\"MD\",\"MA\",\"MI\",\"MN\",\"MS\",\"MO\",\"MT\",\"NE\",\"NV\",\"NH\",\"NJ\",\"NM\",\"NY\",\"NC\",\"ND\",\"OH\",\"OK\",\"OR\",\"PA\",\"RI\",\"SC\",\"SD\",\"TN\",\"TX\",\"UT\",\"VT\",\"VA\",\"WA\",\"WV\",\"WI\",\"WY\"]}},{\"name\":\"data\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"MetricData\",\"fields\":[{\"name\":\"metric\",\"type\":\"string\"},{\"name\":\"datapoints\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"DataPoint\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\"},{\"name\":\"value\",\"type\":[\"long\",\"double\"]}]}}}]}}},{\"name\":\"rawdata\",\"type\":\"bytes\"}]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+ private static SpecificData MODEL$ = new SpecificData();
+
+ private static final BinaryMessageEncoder ENCODER =
+ new BinaryMessageEncoder(MODEL$, SCHEMA$);
+
+ private static final BinaryMessageDecoder DECODER =
+ new BinaryMessageDecoder(MODEL$, SCHEMA$);
+
+ /**
+ * Return the BinaryMessageDecoder instance used by this class.
+ */
+ public static BinaryMessageDecoder getDecoder() {
+ return DECODER;
+ }
+
+ /**
+ * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
+ * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+ */
+ public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
+ return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
+ }
+
+ /** Serializes this DataPackage to a ByteBuffer. */
+ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+ return ENCODER.encode(this);
+ }
+
+ /** Deserializes a DataPackage from a ByteBuffer. */
+ public static DataPackage fromByteBuffer(
+ java.nio.ByteBuffer b) throws java.io.IOException {
+ return DECODER.decode(b);
+ }
+
+ @Deprecated public int generator;
+ @Deprecated public com.powerpanel.kso.model.Region region;
+ @Deprecated public com.powerpanel.kso.model.State state;
+ @Deprecated public java.util.List data;
+ @Deprecated public java.nio.ByteBuffer rawdata;
+
+ /**
+ * Default constructor. Note that this does not initialize fields
+ * to their default values from the schema. If that is desired then
+ * one should use newBuilder()
+ * .
+ */
+ public DataPackage() {}
+
+ /**
+ * All-args constructor.
+ * @param generator The new value for generator
+ * @param region The new value for region
+ * @param state The new value for state
+ * @param data The new value for data
+ * @param rawdata The new value for rawdata
+ */
+ public DataPackage(java.lang.Integer generator, com.powerpanel.kso.model.Region region, com.powerpanel.kso.model.State state, java.util.List data, java.nio.ByteBuffer rawdata) {
+ this.generator = generator;
+ this.region = region;
+ this.state = state;
+ this.data = data;
+ this.rawdata = rawdata;
+ }
+
+ public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+ // Used by DatumWriter. Applications should not call.
+ public java.lang.Object get(int field$) {
+ switch (field$) {
+ case 0: return generator;
+ case 1: return region;
+ case 2: return state;
+ case 3: return data;
+ case 4: return rawdata;
+ default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+ }
+ }
+
+ // Used by DatumReader. Applications should not call.
+ @SuppressWarnings(value="unchecked")
+ public void put(int field$, java.lang.Object value$) {
+ switch (field$) {
+ case 0: generator = (java.lang.Integer)value$; break;
+ case 1: region = (com.powerpanel.kso.model.Region)value$; break;
+ case 2: state = (com.powerpanel.kso.model.State)value$; break;
+ case 3: data = (java.util.List)value$; break;
+ case 4: rawdata = (java.nio.ByteBuffer)value$; break;
+ default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+ }
+ }
+
+ /**
+ * Gets the value of the 'generator' field.
+ * @return The value of the 'generator' field.
+ */
+ public java.lang.Integer getGenerator() {
+ return generator;
+ }
+
+ /**
+ * Sets the value of the 'generator' field.
+ * @param value the value to set.
+ */
+ public void setGenerator(java.lang.Integer value) {
+ this.generator = value;
+ }
+
+ /**
+ * Gets the value of the 'region' field.
+ * @return The value of the 'region' field.
+ */
+ public com.powerpanel.kso.model.Region getRegion() {
+ return region;
+ }
+
+ /**
+ * Sets the value of the 'region' field.
+ * @param value the value to set.
+ */
+ public void setRegion(com.powerpanel.kso.model.Region value) {
+ this.region = value;
+ }
+
+ /**
+ * Gets the value of the 'state' field.
+ * @return The value of the 'state' field.
+ */
+ public com.powerpanel.kso.model.State getState() {
+ return state;
+ }
+
+ /**
+ * Sets the value of the 'state' field.
+ * @param value the value to set.
+ */
+ public void setState(com.powerpanel.kso.model.State value) {
+ this.state = value;
+ }
+
+ /**
+ * Gets the value of the 'data' field.
+ * @return The value of the 'data' field.
+ */
+ public java.util.List getData() {
+ return data;
+ }
+
+ /**
+ * Sets the value of the 'data' field.
+ * @param value the value to set.
+ */
+ public void setData(java.util.List value) {
+ this.data = value;
+ }
+
+ /**
+ * Gets the value of the 'rawdata' field.
+ * @return The value of the 'rawdata' field.
+ */
+ public java.nio.ByteBuffer getRawdata() {
+ return rawdata;
+ }
+
+ /**
+ * Sets the value of the 'rawdata' field.
+ * @param value the value to set.
+ */
+ public void setRawdata(java.nio.ByteBuffer value) {
+ this.rawdata = value;
+ }
+
+ /**
+ * Creates a new DataPackage RecordBuilder.
+ * @return A new DataPackage RecordBuilder
+ */
+ public static com.powerpanel.kso.model.DataPackage.Builder newBuilder() {
+ return new com.powerpanel.kso.model.DataPackage.Builder();
+ }
+
+ /**
+ * Creates a new DataPackage RecordBuilder by copying an existing Builder.
+ * @param other The existing builder to copy.
+ * @return A new DataPackage RecordBuilder
+ */
+ public static com.powerpanel.kso.model.DataPackage.Builder newBuilder(com.powerpanel.kso.model.DataPackage.Builder other) {
+ return new com.powerpanel.kso.model.DataPackage.Builder(other);
+ }
+
+ /**
+ * Creates a new DataPackage RecordBuilder by copying an existing DataPackage instance.
+ * @param other The existing instance to copy.
+ * @return A new DataPackage RecordBuilder
+ */
+ public static com.powerpanel.kso.model.DataPackage.Builder newBuilder(com.powerpanel.kso.model.DataPackage other) {
+ return new com.powerpanel.kso.model.DataPackage.Builder(other);
+ }
+
+ /**
+ * RecordBuilder for DataPackage instances.
+ */
+ public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
+ implements org.apache.avro.data.RecordBuilder {
+
+ private int generator;
+ private com.powerpanel.kso.model.Region region;
+ private com.powerpanel.kso.model.State state;
+ private java.util.List data;
+ private java.nio.ByteBuffer rawdata;
+
+ /** Creates a new Builder */
+ private Builder() {
+ super(SCHEMA$);
+ }
+
+ /**
+ * Creates a Builder by copying an existing Builder.
+ * @param other The existing Builder to copy.
+ */
+ private Builder(com.powerpanel.kso.model.DataPackage.Builder other) {
+ super(other);
+ if (isValidValue(fields()[0], other.generator)) {
+ this.generator = data().deepCopy(fields()[0].schema(), other.generator);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.region)) {
+ this.region = data().deepCopy(fields()[1].schema(), other.region);
+ fieldSetFlags()[1] = true;
+ }
+ if (isValidValue(fields()[2], other.state)) {
+ this.state = data().deepCopy(fields()[2].schema(), other.state);
+ fieldSetFlags()[2] = true;
+ }
+ if (isValidValue(fields()[3], other.data)) {
+ this.data = data().deepCopy(fields()[3].schema(), other.data);
+ fieldSetFlags()[3] = true;
+ }
+ if (isValidValue(fields()[4], other.rawdata)) {
+ this.rawdata = data().deepCopy(fields()[4].schema(), other.rawdata);
+ fieldSetFlags()[4] = true;
+ }
+ }
+
+ /**
+ * Creates a Builder by copying an existing DataPackage instance
+ * @param other The existing instance to copy.
+ */
+ private Builder(com.powerpanel.kso.model.DataPackage other) {
+ super(SCHEMA$);
+ if (isValidValue(fields()[0], other.generator)) {
+ this.generator = data().deepCopy(fields()[0].schema(), other.generator);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.region)) {
+ this.region = data().deepCopy(fields()[1].schema(), other.region);
+ fieldSetFlags()[1] = true;
+ }
+ if (isValidValue(fields()[2], other.state)) {
+ this.state = data().deepCopy(fields()[2].schema(), other.state);
+ fieldSetFlags()[2] = true;
+ }
+ if (isValidValue(fields()[3], other.data)) {
+ this.data = data().deepCopy(fields()[3].schema(), other.data);
+ fieldSetFlags()[3] = true;
+ }
+ if (isValidValue(fields()[4], other.rawdata)) {
+ this.rawdata = data().deepCopy(fields()[4].schema(), other.rawdata);
+ fieldSetFlags()[4] = true;
+ }
+ }
+
+ /**
+ * Gets the value of the 'generator' field.
+ * @return The value.
+ */
+ public java.lang.Integer getGenerator() {
+ return generator;
+ }
+
+ /**
+ * Sets the value of the 'generator' field.
+ * @param value The value of 'generator'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder setGenerator(int value) {
+ validate(fields()[0], value);
+ this.generator = value;
+ fieldSetFlags()[0] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'generator' field has been set.
+ * @return True if the 'generator' field has been set, false otherwise.
+ */
+ public boolean hasGenerator() {
+ return fieldSetFlags()[0];
+ }
+
+
+ /**
+ * Clears the value of the 'generator' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder clearGenerator() {
+ fieldSetFlags()[0] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'region' field.
+ * @return The value.
+ */
+ public com.powerpanel.kso.model.Region getRegion() {
+ return region;
+ }
+
+ /**
+ * Sets the value of the 'region' field.
+ * @param value The value of 'region'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder setRegion(com.powerpanel.kso.model.Region value) {
+ validate(fields()[1], value);
+ this.region = value;
+ fieldSetFlags()[1] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'region' field has been set.
+ * @return True if the 'region' field has been set, false otherwise.
+ */
+ public boolean hasRegion() {
+ return fieldSetFlags()[1];
+ }
+
+
+ /**
+ * Clears the value of the 'region' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder clearRegion() {
+ region = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'state' field.
+ * @return The value.
+ */
+ public com.powerpanel.kso.model.State getState() {
+ return state;
+ }
+
+ /**
+ * Sets the value of the 'state' field.
+ * @param value The value of 'state'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder setState(com.powerpanel.kso.model.State value) {
+ validate(fields()[2], value);
+ this.state = value;
+ fieldSetFlags()[2] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'state' field has been set.
+ * @return True if the 'state' field has been set, false otherwise.
+ */
+ public boolean hasState() {
+ return fieldSetFlags()[2];
+ }
+
+
+ /**
+ * Clears the value of the 'state' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder clearState() {
+ state = null;
+ fieldSetFlags()[2] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'data' field.
+ * @return The value.
+ */
+ public java.util.List getData() {
+ return data;
+ }
+
+ /**
+ * Sets the value of the 'data' field.
+ * @param value The value of 'data'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder setData(java.util.List value) {
+ validate(fields()[3], value);
+ this.data = value;
+ fieldSetFlags()[3] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'data' field has been set.
+ * @return True if the 'data' field has been set, false otherwise.
+ */
+ public boolean hasData() {
+ return fieldSetFlags()[3];
+ }
+
+
+ /**
+ * Clears the value of the 'data' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder clearData() {
+ data = null;
+ fieldSetFlags()[3] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'rawdata' field.
+ * @return The value.
+ */
+ public java.nio.ByteBuffer getRawdata() {
+ return rawdata;
+ }
+
+ /**
+ * Sets the value of the 'rawdata' field.
+ * @param value The value of 'rawdata'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder setRawdata(java.nio.ByteBuffer value) {
+ validate(fields()[4], value);
+ this.rawdata = value;
+ fieldSetFlags()[4] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'rawdata' field has been set.
+ * @return True if the 'rawdata' field has been set, false otherwise.
+ */
+ public boolean hasRawdata() {
+ return fieldSetFlags()[4];
+ }
+
+
+ /**
+ * Clears the value of the 'rawdata' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPackage.Builder clearRawdata() {
+ rawdata = null;
+ fieldSetFlags()[4] = false;
+ return this;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public DataPackage build() {
+ try {
+ DataPackage record = new DataPackage();
+ record.generator = fieldSetFlags()[0] ? this.generator : (java.lang.Integer) defaultValue(fields()[0]);
+ record.region = fieldSetFlags()[1] ? this.region : (com.powerpanel.kso.model.Region) defaultValue(fields()[1]);
+ record.state = fieldSetFlags()[2] ? this.state : (com.powerpanel.kso.model.State) defaultValue(fields()[2]);
+ record.data = fieldSetFlags()[3] ? this.data : (java.util.List) defaultValue(fields()[3]);
+ record.rawdata = fieldSetFlags()[4] ? this.rawdata : (java.nio.ByteBuffer) defaultValue(fields()[4]);
+ return record;
+ } catch (java.lang.Exception e) {
+ throw new org.apache.avro.AvroRuntimeException(e);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumWriter
+ WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
+
+ @Override public void writeExternal(java.io.ObjectOutput out)
+ throws java.io.IOException {
+ WRITER$.write(this, SpecificData.getEncoder(out));
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumReader
+ READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
+
+ @Override public void readExternal(java.io.ObjectInput in)
+ throws java.io.IOException {
+ READER$.read(this, SpecificData.getDecoder(in));
+ }
+
+}
diff --git a/src/main/scala/com/powerpanel/kso/model/DataPlatformEvent.java b/src/main/scala/com/powerpanel/kso/model/DataPlatformEvent.java-bak
similarity index 100%
rename from src/main/scala/com/powerpanel/kso/model/DataPlatformEvent.java
rename to src/main/scala/com/powerpanel/kso/model/DataPlatformEvent.java-bak
diff --git a/src/main/scala/com/powerpanel/kso/model/DataPlatformEventDecoder.java b/src/main/scala/com/powerpanel/kso/model/DataPlatformEventDecoder.java-bak
similarity index 99%
rename from src/main/scala/com/powerpanel/kso/model/DataPlatformEventDecoder.java
rename to src/main/scala/com/powerpanel/kso/model/DataPlatformEventDecoder.java-bak
index 7d26f2d..2e99169 100644
--- a/src/main/scala/com/powerpanel/kso/model/DataPlatformEventDecoder.java
+++ b/src/main/scala/com/powerpanel/kso/model/DataPlatformEventDecoder.java-bak
@@ -45,7 +45,6 @@ public class DataPlatformEventDecoder
public DataPlatformEvent decode(byte[] data) throws IOException
{
-
try
{
Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
diff --git a/src/main/scala/com/powerpanel/kso/model/DataPoint.java b/src/main/scala/com/powerpanel/kso/model/DataPoint.java
new file mode 100644
index 0000000..53bf12c
--- /dev/null
+++ b/src/main/scala/com/powerpanel/kso/model/DataPoint.java
@@ -0,0 +1,307 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package com.powerpanel.kso.model;
+
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public class DataPoint extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+ private static final long serialVersionUID = 7360705831082600519L;
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"DataPoint\",\"namespace\":\"com.powerpanel.kso.model\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\"},{\"name\":\"value\",\"type\":[\"long\",\"double\"]}]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+ private static SpecificData MODEL$ = new SpecificData();
+
+ private static final BinaryMessageEncoder ENCODER =
+ new BinaryMessageEncoder(MODEL$, SCHEMA$);
+
+ private static final BinaryMessageDecoder DECODER =
+ new BinaryMessageDecoder(MODEL$, SCHEMA$);
+
+ /**
+ * Return the BinaryMessageDecoder instance used by this class.
+ */
+ public static BinaryMessageDecoder getDecoder() {
+ return DECODER;
+ }
+
+ /**
+ * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
+ * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+ */
+ public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
+ return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
+ }
+
+ /** Serializes this DataPoint to a ByteBuffer. */
+ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+ return ENCODER.encode(this);
+ }
+
+ /** Deserializes a DataPoint from a ByteBuffer. */
+ public static DataPoint fromByteBuffer(
+ java.nio.ByteBuffer b) throws java.io.IOException {
+ return DECODER.decode(b);
+ }
+
+ @Deprecated public long timestamp;
+ @Deprecated public java.lang.Object value;
+
+ /**
+ * Default constructor. Note that this does not initialize fields
+ * to their default values from the schema. If that is desired then
+   * one should use newBuilder().
+ */
+ public DataPoint() {}
+
+ /**
+ * All-args constructor.
+ * @param timestamp The new value for timestamp
+ * @param value The new value for value
+ */
+ public DataPoint(java.lang.Long timestamp, java.lang.Object value) {
+ this.timestamp = timestamp;
+ this.value = value;
+ }
+
+ public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+ // Used by DatumWriter. Applications should not call.
+ public java.lang.Object get(int field$) {
+ switch (field$) {
+ case 0: return timestamp;
+ case 1: return value;
+ default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+ }
+ }
+
+ // Used by DatumReader. Applications should not call.
+ @SuppressWarnings(value="unchecked")
+ public void put(int field$, java.lang.Object value$) {
+ switch (field$) {
+ case 0: timestamp = (java.lang.Long)value$; break;
+ case 1: value = (java.lang.Object)value$; break;
+ default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+ }
+ }
+
+ /**
+ * Gets the value of the 'timestamp' field.
+ * @return The value of the 'timestamp' field.
+ */
+ public java.lang.Long getTimestamp() {
+ return timestamp;
+ }
+
+ /**
+ * Sets the value of the 'timestamp' field.
+ * @param value the value to set.
+ */
+ public void setTimestamp(java.lang.Long value) {
+ this.timestamp = value;
+ }
+
+ /**
+ * Gets the value of the 'value' field.
+ * @return The value of the 'value' field.
+ */
+ public java.lang.Object getValue() {
+ return value;
+ }
+
+ /**
+ * Sets the value of the 'value' field.
+ * @param value the value to set.
+ */
+ public void setValue(java.lang.Object value) {
+ this.value = value;
+ }
+
+ /**
+ * Creates a new DataPoint RecordBuilder.
+ * @return A new DataPoint RecordBuilder
+ */
+ public static com.powerpanel.kso.model.DataPoint.Builder newBuilder() {
+ return new com.powerpanel.kso.model.DataPoint.Builder();
+ }
+
+ /**
+ * Creates a new DataPoint RecordBuilder by copying an existing Builder.
+ * @param other The existing builder to copy.
+ * @return A new DataPoint RecordBuilder
+ */
+ public static com.powerpanel.kso.model.DataPoint.Builder newBuilder(com.powerpanel.kso.model.DataPoint.Builder other) {
+ return new com.powerpanel.kso.model.DataPoint.Builder(other);
+ }
+
+ /**
+ * Creates a new DataPoint RecordBuilder by copying an existing DataPoint instance.
+ * @param other The existing instance to copy.
+ * @return A new DataPoint RecordBuilder
+ */
+ public static com.powerpanel.kso.model.DataPoint.Builder newBuilder(com.powerpanel.kso.model.DataPoint other) {
+ return new com.powerpanel.kso.model.DataPoint.Builder(other);
+ }
+
+ /**
+ * RecordBuilder for DataPoint instances.
+ */
+ public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
+ implements org.apache.avro.data.RecordBuilder {
+
+ private long timestamp;
+ private java.lang.Object value;
+
+ /** Creates a new Builder */
+ private Builder() {
+ super(SCHEMA$);
+ }
+
+ /**
+ * Creates a Builder by copying an existing Builder.
+ * @param other The existing Builder to copy.
+ */
+ private Builder(com.powerpanel.kso.model.DataPoint.Builder other) {
+ super(other);
+ if (isValidValue(fields()[0], other.timestamp)) {
+ this.timestamp = data().deepCopy(fields()[0].schema(), other.timestamp);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.value)) {
+ this.value = data().deepCopy(fields()[1].schema(), other.value);
+ fieldSetFlags()[1] = true;
+ }
+ }
+
+ /**
+ * Creates a Builder by copying an existing DataPoint instance
+ * @param other The existing instance to copy.
+ */
+ private Builder(com.powerpanel.kso.model.DataPoint other) {
+ super(SCHEMA$);
+ if (isValidValue(fields()[0], other.timestamp)) {
+ this.timestamp = data().deepCopy(fields()[0].schema(), other.timestamp);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.value)) {
+ this.value = data().deepCopy(fields()[1].schema(), other.value);
+ fieldSetFlags()[1] = true;
+ }
+ }
+
+ /**
+ * Gets the value of the 'timestamp' field.
+ * @return The value.
+ */
+ public java.lang.Long getTimestamp() {
+ return timestamp;
+ }
+
+ /**
+ * Sets the value of the 'timestamp' field.
+ * @param value The value of 'timestamp'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPoint.Builder setTimestamp(long value) {
+ validate(fields()[0], value);
+ this.timestamp = value;
+ fieldSetFlags()[0] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'timestamp' field has been set.
+ * @return True if the 'timestamp' field has been set, false otherwise.
+ */
+ public boolean hasTimestamp() {
+ return fieldSetFlags()[0];
+ }
+
+
+ /**
+ * Clears the value of the 'timestamp' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPoint.Builder clearTimestamp() {
+ fieldSetFlags()[0] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'value' field.
+ * @return The value.
+ */
+ public java.lang.Object getValue() {
+ return value;
+ }
+
+ /**
+ * Sets the value of the 'value' field.
+ * @param value The value of 'value'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPoint.Builder setValue(java.lang.Object value) {
+ validate(fields()[1], value);
+ this.value = value;
+ fieldSetFlags()[1] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'value' field has been set.
+ * @return True if the 'value' field has been set, false otherwise.
+ */
+ public boolean hasValue() {
+ return fieldSetFlags()[1];
+ }
+
+
+ /**
+ * Clears the value of the 'value' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.DataPoint.Builder clearValue() {
+ value = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public DataPoint build() {
+ try {
+ DataPoint record = new DataPoint();
+ record.timestamp = fieldSetFlags()[0] ? this.timestamp : (java.lang.Long) defaultValue(fields()[0]);
+ record.value = fieldSetFlags()[1] ? this.value : (java.lang.Object) defaultValue(fields()[1]);
+ return record;
+ } catch (java.lang.Exception e) {
+ throw new org.apache.avro.AvroRuntimeException(e);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumWriter
+ WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
+
+ @Override public void writeExternal(java.io.ObjectOutput out)
+ throws java.io.IOException {
+ WRITER$.write(this, SpecificData.getEncoder(out));
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumReader
+ READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
+
+ @Override public void readExternal(java.io.ObjectInput in)
+ throws java.io.IOException {
+ READER$.read(this, SpecificData.getDecoder(in));
+ }
+
+}
diff --git a/src/main/scala/com/powerpanel/kso/model/MetricData.java b/src/main/scala/com/powerpanel/kso/model/MetricData.java
new file mode 100644
index 0000000..f68b0b5
--- /dev/null
+++ b/src/main/scala/com/powerpanel/kso/model/MetricData.java
@@ -0,0 +1,308 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package com.powerpanel.kso.model;
+
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public class MetricData extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+ private static final long serialVersionUID = -6214518573641805216L;
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"MetricData\",\"namespace\":\"com.powerpanel.kso.model\",\"fields\":[{\"name\":\"metric\",\"type\":\"string\"},{\"name\":\"datapoints\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"DataPoint\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\"},{\"name\":\"value\",\"type\":[\"long\",\"double\"]}]}}}]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+ private static SpecificData MODEL$ = new SpecificData();
+
+ private static final BinaryMessageEncoder ENCODER =
+ new BinaryMessageEncoder(MODEL$, SCHEMA$);
+
+ private static final BinaryMessageDecoder DECODER =
+ new BinaryMessageDecoder(MODEL$, SCHEMA$);
+
+ /**
+ * Return the BinaryMessageDecoder instance used by this class.
+ */
+ public static BinaryMessageDecoder getDecoder() {
+ return DECODER;
+ }
+
+ /**
+ * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
+ * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+ */
+ public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
+ return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
+ }
+
+ /** Serializes this MetricData to a ByteBuffer. */
+ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+ return ENCODER.encode(this);
+ }
+
+ /** Deserializes a MetricData from a ByteBuffer. */
+ public static MetricData fromByteBuffer(
+ java.nio.ByteBuffer b) throws java.io.IOException {
+ return DECODER.decode(b);
+ }
+
+ @Deprecated public java.lang.CharSequence metric;
+ @Deprecated public java.util.List datapoints;
+
+ /**
+ * Default constructor. Note that this does not initialize fields
+ * to their default values from the schema. If that is desired then
+   * one should use newBuilder().
+ */
+ public MetricData() {}
+
+ /**
+ * All-args constructor.
+ * @param metric The new value for metric
+ * @param datapoints The new value for datapoints
+ */
+ public MetricData(java.lang.CharSequence metric, java.util.List datapoints) {
+ this.metric = metric;
+ this.datapoints = datapoints;
+ }
+
+ public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+ // Used by DatumWriter. Applications should not call.
+ public java.lang.Object get(int field$) {
+ switch (field$) {
+ case 0: return metric;
+ case 1: return datapoints;
+ default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+ }
+ }
+
+ // Used by DatumReader. Applications should not call.
+ @SuppressWarnings(value="unchecked")
+ public void put(int field$, java.lang.Object value$) {
+ switch (field$) {
+ case 0: metric = (java.lang.CharSequence)value$; break;
+ case 1: datapoints = (java.util.List)value$; break;
+ default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+ }
+ }
+
+ /**
+ * Gets the value of the 'metric' field.
+ * @return The value of the 'metric' field.
+ */
+ public java.lang.CharSequence getMetric() {
+ return metric;
+ }
+
+ /**
+ * Sets the value of the 'metric' field.
+ * @param value the value to set.
+ */
+ public void setMetric(java.lang.CharSequence value) {
+ this.metric = value;
+ }
+
+ /**
+ * Gets the value of the 'datapoints' field.
+ * @return The value of the 'datapoints' field.
+ */
+ public java.util.List getDatapoints() {
+ return datapoints;
+ }
+
+ /**
+ * Sets the value of the 'datapoints' field.
+ * @param value the value to set.
+ */
+ public void setDatapoints(java.util.List value) {
+ this.datapoints = value;
+ }
+
+ /**
+ * Creates a new MetricData RecordBuilder.
+ * @return A new MetricData RecordBuilder
+ */
+ public static com.powerpanel.kso.model.MetricData.Builder newBuilder() {
+ return new com.powerpanel.kso.model.MetricData.Builder();
+ }
+
+ /**
+ * Creates a new MetricData RecordBuilder by copying an existing Builder.
+ * @param other The existing builder to copy.
+ * @return A new MetricData RecordBuilder
+ */
+ public static com.powerpanel.kso.model.MetricData.Builder newBuilder(com.powerpanel.kso.model.MetricData.Builder other) {
+ return new com.powerpanel.kso.model.MetricData.Builder(other);
+ }
+
+ /**
+ * Creates a new MetricData RecordBuilder by copying an existing MetricData instance.
+ * @param other The existing instance to copy.
+ * @return A new MetricData RecordBuilder
+ */
+ public static com.powerpanel.kso.model.MetricData.Builder newBuilder(com.powerpanel.kso.model.MetricData other) {
+ return new com.powerpanel.kso.model.MetricData.Builder(other);
+ }
+
+ /**
+ * RecordBuilder for MetricData instances.
+ */
+ public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
+ implements org.apache.avro.data.RecordBuilder {
+
+ private java.lang.CharSequence metric;
+ private java.util.List datapoints;
+
+ /** Creates a new Builder */
+ private Builder() {
+ super(SCHEMA$);
+ }
+
+ /**
+ * Creates a Builder by copying an existing Builder.
+ * @param other The existing Builder to copy.
+ */
+ private Builder(com.powerpanel.kso.model.MetricData.Builder other) {
+ super(other);
+ if (isValidValue(fields()[0], other.metric)) {
+ this.metric = data().deepCopy(fields()[0].schema(), other.metric);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.datapoints)) {
+ this.datapoints = data().deepCopy(fields()[1].schema(), other.datapoints);
+ fieldSetFlags()[1] = true;
+ }
+ }
+
+ /**
+ * Creates a Builder by copying an existing MetricData instance
+ * @param other The existing instance to copy.
+ */
+ private Builder(com.powerpanel.kso.model.MetricData other) {
+ super(SCHEMA$);
+ if (isValidValue(fields()[0], other.metric)) {
+ this.metric = data().deepCopy(fields()[0].schema(), other.metric);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.datapoints)) {
+ this.datapoints = data().deepCopy(fields()[1].schema(), other.datapoints);
+ fieldSetFlags()[1] = true;
+ }
+ }
+
+ /**
+ * Gets the value of the 'metric' field.
+ * @return The value.
+ */
+ public java.lang.CharSequence getMetric() {
+ return metric;
+ }
+
+ /**
+ * Sets the value of the 'metric' field.
+ * @param value The value of 'metric'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.MetricData.Builder setMetric(java.lang.CharSequence value) {
+ validate(fields()[0], value);
+ this.metric = value;
+ fieldSetFlags()[0] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'metric' field has been set.
+ * @return True if the 'metric' field has been set, false otherwise.
+ */
+ public boolean hasMetric() {
+ return fieldSetFlags()[0];
+ }
+
+
+ /**
+ * Clears the value of the 'metric' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.MetricData.Builder clearMetric() {
+ metric = null;
+ fieldSetFlags()[0] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'datapoints' field.
+ * @return The value.
+ */
+ public java.util.List getDatapoints() {
+ return datapoints;
+ }
+
+ /**
+ * Sets the value of the 'datapoints' field.
+ * @param value The value of 'datapoints'.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.MetricData.Builder setDatapoints(java.util.List value) {
+ validate(fields()[1], value);
+ this.datapoints = value;
+ fieldSetFlags()[1] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'datapoints' field has been set.
+ * @return True if the 'datapoints' field has been set, false otherwise.
+ */
+ public boolean hasDatapoints() {
+ return fieldSetFlags()[1];
+ }
+
+
+ /**
+ * Clears the value of the 'datapoints' field.
+ * @return This builder.
+ */
+ public com.powerpanel.kso.model.MetricData.Builder clearDatapoints() {
+ datapoints = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public MetricData build() {
+ try {
+ MetricData record = new MetricData();
+ record.metric = fieldSetFlags()[0] ? this.metric : (java.lang.CharSequence) defaultValue(fields()[0]);
+ record.datapoints = fieldSetFlags()[1] ? this.datapoints : (java.util.List) defaultValue(fields()[1]);
+ return record;
+ } catch (java.lang.Exception e) {
+ throw new org.apache.avro.AvroRuntimeException(e);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumWriter
+ WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
+
+ @Override public void writeExternal(java.io.ObjectOutput out)
+ throws java.io.IOException {
+ WRITER$.write(this, SpecificData.getEncoder(out));
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumReader
+ READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
+
+ @Override public void readExternal(java.io.ObjectInput in)
+ throws java.io.IOException {
+ READER$.read(this, SpecificData.getDecoder(in));
+ }
+
+}
diff --git a/src/main/scala/com/powerpanel/kso/model/Region.java b/src/main/scala/com/powerpanel/kso/model/Region.java
new file mode 100644
index 0000000..ae6b982
--- /dev/null
+++ b/src/main/scala/com/powerpanel/kso/model/Region.java
@@ -0,0 +1,13 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package com.powerpanel.kso.model;
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public enum Region {
+ NORTHEAST, SOUTHEAST, WEST, MIDWEST ;
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"Region\",\"namespace\":\"com.powerpanel.kso.model\",\"symbols\":[\"NORTHEAST\",\"SOUTHEAST\",\"WEST\",\"MIDWEST\"]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+}
diff --git a/src/main/scala/com/powerpanel/kso/model/State.java b/src/main/scala/com/powerpanel/kso/model/State.java
new file mode 100644
index 0000000..5040c81
--- /dev/null
+++ b/src/main/scala/com/powerpanel/kso/model/State.java
@@ -0,0 +1,13 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package com.powerpanel.kso.model;
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public enum State {
+ AL, AK, AZ, AR, CA, CO, CT, DE, FL, GA, HI, ID, IL, IN, IA, KS, KY, LA, ME, MD, MA, MI, MN, MS, MO, MT, NE, NV, NH, NJ, NM, NY, NC, ND, OH, OK, OR, PA, RI, SC, SD, TN, TX, UT, VT, VA, WA, WV, WI, WY ;
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"State\",\"namespace\":\"com.powerpanel.kso.model\",\"symbols\":[\"AL\",\"AK\",\"AZ\",\"AR\",\"CA\",\"CO\",\"CT\",\"DE\",\"FL\",\"GA\",\"HI\",\"ID\",\"IL\",\"IN\",\"IA\",\"KS\",\"KY\",\"LA\",\"ME\",\"MD\",\"MA\",\"MI\",\"MN\",\"MS\",\"MO\",\"MT\",\"NE\",\"NV\",\"NH\",\"NJ\",\"NM\",\"NY\",\"NC\",\"ND\",\"OH\",\"OK\",\"OR\",\"PA\",\"RI\",\"SC\",\"SD\",\"TN\",\"TX\",\"UT\",\"VT\",\"VA\",\"WA\",\"WV\",\"WI\",\"WY\"]}");
+ public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+}