Spark IoTDB连接器
mvn clean scala:compile compile install
1. Maven依赖
2. Spark-shell用户指南
spark-shell --jars spark-iotdb-connector-0.11.1.jar,iotdb-jdbc-0.11.1-jar-with-dependencies.jar
import org.apache.iotdb.spark.db._
val df = spark.read.format("org.apache.iotdb.spark.db").option("url","jdbc:iotdb://127.0.0.1:6667/").option("sql","select * from root").load
df.printSchema()
df.show()
3. 模式推断
TsFile中的现有数据如下:
你还可以使用窄表形式,如下所示:(你可以参阅第4部分,了解如何使用窄表形式)
4. 在宽和窄表之间转换
import org.apache.iotdb.spark.db._
val wide_df = spark.read.format("org.apache.iotdb.spark.db").option("url", "jdbc:iotdb://127.0.0.1:6667/").option("sql", "select * from root where time < 1100 and time > 1000").load
val narrow_df = Transformer.toNarrowForm(spark, wide_df)
5. Java用户指南
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.iotdb.spark.db.*;
public class Example {
public static void main(String[] args) {
SparkSession spark = SparkSession
    .builder()
.appName("Build a DataFrame from Scratch")
.getOrCreate();
Dataset<Row> df = spark.read().format("org.apache.iotdb.spark.db")
.option("url","jdbc:iotdb://127.0.0.1:6667/")
.option("sql","select * from root").load();
df.printSchema();
df.show();
Dataset<Row> narrowTable = Transformer.toNarrowForm(spark, df);
narrowTable.show();