1 change: 1 addition & 0 deletions pom.xml
@@ -54,6 +54,7 @@
<org.jgrapht.version>1.4.0</org.jgrapht.version>
<rtree.version>0.10</rtree.version>
<fastjson.version>2.0.6</fastjson.version>
<kryo.version>4.0.0</kryo.version>
</properties>

<dependencyManagement>
33 changes: 33 additions & 0 deletions start-db-core/pom.xml
@@ -101,6 +101,39 @@
<artifactId>geomesa-utils_${scala.binary.version}</artifactId>
<version>${geomesa.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.locationtech.geomesa</groupId>-->
<!-- <artifactId>geomesa-spark_${scala.binary.version}</artifactId>-->
<!-- <version>${geomesa.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>2.4.7</version>
</dependency>
<dependency>
<groupId>org.locationtech.geomesa</groupId>
<artifactId>geomesa-spark-sql_${scala.binary.version}</artifactId>
<version>${geomesa.version}</version>
</dependency>
<dependency>
<groupId>org.locationtech.geomesa</groupId>
<artifactId>geomesa-hbase-spark-runtime-hbase2_${scala.binary.version}</artifactId>
<version>${geomesa.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</exclusion>
</exclusions>
</dependency>

<!-- https://mvnrepository.com/artifact/com.esotericsoftware/kryo -->
<dependency>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo</artifactId>
<version>${kryo.version}</version>
</dependency>
<!-- Mybatis Core -->
<dependency>
<groupId>org.mybatis</groupId>
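The new com.esotericsoftware:kryo dependency, together with the geomesa-hbase-spark-runtime artifact, points at Kryo-based serialization of GeoMesa features in the Spark jobs. Below is a minimal sketch of the session configuration this enables; the GeoMesaSparkKryoRegistrator class name comes from GeoMesa's Spark documentation and is an assumption here, not something shown in this diff.

import org.apache.spark.sql.SparkSession

object KryoConfigSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("start-db kryo config sketch")
      .master("local[*]")
      // Replace Spark's default Java serialization with Kryo, backed by
      // the com.esotericsoftware:kryo dependency added above.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Assumed registrator from GeoMesa's Spark integration; it registers
      // SimpleFeature types so they can be shipped between executors.
      .config(
        "spark.kryo.registrator",
        "org.locationtech.geomesa.spark.GeoMesaSparkKryoRegistrator"
      )
      .getOrCreate()
    spark.stop()
  }
}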
@@ -0,0 +1,46 @@
/*
* Copyright 2022 ST-Lab
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU General Public License version 2 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*/

package org.urbcomp.start.db.executor

import org.apache.calcite.sql.{SqlDialect, SqlNode}
import org.apache.spark.sql.SparkSession
import org.urbcomp.start.db.util.SqlParam

class SparkExecutor {

def execute(sql: SqlNode): Unit = {
// Fetch the cached session configuration
val param = SqlParam.CACHE.get()

// TODO: fetch the actual datastore parameters
val geomesaParam = Map("" -> "")
val sparkSession: SparkSession = SparkSession
.builder()
.appName("start-db sql app")
.master("local[*]")
.getOrCreate()

// Read the relevant table data
sparkSession.read
.format("geomesa")
.options(geomesaParam)
.option("geomesa.feature", "chicago")
.load()
.createTempView("")

// Query the loaded data with the given SQL
sparkSession.sql(sql.toSqlString(new SqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql)

// Write the result to intermediate storage
}

}
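The datastore parameters and the temp-view name above are still empty placeholders (see the TODO). For illustration only, a filled-in version might look like the sketch below: "hbase.catalog" is the documented GeoMesa HBase datastore parameter, while the catalog name start_db_catalog and the feature/view name chicago are hypothetical values, not part of this PR.

import org.apache.spark.sql.SparkSession

object SparkExecutorSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("start-db sql app")
      .master("local[*]")
      .getOrCreate()

    // Hypothetical GeoMesa HBase datastore parameters.
    val geomesaParam = Map("hbase.catalog" -> "start_db_catalog")

    // Load the feature type into a named view so SQL can reference it.
    spark.read
      .format("geomesa")
      .options(geomesaParam)
      .option("geomesa.feature", "chicago")
      .load()
      .createTempView("chicago")

    // Run a query against the view and show the result.
    spark.sql("SELECT * FROM chicago LIMIT 10").show()

    spark.stop()
  }
}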
@@ -0,0 +1,31 @@
/*
* Copyright 2022 ST-Lab
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU General Public License version 2 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*/

package org.urbcomp.start.db.spark

import org.apache.spark.sql.SparkSession

object SparkDemo {

val ST_DIVIDE: Int => Int = g => g / 3

def main(args: Array[String]): Unit = {
val sparkSession: SparkSession = SparkSession
.builder()
.appName("spark demo")
.master("local[*]")
.getOrCreate()
sparkSession.sqlContext.udf.register("st_divide", ST_DIVIDE)

sparkSession.sql("select st_divide(9)").show()
}

}
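The registered UDF is equally usable from the DataFrame API. A short follow-up sketch, assuming the same Spark 2.4 spark-sql dependency added in start-db-core/pom.xml:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{callUDF, col}

object SparkUdfDataFrameSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("spark udf dataframe demo")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // Same divide-by-three function as SparkDemo, registered by name.
    spark.udf.register("st_divide", (g: Int) => g / 3)

    // callUDF invokes the registered function from DataFrame code.
    Seq(9, 12, 15)
      .toDF("value")
      .withColumn("divided", callUDF("st_divide", col("value")))
      .show()

    spark.stop()
  }
}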