/**
 * Reads a single shapefile into a GeoMesa Spark RDD of SimpleFeatures.
 *
 * @param shapePath    directory containing the shapefile
 * @param shapeName    type name to query (the shapefile's base name, no extension)
 * @param sparkContext active SparkContext (implicit)
 * @return the RDD produced by the GeoMesa GeoTools spatial RDD provider
 */
def read_shapefile(shapePath: String, shapeName: String)(implicit sparkContext: SparkContext) = {
  // Do not shadow java.io.File with a variable named `File`; use a val.
  val shapeDir = new File(shapePath)
  // GeoTools data-store parameters: point the store at the directory URL and
  // select GeoMesa's GeoTools-backed spatial RDD provider.
  val inputParams: Map[String, String] = Map(
    "url" -> shapeDir.toURI.toURL.toString,
    "geotools" -> "true"
  )
  val query = new Query(shapeName)
  val inputRdd = GeoMesaSpark(inputParams).rdd(new Configuration(), sparkContext, inputParams, query)
  println(inputRdd.schema) // print the feature schema
  inputRdd
}
// Input directory containing the shapefile(s) to read.
val shapePath: String = "D:\\jb\\gisdata"
// Type name of the shapefile to query (base name without extension).
val shapeFile: String = "dltsb_6603"
// Output directory for the written shapefile.
val shapeOutPath: String = "D:\\jb\\output"
/**
 * Entry point: reads shapefile data from a directory path and writes the
 * result back out as a single shapefile.
 */
def main(args: Array[String]): Unit = {
  implicit val sparkContext: SparkContext = SparkUtilsScala.createSparkContext()
  // Read a single shapefile:
  //val shapeRdd: SpatialRDD = HandleShapeFile.read_shapefile(shapePath, shapeFile)
  // Read shapefile data from a directory path.
  val shapeRdd = HandleShapeFile.read_shapePath("D:\\B工作文档\\三调共享 流入流出\\jb\\output\\local-1630396725278")
  // count() runs distributed and avoids collect().length, which would pull
  // every feature back to the driver just to count them.
  val length: Long = shapeRdd.count()
  // Write the result out as a single shapefile.
  HandleShapeFile.write_shapefile_single(shapeRdd, shapeOutPath)
  sparkContext.stop()
}
// NOTE(review): test failed — a concrete shapefile type name must be given,
// otherwise the read fails.
// New idea: walk the directory, collect every shapefile name, read each into
// its own RDD, then union the RDDs.
/**
 * Reads shapefile data from a directory into a GeoMesa Spark RDD.
 *
 * @param shapePath    directory containing the shapefile(s)
 * @param typeName     type name to query; defaults to the previously
 *                     hard-coded value so existing callers are unaffected
 * @param sparkContext active SparkContext (implicit)
 * @return the RDD produced by the GeoMesa GeoTools spatial RDD provider
 */
def read_shapePath(shapePath: String, typeName: String = "a1ee03b5-b289-4ea7-a7c2-33bd4984365c")(implicit sparkContext: SparkContext) = {
  // Do not shadow java.io.File with a variable named `File`; use a val.
  val shapeDir = new File(shapePath)
  // GeoTools data-store parameters: directory URL + GeoTools provider flag.
  val inputParams: Map[String, String] = Map(
    "url" -> shapeDir.toURI.toURL.toString,
    "geotools" -> "true"
  )
  // Query by type name instead of a constant buried in the body.
  val query = new Query(typeName)
  val inputRdd = GeoMesaSpark(inputParams).rdd(new Configuration(), sparkContext, inputParams, query)
  println(inputRdd.schema) // print the feature schema
  inputRdd
}