Java集成gdal 处理解析tiff和shp数据

news/2024/9/20 20:50:49 标签: java, 开发语言

1. 配置 gdal

1.1. 官网下载

 这个是因为你电脑是 win64 位才选择哦~

下载这个,然后解压

1.2. 复制这个压缩包下的 dll 文件

可以按照类型复制,然后复制到你的 Java JDK 文件夹下

1.3. 找到你的 Java JDK 文件夹

不知道 java 的文件夹位置,可以通过这个查找

 

1.4. 复制对象

将这个复制到这个目录。

1.5. 选择 release-1930-x64-gdal-3-8-5-mapserver-8-0-1\bin\gdal\java

也复制到 java/bin java/jre/java,并且放入项目里面

2. 解析 tif

package org.example.flink_springboot.geop;
 
import org.gdal.gdal.Band;
import org.gdal.gdal.Dataset;
import org.gdal.gdal.Driver;
import org.gdal.gdal.gdal;
import org.gdal.gdalconst.gdalconst;
import org.gdal.gdalconst.gdalconstConstants;
 
public class GDALTest {
	public static void main(String[] args) {
		// Register all GDAL raster format drivers.
		gdal.AllRegister();
		// Open the image read-only.
		Dataset poDataset = gdal.Open("F:\\learn\\flink_springboot\\src\\main\\resources\\srtm_60_05.tif", gdalconst.GA_ReadOnly);
		if (poDataset == null) {
			System.out.println("The image could not be read.");
			return;
		}
		System.out.println("The image could be read.");

		Driver hDriver = poDataset.GetDriver();
		// Print the driver (file format) description.
		System.out.println("文件格式:" + hDriver.GetDescription());
		// Print the image size and the number of bands.
		System.out.println("size is:x:" + poDataset.getRasterXSize() + " ,y:" + poDataset.getRasterYSize()
				+ " ,band size:" + poDataset.getRasterCount());
		// Print the projection reference (WKT), if present.
		if (poDataset.GetProjectionRef() != null) {
			System.out.println("Projection is " + poDataset.GetProjectionRef());
		}
		// Print the origin and pixel resolution from the geotransform
		// (geotransform layout: [originX, pixelW, rotX, originY, rotY, pixelH]).
		double[] adfGeoTransform = new double[6];
		poDataset.GetGeoTransform(adfGeoTransform);
		System.out.println("origin : " + adfGeoTransform[0] + "," + adfGeoTransform[3]);
		System.out.println("pixel size:" + adfGeoTransform[1] + "," + adfGeoTransform[5]);

		// For each band: print size, stored min/max, color table info,
		// and a small sample of pixel values.
		for (int band = 0; band < poDataset.getRasterCount(); band++) {
			// GDAL band indices are 1-based.
			Band poBand = poDataset.GetRasterBand(band + 1);
			System.out.println("Band" + (band + 1) + ":" + "size:x:" + poBand.getXSize() + ",y:" + poBand.getYSize());
			Double[] min = new Double[1];
			Double[] max = new Double[1];
			poBand.GetMinimum(min);
			poBand.GetMaximum(max);
			if (min[0] != null || max[0] != null) {
				System.out.println("Min=" + min[0] + ",max=" + max[0]);
			} else {
				System.out.println("No Min/Max values stored in raster.");
			}
			if (poBand.GetColorTable() != null) {
				// FIX: use the 1-based band number (consistent with the output above) and
				// add missing spaces — the original printed e.g. "band0has a color table with5entries."
				// Also reuse GetColorTable() instead of a second GetRasterColorTable() call.
				System.out.println("band " + (band + 1) + " has a color table with "
						+ poBand.GetColorTable().GetCount() + " entries.");
			}

			// Read the first few rows and print the first few values of each as a sample.
			int[] buf = new int[poDataset.getRasterXSize()];
			// FIX: guard the hard-coded 3x3 sample against rasters smaller than 3 rows/columns.
			int sampleRows = Math.min(3, poBand.getYSize());
			int sampleCols = Math.min(3, poDataset.getRasterXSize());
			for (int i = 0; i < sampleRows; i++) {
				poBand.ReadRaster(0, i, poDataset.getRasterXSize(), 1, buf);
				for (int j = 0; j < sampleCols; j++)
					System.out.print(buf[j] + ", ");
				System.out.println("\n");
			}
		}

		// Release the native dataset handle.
		poDataset.delete();
	}
}
 

3. 解析 shp

package org.example.flink_springboot.shape;
 
import org.gdal.gdal.gdal;
import org.gdal.ogr.*;
import org.gdal.osr.SpatialReference;
 
import java.util.HashMap;
import java.util.Map;
 
public class GdalDemo_shp1 {
    /**
     * Opens an ESRI shapefile and prints its layers, spatial reference,
     * extent, field definitions, geometries (as GeoJSON) and attribute values.
     *
     * @param strVectorFile absolute path of the .shp file to read
     */
    public void opeanShp(String strVectorFile ) {
        // Register all OGR vector drivers.
        ogr.RegisterAll();
        // Support non-ASCII (e.g. Chinese) file paths.
        gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES");
        // Make attribute-table fields readable when encoded as CP936 (GBK).
        gdal.SetConfigOption("SHAPE_ENCODING", "CP936");

        String strDriverName = "ESRI Shapefile";
        org.gdal.ogr.Driver oDriver = ogr.GetDriverByName(strDriverName);
        if (oDriver == null) {
            System.out.println(strDriverName + " 驱动不可用!\n");
            return;
        }

        DataSource dataSource = oDriver.Open(strVectorFile);
        // FIX: Open() returns null on failure; the original dereferenced it unconditionally.
        if (dataSource == null) {
            System.out.println("无法打开文件: " + strVectorFile);
            return;
        }
        try {
            // List every layer in the data source.
            for (int i = 0; i < dataSource.GetLayerCount(); i++) {
                System.out.println("图层名称:<==>" + dataSource.GetLayer(i).GetName());
            }

            Layer layer = dataSource.GetLayer(0);
            System.out.println("图层名称:" + layer.GetName());

            SpatialReference spatialReference = layer.GetSpatialRef();
            // FIX: a shapefile without a .prj file has no spatial reference; guard against NPE.
            if (spatialReference != null) {
                System.out.println("空间参考坐标系:" + spatialReference.GetAttrValue("AUTHORITY", 0)
                        + spatialReference.GetAttrValue("AUTHORITY", 1));
            }

            // Layer extent: [minx, maxx, miny, maxy].
            double[] layerExtent = layer.GetExtent();
            System.out.println("图层范围:minx:" + layerExtent[0] + ",maxx:" + layerExtent[1]
                    + ",miny:" + layerExtent[2] + ",maxy:" + layerExtent[3]);

            // Collect field name -> field type name.
            // FIX: the original keyed the map by TYPE name, so two fields sharing a
            // type (e.g. two String fields) silently overwrote each other.
            FeatureDefn featureDefn = layer.GetLayerDefn();
            Map<String, String> fieldMap = new HashMap<String, String>();
            for (int i = 0; i < featureDefn.GetFieldCount(); i++) {
                FieldDefn fieldDefn = featureDefn.GetFieldDefn(i);
                fieldMap.put(fieldDefn.GetName(), fieldDefn.GetFieldTypeName(fieldDefn.GetFieldType()));
            }
            System.out.println();
            System.out.println("fieldMap:");
            System.out.println(fieldMap);

            // FIX: iterate the actual feature count instead of hard-coded FIDs 1..3
            // and a fixed loop to 12, and skip missing features instead of NPE-ing.
            long featureCount = layer.GetFeatureCount();
            for (long i = 0; i < featureCount; i++) {
                Feature feature = layer.GetFeature(i);
                if (feature == null) {
                    continue;
                }
                Geometry geometry = feature.GetGeometryRef();
                if (geometry != null) {
                    System.out.println(geometry.ExportToJson());
                }
                for (String fieldName : fieldMap.keySet()) {
                    System.out.println(" 属性名称:" + fieldName + ",属性值:" + feature.GetFieldAsString(fieldName));
                }
            }
        } finally {
            // Release the native data source handle.
            dataSource.delete();
        }
    }

    public static void main(String[] args) {
        // FIX: the original did not compile — it instantiated an undefined class
        // (GdalDemo_shp) and assigned the void result of opeanShp(...) to a String.
        GdalDemo_shp1 shp = new GdalDemo_shp1();
        String strVectorFile = "F:\\learn\\flink_springboot\\src\\main\\resources\\中华人民共和国.shp";
        shp.opeanShp(strVectorFile);
    }
}
package org.example.flink_springboot.shape;
 
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
 
// NOTE(review): this class references GeoTools/JTS types (ShapefileDataStore,
// ShapefileDataStoreFactory, WKTReader, SimpleFeature*, ECQL, Filter, Transaction,
// DefaultTransaction, Geometry) and java.util/java.nio types (List, HashMap, Charset,
// StringUtils) whose imports are missing from this snippet. Helper methods
// getClass(String), getFileds(SimpleFeatureType), add(...) and the ShpFiled type are
// also not shown — confirm them against the original project before compiling.
public class SHP {
    /**
     * Generate a shapefile from WKT geometries and attribute rows.
     *
     * @param shpPath  output shapefile path (including the file name)
     * @param encode   character encoding of the attribute table (.dbf)
     * @param geoType  geometry type name, e.g. Point or Polygon
     * @param shpKey   key of the geometry (WKT string) value in each data row
     * @param attrKeys attribute field definitions
     * @param data     rows, each mapping field names (and shpKey) to values
     */
    public  void write2Shape(String shpPath, String encode, String geoType, String shpKey, List<ShpFiled> attrKeys, List<Map<String, Object>> data) {
        // Parses the WKT geometry strings found in each data row.
        WKTReader reader = new WKTReader();
        try {
            // Create a new shapefile data store at the target path.
            File file = new File(shpPath);
            Map<String, Serializable> params = new HashMap<>();
            params.put(ShapefileDataStoreFactory.URLP.key, file.toURI().toURL());
            ShapefileDataStore ds = (ShapefileDataStore) new ShapefileDataStoreFactory().createNewDataStore(params);
 
            // Define the schema: CRS, a geometry column, and one column per attribute field.
            SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
            tb.setCRS(DefaultGeographicCRS.WGS84);
            tb.setName("sx_test");
            tb.add("the_geom", getClass(geoType));
            // Field names are upper-cased to match shapefile DBF conventions.
            for (ShpFiled field : attrKeys) {
                tb.add(field.getFiledname().toUpperCase(), getClass(field.getType()));
            }
            ds.createSchema(tb.buildFeatureType());
            // Set the attribute-table character encoding.
            Charset charset = Charset.forName(encode);
            ds.setCharset(charset);
 
            // Obtain a writer bound to the (single) feature type in this store.
            FeatureWriter<SimpleFeatureType, SimpleFeature> writer = ds.getFeatureWriter(ds.getTypeNames()[0], Transaction.AUTO_COMMIT);
            // Write one feature per data row.
            for (int i = 0; i < data.size(); i++) {
 
                SimpleFeature feature = writer.next();
                Map<String, Object> row = data.get(i);
 
 
                // Geometry arrives as WKT under shpKey.
                Geometry geom = reader.read(row.get(shpKey).toString());
                feature.setAttribute("the_geom", geom);
                for (ShpFiled field : attrKeys) {
                    if (row.get(field.getFiledname()) != null) {
                        feature.setAttribute(field.getFiledname().toUpperCase(), row.get(field.getFiledname()));
                    } else {
                        feature.setAttribute(field.getFiledname().toUpperCase(), null);
                    }
                }
            }
            // NOTE(review): write() is only called once, after the loop — presumably the
            // pending features are flushed by write()/close(); confirm against the
            // GeoTools FeatureWriter contract (usually write() is called per feature).
            writer.write();
            writer.close();
            ds.dispose();
 
            // Optionally package the shapefile set into a zip archive.
            //zipShapeFile(shpPath);
        } catch (IOException e) {
            e.printStackTrace();
        }catch (Exception e) {
            e.printStackTrace();
        }
    }
 
    /**
     * Update features in an existing shapefile.
     *
     * @param path     shapefile path
     * @param datalist rows; each may carry a "where" ECQL filter, a "geom" WKT
     *                 string, and values keyed by existing field names
     * @param code     character encoding of the attribute table
     */
    public static void updateFeature(String path, List<Map<String, Object>> datalist,String code) {
        ShapefileDataStore dataStore = null;
        File file = new File(path);
        // All modifications run inside one transaction so they commit (or roll back) together.
        Transaction transaction = new DefaultTransaction("handle");
        try {
            // NOTE(review): File.toURL() is deprecated in favor of file.toURI().toURL().
            dataStore = new ShapefileDataStore(file.toURL());
            Charset charset = Charset.forName(code);
            dataStore.setCharset(charset);
            String typeName = dataStore.getTypeNames()[0];
            SimpleFeatureStore store = (SimpleFeatureStore) dataStore.getFeatureSource(typeName);
 
            // Get the list of field names from the schema.
            SimpleFeatureType featureType = store.getSchema();
            List<String> fileds = getFileds(featureType);
            store.setTransaction(transaction);
            WKTReader reader = new WKTReader();
            for (Map<String, Object> data : datalist) {
 
                // "where" selects which features this row updates (ECQL syntax).
                Filter filter = null;
                if (data.get("where") != null) {
                    filter = ECQL.toFilter(data.get("where").toString());
                }
 
                // Build parallel arrays of attribute names and new values.
                Object[] objs = new Object[] {};
                String[] str = new String[] {};
                if (data.get("geom") != null) {
                    Geometry geometry = reader.read(data.get("geom").toString());
                    str = add(str, "the_geom");
                    objs = add(objs, geometry);
 
                }
                // Only fields present in both the schema and the row are updated.
                for (String stri : fileds) {
                    if (data.get(stri) != null) {
 
                        str = add(str, stri);
                        objs = add(objs, data.get(stri));
                    }
                }
                store.modifyFeatures(str, objs, filter);
            }
 
            transaction.commit();
            System.out.println("========updateFeature====end====");
        } catch (Exception eek) {
            eek.printStackTrace();
            try {
                // Undo any partial modifications on failure.
                transaction.rollback();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
 
        }
 
    }
 
    /**
     * Remove features from a shapefile whose field value matches one of the given ids.
     *
     * @param path  shapefile path
     * @param ids   field values identifying the features to delete
     * @param filed name of the field to match against
     * @param code  character encoding of the attribute table
     */
    public static void removeFeature(String path, List<String>ids,String filed,String code){
        ShapefileDataStore dataStore = null;
        File file = new File(path);
        // Deletions run inside one transaction so they commit (or roll back) together.
        Transaction transaction = new DefaultTransaction("handle");
        try {
            // NOTE(review): File.toURL() is deprecated in favor of file.toURI().toURL().
            dataStore = new ShapefileDataStore(file.toURL());
            Charset charset = Charset.forName(code);
            dataStore.setCharset(charset);
            String typeName = dataStore.getTypeNames()[0];
            SimpleFeatureStore store = (SimpleFeatureStore) dataStore.getFeatureSource(typeName);
            store.setTransaction(transaction);
 
            // Build an ECQL "field in (id1,id2,...)" filter.
            // NOTE(review): values are joined without quoting — string-typed ids
            // presumably need quotes for ECQL to parse; confirm with real data.
            Filter filter = null;
            if(ids.size()>0) {
                String join = filed +" in ("+StringUtils.join(ids,",")+")";
                System.out.println(join);
                filter = ECQL.toFilter(join);
            }
            if(filter!=null) {
 
                store.removeFeatures(filter);
                transaction.commit();
                System.out.println("======removeFeature== done ========");
            }
 
        } catch (Exception eek) {
            eek.printStackTrace();
            try {
                // Undo any partial deletions on failure.
                transaction.rollback();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
 
        }
 
    }
 
 
 
}


http://www.niftyadmin.cn/n/5667632.html

相关文章

xilinx hbm ip运用

AXI-HBM是一个集成的IP核,该核提供高达16个AXI3从PORT的HBM接口,每个使用他自己的独立的时钟。HBM2 GEN存储器也支持,HBM相对传统DDR的方案,带宽得到极大的提高。 特征: AXI3从端口存储器接口 -16个独立的256bit存储器接口 -可选的…

MATLAB方程求解:1.线性方程组

线性方程组 一.直接解法 例如: 方程组: x+2y+3z=5 x+4y+9z=-2 x+8y+27z=6 可知: A=[1 2 3;1 4 9;1 8 27] b=[5;-2;6] x=A\b 代码: A=[1 2 3;1 4 9;1 8 27]; b=[5;-2;6]; x=inv(A)*b; 线性方程组是线性代数研究的主要对象之一。 求解线性方程组的…

C语言学习导航 1.1计算机语言

第一章 C语言概述 第一节 计算机语言 认识编程语言 C语言概述 1.1.1 认识编程语言 说明: 一种人造语言,设计用来编写计算机程序,以便人类能够精确地向计算机发出指令并控制其行为。 分类: ①机器语言: 描述…

解决Rdkit Explicit valence for atom # 1 C, 5, is greater than permitted,价键不对的问题

本文主要介绍如何利用RDKit解决化合物SMILES表示中出现的原子价态错误,报错Explicit valence for atom # 1 C, 5, is greater than permitted。 问题现象: mol = Chem.MolFromSmiles("C[CH2]1(C#N)CC1") #运行后报错如下 >>> [11:…

智慧交通,智能消防系统助力高铁站安全

智慧交通是一项基于现代技术的创新领域,正不断为我们生活带来便利。在智慧交通领域中,高铁站是一个非常重要的环节。高铁站作为人流密集的区域,安全问题一直备受关注。为了提升高铁站的安全性和效率,智慧消防设备监测与集中监控系…

麒麟操作系统快捷键设置

这些是银河麒麟操作系统常用的快捷键,和Windows系统有点儿相似。 但也有一些快捷键未列出来,如Ctrl+Alt+T打开终端,Ctrl+d关闭终端,F2: 重命名; Ctrl+Shift+N: 新建文件夹。

中国电子学会202403青少年软件编程(Python)等级考试试卷(二级)真题

一、选择题 1.期末考试结束了,全班的语文成绩都储存在列表 score 中,班主任老师请小明找到全班最高分,小明准备用 Python 来完成,以下哪个选项,可以获取最高分呢?( ) A.min(score) B.max(score) C.score.max() D.score.min() 2.已知列表 a = [1,2,3,4,5,6],想输出…

云韧性,现代云服务不可或缺的组成部分

韧性,一个物理学概念,表示材料在变形或者破裂过程中吸收能量的能力。韧性越好,则发生脆性断裂的可能性越小。 如今,韧性也延伸到企业特质、产品特征等之中,用于形容企业、产品乃至服务的优劣。同样,随着云…