commit c5481ead055c44b75d79cdc695f52b63f611e869
Author: XinYi Song <2037158277@qq.com>
Date: Wed Dec 1 14:27:49 2021 +0800
1. Set up the Flask framework.
2. Integrate metadata-parsing algorithms for JPSS, Himawari-8, GF-3, Sentinel-1, Sentinel-2, Sentinel-3, ZY-2, HJ-1, SNPP and other remote sensing data.
3. Add per-satellite scan tasks in Flask: scheduled directory scanning and archiving of the data into the database.
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..73f69e0
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/.idea/dms.iml b/.idea/dms.iml
new file mode 100644
index 0000000..3f70c5c
--- /dev/null
+++ b/.idea/dms.iml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..d299771
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,179 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..a98794f
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..e95a9e2
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..94a25f7
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/__pycache__/app.cpython-39.pyc b/__pycache__/app.cpython-39.pyc
new file mode 100644
index 0000000..06757f9
Binary files /dev/null and b/__pycache__/app.cpython-39.pyc differ
diff --git a/app.py b/app.py
new file mode 100644
index 0000000..87a9632
--- /dev/null
+++ b/app.py
@@ -0,0 +1,16 @@
+from flask import Flask
+
+from application import init_app
+from common.config.factory import create_app
+
+app = init_app('dev')  # app configured from DevelopmentConfig (CSRF protection, sessions)
+app = create_app()  # note: this rebinds `app` to the scheduler-enabled factory app, discarding the one above (see the sketch after this diff)
+
+
+@app.route('/')
+def hello_world():
+ return 'Hello World!'
+
+
+if __name__ == '__main__':
+ app.run()
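Note that `app = create_app()` rebinds `app`, so the object returned by `init_app('dev')` (with DevelopmentConfig, CSRF protection and sessions) is never the one served; only the scheduler-enabled factory app runs. A minimal sketch of one way to merge the two factories, assuming `create_app` were changed to accept an already-configured app instead of building its own:

    # sketch only: create_app(app) is a hypothetical signature; the committed
    # version takes no arguments and creates its own Flask instance
    from application import init_app
    from common.config.factory import create_app

    app = init_app('dev')    # DevelopmentConfig + CSRF + session
    app = create_app(app)    # hypothetical: attach the APScheduler jobs to the same app

    if __name__ == '__main__':
        app.run()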
diff --git a/application/__init__.py b/application/__init__.py
new file mode 100644
index 0000000..0742144
--- /dev/null
+++ b/application/__init__.py
@@ -0,0 +1,52 @@
+# Project initialization module
+import logging
+from logging.handlers import RotatingFileHandler
+from flask import Flask
+from redis import StrictRedis
+from flask_wtf.csrf import CSRFProtect
+from flask_session import Session
+from flask_sqlalchemy import SQLAlchemy
+
+from application.settings.dev import DevelopmentConfig
+from application.settings.prop import ProductionConfig
+from common.config.factory import create_app
+
+config = {
+ 'dev': DevelopmentConfig,
+ 'prop': ProductionConfig,
+
+}
+
+
+def init_app(config_name):
+ """
+ Initialize the project application.
+ :param config_name: configuration key, 'dev' or 'prop'
+ :return: the configured Flask app
+ """
+ # Create the main application
+ app = Flask(__name__)
+
+ # Select the configuration class
+ Config = config[config_name]
+
+ # Load the configuration
+ app.config.from_object(Config)
+
+ # Initialize the Redis connection
+ # global redis_store
+ # redis_store = StrictRedis(host=Config.CACHE_REDIS_HOST, port=Config.CACHE_REDIS_PORT, db=0)
+
+ # Enable CSRF protection
+ CSRFProtect(app)
+
+ # Enable server-side sessions
+ Session(app)
+
+ # Set up the database connection
+ # db.init_app(app)
+ #
+ # # Enable logging (see the setup_log sketch after this file's diff)
+ # setup_log(Config)
+
+ return app
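`logging` and `RotatingFileHandler` are imported above, but only the commented-out `setup_log(Config)` call would use them, and that helper is not defined anywhere in this commit. A minimal sketch of what it might look like (log file name, rotation size and format string are assumptions):

    def setup_log(config_class):
        """Sketch: rotating file logging driven by the config class's LOG_LEVEL."""
        logging.basicConfig(level=getattr(logging, config_class.LOG_LEVEL, logging.DEBUG))
        # rotate at ~100 MB and keep 10 backups (assumed values)
        file_handler = RotatingFileHandler("logs/app.log", maxBytes=1024 * 1024 * 100, backupCount=10)
        file_handler.setFormatter(logging.Formatter('%(levelname)s %(filename)s:%(lineno)d %(message)s'))
        logging.getLogger().addHandler(file_handler)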
diff --git a/application/__pycache__/__init__.cpython-39.pyc b/application/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..f77d468
Binary files /dev/null and b/application/__pycache__/__init__.cpython-39.pyc differ
diff --git a/application/settings/__init__.py b/application/settings/__init__.py
new file mode 100644
index 0000000..78ef342
--- /dev/null
+++ b/application/settings/__init__.py
@@ -0,0 +1,44 @@
+from redis import StrictRedis
+
+
+# Global shared configuration class
+class Config(object):
+ """Core configuration class for the project."""
+ # Debug mode
+ DEBUG = True
+
+ # TODO: configure logging
+ LOG_LEVEL = "DEBUG"
+
+ # MySQL database settings (append ?charset=utf8 to specify the character set)
+ # Connection string format:
+ # SQLALCHEMY_DATABASE_URI = "<dialect>://<user>:<password>@<host>:<port>/<database>?<charset option>"
+ # SQLALCHEMY_DATABASE_URI = "postgresql://postgres:123456@192.168.2.139:5432/students?charset=utf8"
+ # # Track modifications dynamically; if unset, SQLAlchemy only emits a warning
+ # SQLALCHEMY_TRACK_MODIFICATIONS = False
+ # # Echo the raw SQL statements issued by queries
+ # SQLALCHEMY_ECHO = False
+ #
+ # # Redis configuration
+ # # Once the project goes live, this address is replaced with the real IP (same for the database host)
+ # CACHE_TYPE = 'redis'
+ # CACHE_REDIS_HOST = 'localhost'
+ # CACHE_REDIS_PORT = 6379
+ # CACHE_REDIS_DB = '8'
+ # CACHE_REDIS_PASSWORD = 'sdust2020'
+ #
+ # # Session secret key
+ # # A random string of a given length can be generated with base64.b64encode(os.urandom(48))
+ # SECRET_KEY = "CF3tEA1J3hRyIOw3PWE3ZE9+hLOcUDq6acX/mABsEMTXNjRDm5YldRLIXazQviwP"
+ #
+ # # flask_session settings
+ # SESSION_TYPE = 'redis' # store sessions in Redis
+ # SESSION_USE_SIGNER = True # sign the session_id stored in the cookie
+ # SESSION_REDIS = StrictRedis(host=CACHE_REDIS_HOST, port=CACHE_REDIS_PORT) # the Redis instance to use
+ # PERMANENT_SESSION_LIFETIME = 24 * 60 * 60 # session lifetime in seconds
+ XML_PATH = 'C:/Users/HP/Desktop/Number tube/test' # XML storage path
+ THUMBNAIL_PATH = 'C:/Users/HP/Desktop/Number tube/test' # thumbnail storage path
+ DFS_UPLOAD_URL = 'http://192.168.2.9:4096/group1/upload' # file-server upload URL
+ RESING_DATA_URL = 'http://192.168.2.9:8820/api/remote-sensing-data/add' # endpoint for adding remote sensing data records
+ DATA_TASK_URL = 'http://192.168.2.9:8820/api/data-storage-task-record/add' # endpoint for creating a data-archiving task record
+ DATA_END_TASK_URL = 'http://192.168.2.9:8820/api/data-storage-task-record/end' # endpoint for closing a data-archiving task record
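For reference, the connection-string format described above, filled in to match the commented-out PostgreSQL line (all values are placeholders, not real credentials):

    # placeholder values only; substitute the real host, credentials and database name
    SQLALCHEMY_DATABASE_URI = "postgresql://postgres:<password>@<host>:5432/students"
    # for MySQL the charset goes in the query string:
    # SQLALCHEMY_DATABASE_URI = "mysql+pymysql://<user>:<password>@<host>:3306/<dbname>?charset=utf8"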
diff --git a/application/settings/__pycache__/__init__.cpython-39.pyc b/application/settings/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..b62497d
Binary files /dev/null and b/application/settings/__pycache__/__init__.cpython-39.pyc differ
diff --git a/application/settings/__pycache__/dev.cpython-39.pyc b/application/settings/__pycache__/dev.cpython-39.pyc
new file mode 100644
index 0000000..4652130
Binary files /dev/null and b/application/settings/__pycache__/dev.cpython-39.pyc differ
diff --git a/application/settings/__pycache__/prop.cpython-39.pyc b/application/settings/__pycache__/prop.cpython-39.pyc
new file mode 100644
index 0000000..6c7e88a
Binary files /dev/null and b/application/settings/__pycache__/prop.cpython-39.pyc differ
diff --git a/application/settings/dev.py b/application/settings/dev.py
new file mode 100644
index 0000000..e2d014e
--- /dev/null
+++ b/application/settings/dev.py
@@ -0,0 +1,9 @@
+from . import Config
+
+
+class DevelopmentConfig(Config):
+ """
+ Configuration for development mode
+ """
+ # Echo the raw SQL statements issued by queries
+ SQLALCHEMY_ECHO = True
diff --git a/application/settings/prop.py b/application/settings/prop.py
new file mode 100644
index 0000000..52b2c5c
--- /dev/null
+++ b/application/settings/prop.py
@@ -0,0 +1,6 @@
+from . import Config
+
+
+class ProductionConfig(Config):
+ """生产模式下的配置"""
+ DEBUG = False
diff --git a/common/__init__.py b/common/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/common/__pycache__/__init__.cpython-39.pyc b/common/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..5bf55a6
Binary files /dev/null and b/common/__pycache__/__init__.cpython-39.pyc differ
diff --git a/common/config/__init__.py b/common/config/__init__.py
new file mode 100644
index 0000000..83fd1a2
--- /dev/null
+++ b/common/config/__init__.py
@@ -0,0 +1,5 @@
+"""
+Author : XinYi Song
+Time : 2021/11/23 9:47
+Desc:
+"""
diff --git a/common/config/__pycache__/__init__.cpython-39.pyc b/common/config/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..120e3d3
Binary files /dev/null and b/common/config/__pycache__/__init__.cpython-39.pyc differ
diff --git a/common/config/__pycache__/factory.cpython-39.pyc b/common/config/__pycache__/factory.cpython-39.pyc
new file mode 100644
index 0000000..29ab233
Binary files /dev/null and b/common/config/__pycache__/factory.cpython-39.pyc differ
diff --git a/common/config/factory.py b/common/config/factory.py
new file mode 100644
index 0000000..e0df62e
--- /dev/null
+++ b/common/config/factory.py
@@ -0,0 +1,85 @@
+"""
+Author : XinYi Song
+Time : 2021/11/23 9:47
+Desc:
+"""
+from flask import Flask
+from flask_apscheduler import APScheduler
+
+scheduler = APScheduler()
+
+
+def create_app():
+ app = Flask(__name__)
+ # The job configuration must be set, otherwise the tasks cannot be started (a sketch of one scan job follows this file's diff)
+ app.config.update(
+ {
+ "SCHEDULER_API_ENABLED": True,
+ "SCHEDULER_TIMEZONE": "Asia/Shanghai",
+ "JOBS": [
+ {
+ "id": "my_job", # job ID
+ "func": "util:scan_file_util.scan_VJ102_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 2 * 60 # run interval in seconds
+ },
+ {
+ "id": "job2", # job ID
+ "func": "util:scan_file_util.scan_VJ103_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job3", # job ID
+ "func": "util:scan_file_util.scan_GF3MDJ_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job4", # job ID
+ "func": "util:scan_file_util.scan_H08_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job5", # job ID
+ "func": "util:scan_file_util.scan_Sentinel1_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job6", # job ID
+ "func": "util:scan_file_util.scan_Sentinel2_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job7", # job ID
+ "func": "util:scan_file_util.scan_Sentinel3OL_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job8", # job ID
+ "func": "util:scan_file_util.scan_HJ1_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job9", # job ID
+ "func": "util:scan_file_util.scan_ZY3_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ },
+ {
+ "id": "job10", # job ID
+ "func": "util:scan_file_util.scan_SNPP_dir", # job entry point (module:function)
+ "trigger": "interval", # trigger type
+ "seconds": 3 * 60 # run interval in seconds
+ }
+ ]
+ }
+ )
+ scheduler.init_app(app)
+ scheduler.start()
+ return app
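Every `JOBS` entry points at a `scan_*_dir` function in `util/scan_file_util.py`, which is not part of this diff. A hypothetical sketch of the contract such a job is expected to fulfil (scan a directory, parse the metadata, register it with the DMS service) might look like:

    # hypothetical sketch; the real util/scan_file_util.py is not shown in this commit
    import os
    import requests

    from application.settings import Config
    from scan_data.GetMetaInfo import GetJPSSData


    def scan_VJ102_dir(scan_dir="D:/data/VJ102"):  # the input directory is an assumption
        """Scan for new VJ102 granules, parse them and register them with the DMS."""
        for name in os.listdir(scan_dir):
            if not name.endswith(".nc"):  # assumed file extension
                continue
            meta = GetJPSSData(os.path.join(scan_dir, name), Config.XML_PATH, Config.THUMBNAIL_PATH)
            if isinstance(meta, dict) and meta.get("code") == -1:
                continue  # parsing failed, skip this file
            requests.post(Config.RESING_DATA_URL, json=meta)  # add the parsed metadata record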
diff --git a/common/tools/__init__.py b/common/tools/__init__.py
new file mode 100644
index 0000000..4fb4384
--- /dev/null
+++ b/common/tools/__init__.py
@@ -0,0 +1,5 @@
+"""
+Author : XinYi Song
+Time : 2021/11/22 14:11
+Desc:
+"""
diff --git a/common/tools/__pycache__/__init__.cpython-39.pyc b/common/tools/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..cc6a138
Binary files /dev/null and b/common/tools/__pycache__/__init__.cpython-39.pyc differ
diff --git a/common/tools/__pycache__/dms.cpython-39.pyc b/common/tools/__pycache__/dms.cpython-39.pyc
new file mode 100644
index 0000000..28edc30
Binary files /dev/null and b/common/tools/__pycache__/dms.cpython-39.pyc differ
diff --git a/common/tools/dms.py b/common/tools/dms.py
new file mode 100644
index 0000000..ee12ca2
--- /dev/null
+++ b/common/tools/dms.py
@@ -0,0 +1,40 @@
+"""
+Author : XinYi Song
+Time : 2021/11/5 14:12
+Desc:
+"""
+from util.http_util import httpUtil
+
+
+def dms_login():
+ """
+ Log in to the data management (DMS) system.
+ :return: the 'data' payload of the login response
+ """
+ res = httpUtil(url='http://192.168.2.9:8820/api/login',
+ params={"userName": "client1", "password": "sxy1998"}).post_param()
+ return res.json()['data']
+
+
+def dms_task_record(token_s: str, collectionCode: str):
+ """
+ Call the DMS endpoint that returns the archiving task for a remote sensing collection.
+ :param collectionCode: collection code, e.g. "GF3_MDJ"
+ :param token_s: auth token obtained from dms_login()
+ :return: the 'data' payload of the response
+ """
+ res = httpUtil(url='http://192.168.2.9:8820/api/data-storage-task-record/get/collection-code/revision',
+ params={"collectionCode": collectionCode, "revision": 1}, token=token_s).get_herder()
+ return res.json()['data']
+
+
+def dms_sensing_data(token_s: str, collectionCode: str):
+ """
+ Call the DMS endpoint that returns all remote sensing data for a collection.
+ :param collectionCode: collection code
+ :param token_s: auth token obtained from dms_login()
+ :return: the 'data' payload of the response
+ """
+ res = httpUtil(url='http://192.168.2.9:8820/api/remote-sensing-data/get/collection-code',
+ params={"collectionCode": collectionCode}, token=token_s).get_herder()
+ return res.json()['data']
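A typical call sequence for these helpers (assuming the `data` payload returned by the login endpoint is the token that `httpUtil` expects):

    token = dms_login()                           # 'data' payload, assumed to carry the auth token
    task = dms_task_record(token, "GF3_MDJ")      # archiving task for the GF3_MDJ collection
    records = dms_sensing_data(token, "GF3_MDJ")  # all remote sensing data in that collection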
diff --git a/scan_data/GetMetaInfo.py b/scan_data/GetMetaInfo.py
new file mode 100644
index 0000000..1151087
--- /dev/null
+++ b/scan_data/GetMetaInfo.py
@@ -0,0 +1,1554 @@
+from xml.dom import minidom
+from osgeo import gdal
+from osgeo import ogr
+from osgeo import gdalconst
+import h5py
+from PIL import Image
+import numpy as np
+import tarfile
+import zipfile
+import re
+import os
+import io
+import sys
+
+
+def exe_path():
+ """
+ [Get the directory of the executable (or of this file when not frozen)]
+ Returns:
+ [str]: [executable directory]
+ """
+ if hasattr(sys, 'frozen'):
+ # Handles PyInstaller
+ return os.path.dirname(sys.executable)
+ return os.path.dirname(os.path.realpath(__file__))
+
+
+os.environ['PROJ_LIB'] = exe_path() + "/PROJ"
+
+
+def uint16to8(bands, lower_percent=0.001, higher_percent=99.999):
+ """
+ Stretch the image: convert 16-bit bands to 8-bit.
+ :param bands: input raster array (bands, rows, cols)
+ :param lower_percent: lower percentile of the stretch
+ :param higher_percent: upper percentile of the stretch
+ :return:
+ """
+ out = np.zeros_like(bands, dtype=np.uint8)
+ n = bands.shape[0]
+ for i in range(n):
+ a = 0 # np.min(band)
+ b = 255 # np.max(band)
+ c = np.percentile(bands[i, :, :], lower_percent)
+ d = np.percentile(bands[i, :, :], higher_percent)
+ t = a + (bands[i, :, :] - c) * (b - a) / (d - c)
+ t[t < a] = a
+ t[t > b] = b
+ out[i, :, :] = t
+ return out
+
+
+def createXML(metadata, xlm_file):
+ """
+ Create an XML file and write the metadata dictionary into it.
+ :param metadata: metadata dictionary
+ :param xlm_file: output XML file path
+ :return:
+ """
+ # Create an empty DOM document
+ document = minidom.Document() # create the DOM document object
+ # Create the root element
+ root = document.createElement('ProductMetaData')
+ # Set attributes on the root element (none at the moment)
+ # root.setAttribute('', '')
+ # Attach the root element to the document
+ document.appendChild(root)
+ # Convert the dictionary to XML
+ for key in metadata:
+ # Create a child element
+ node_name = document.createElement(key)
+ # Set the child element's text content
+ node_name.appendChild(document.createTextNode(str(metadata[key])))
+ # Append the child element to the root
+ root.appendChild(node_name)
+ # Write out the XML document
+ with open(xlm_file, 'w', encoding='utf-8') as f:
+ document.writexml(f, indent='\t', newl='\n', addindent='\t', encoding='utf-8')
+ f.close()  # redundant: the with-statement already closes the file
+
+
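# Editorial sketch (not part of the commit): minimal usage of the two helpers above.
# createXML writes each dict key as a child element of <ProductMetaData>, and
# uint16to8 expects an array shaped (bands, rows, cols) and returns uint8:
#
#     demo_meta = {"StartTime": "2021-11-01 12:00:00", "CloudPercent": "5"}
#     createXML(demo_meta, "demo.xml")
#     stretched = uint16to8(np.random.randint(0, 65535, (3, 64, 64), dtype=np.uint16))
#     print(stretched.dtype, stretched.shape)   # uint8 (3, 64, 64)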
+def GetGFPMSData(in_file, xml_path, thumbnail_ath):
+ """
+ Retrieve metadata for GF (Gaofen) PMS imagery.
+ :param thumbnail_ath: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input archive
+ :return: metadata dictionaries (MSS and PAN)
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ with tarfile.open(in_file, mode='r') as tar_file:
+ extensions = ('MSS2_thumb.jpg', 'PAN2_thumb.jpg', 'MSS2.xml', 'PAN2.xml')
+ file_list = [file for file in tar_file.getnames() if file.endswith(extensions)]
+
+ # Extract the multispectral (MSS) thumbnail
+ if file_list[1].endswith('MSS2_thumb.jpg'):
+ tar_file.extract(file_list[1], thumbnail_ath)
+ ThumbnailPath_MSS = thumbnail_ath + "/" + file_list[1]  # the thumbnail was extracted into thumbnail_ath, not in_path
+ ThumbnailName_MSS = file_list[1]
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # Extract the panchromatic (PAN) thumbnail
+ if file_list[3].endswith("PAN2_thumb.jpg"):
+ tar_file.extract(file_list[3], thumbnail_ath)
+ ThumbnailPath_PAN = thumbnail_ath + "/" + file_list[3]
+ ThumbnailName_PAN = file_list[3]
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # Extract the multispectral (MSS) XML file
+ if file_list[0].endswith('MSS2.xml'):
+ # Extract the XML file
+ tar_file.extract(file_list[0], xml_path)
+ xmlPath = xml_path + "/" + file_list[0]
+ xmlFileName = file_list[0]
+ # Read the metadata as a file stream
+ meta_file = tar_file.extractfile(file_list[0])
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # Product dates
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # Other information
+ # WidthInPixels = dom.getElementsByTagName('WidthInPixels')[0].firstChild.data
+ # HeightInPixels = dom.getElementsByTagName('HeightInPixels')[0].firstChild.data
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # ground sample distance (resolution)
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # bands
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # cloud cover
+
+ # Corner coordinates
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # top-left latitude
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # top-left longitude
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # top-right latitude
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # top-right longitude
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # bottom-right latitude
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # bottom-right longitude
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # bottom-left latitude
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # bottom-left longitude
+
+ # Boundary geometry (WKT polygon)
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # Build the multispectral (MSS) metadata dictionary
+ gf_mss_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ # "WidthInPixels": WidthInPixels,
+ # "HeightInPixels": HeightInPixels,
+ "ProjectedCoordinates": "",
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath_MSS,
+ "ThumbnailName": ThumbnailName_MSS,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # Extract the panchromatic (PAN) XML file
+ if file_list[2].endswith('PAN2.xml'):
+ # Extract the XML file
+ tar_file.extract(file_list[2], xml_path)
+ xmlPath = xml_path + "/" + file_list[2]
+ xmlFileName = file_list[2]
+
+ # Read the metadata as a file stream
+ meta_file = tar_file.extractfile(file_list[2])
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # Product dates
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # Other information
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # ground sample distance (resolution)
+ # WidthInPixels = dom.getElementsByTagName('WidthInPixels')[0].firstChild.data
+ # HeightInPixels = dom.getElementsByTagName('HeightInPixels')[0].firstChild.data
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # bands
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # cloud cover
+
+ # Corner coordinates
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # top-left latitude
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # top-left longitude
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # top-right latitude
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # top-right longitude
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # bottom-right latitude
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # bottom-right longitude
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # bottom-left latitude
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # bottom-left longitude
+
+ # Boundary geometry (WKT polygon)
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # Build the panchromatic (PAN) metadata dictionary
+ gf_pan_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ # "WidthInPixels": WidthInPixels,
+ # "HeightInPixels": HeightInPixels,
+ "ProjectedCoordinates": "",
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath_PAN,
+ "ThumbnailName": ThumbnailName_PAN,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+ # Close the archive
+ tar_file.close()
+ if (not gf_mss_dict) or (not gf_pan_dict):
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return gf_mss_dict, gf_pan_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
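# Editorial sketch (not part of the commit): the parsers in this module share one
# calling convention: on success they return the metadata dict (or a pair of dicts,
# as GetGFPMSData does above), on failure a dict of the form {"code": -1, "msg": "..."}.
# A caller is therefore expected to branch like this:
#
#     result = GetGFPMSData(tar_path, Config.XML_PATH, Config.THUMBNAIL_PATH)
#     if isinstance(result, dict) and result.get("code") == -1:
#         ...  # handle / log the error
#     else:
#         mss_meta, pan_meta = result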
+def GetGF3MDJData(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve metadata for GF-3 MDJ (Gaofen-3) imagery.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input archive
+ :return: metadata dictionary
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ with tarfile.open(in_file, mode='r') as tar_file:
+ extensions = ('.thumb.jpg', 'meta.xml')
+ file_list = [file for file in tar_file.getnames() if file.endswith(extensions)]
+ # 解压缩略图
+ if file_list[0].endswith('.thumb.jpg'):
+ tar_file.extract(file_list[0], thumbnail_path)
+ ThumbnailPath = thumbnail_path + "/" + file_list[0]
+ ThumbnailName = file_list[0]
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # 解压XML文件
+ if file_list[1].endswith('meta.xml'):
+ tar_file.extract(file_list[1], xml_path)
+ xmlPath = xml_path + "/" + file_list[1]
+ xmlFileName = file_list[1]
+ # 获取文件流
+ meta_file = tar_file.extractfile(file_list[1])
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ CollectionCode = "GF3_MDJ"
+ ProduceTime = dom.getElementsByTagName('productGentime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName("imagingTime")[0].getElementsByTagName("start")[0].firstChild.data
+ EndTime = dom.getElementsByTagName("imagingTime")[0].getElementsByTagName("end")[0].firstChild.data
+
+ # 其他信息
+ ImageGSD = dom.getElementsByTagName('NominalResolution')[0].firstChild.data
+ # EarthModel = dom.getElementsByTagName('EarthModel')[0].firstChild.data
+ ProjectedCoordinates = dom.getElementsByTagName('ProjectModel')[0].firstChild.data
+ Bands = "1,2"
+
+ # 经纬度
+ TopLeftLatitude = dom.getElementsByTagName("topLeft")[0].getElementsByTagName("latitude")[
+ 0].firstChild.data
+ TopLeftLongitude = dom.getElementsByTagName("topLeft")[0].getElementsByTagName("longitude")[
+ 0].firstChild.data
+ TopRightLatitude = dom.getElementsByTagName("topRight")[0].getElementsByTagName("latitude")[
+ 0].firstChild.data
+ TopRightLongitude = dom.getElementsByTagName("topRight")[0].getElementsByTagName("longitude")[
+ 0].firstChild.data
+ BottomLeftLatitude = dom.getElementsByTagName("bottomLeft")[0].getElementsByTagName("latitude")[
+ 0].firstChild.data
+ BottomLeftLongitude = dom.getElementsByTagName("bottomLeft")[0].getElementsByTagName("longitude")[
+ 0].firstChild.data
+ BottomRightLatitude = dom.getElementsByTagName("bottomRight")[0].getElementsByTagName("latitude")[
+ 0].firstChild.data
+ BottomRightLongitude = dom.getElementsByTagName("bottomRight")[0].getElementsByTagName("longitude")[
+ 0].firstChild.data
+ # 边界几何
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 构建字典
+ gf3_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": "",
+ "boundaryGeomStr": boundaryGeomStr,
+ "Bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": ProjectedCoordinates,
+ 'CollectionCode': CollectionCode,
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # 判断字典是否为空
+ if not gf3_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return gf3_dict
+ except Exception as e:
+ return {"code": -1, "msg": str(e)}
+
+
+def GetGF4PMIData(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve metadata for GF-4 PMI imagery.
+ PMS (visible/near-infrared, 5 bands, 50 m) and IRS (mid-wave infrared, 1 band, 400 m)
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input archive
+ :return: metadata dictionaries
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ with tarfile.open(in_file, mode='r') as tar_file:
+ extensions = ('_thumb.jpg', '.xml')
+ file_list = [file for file in tar_file.getnames() if file.endswith(extensions)]
+
+ # 解压PMS缩略图
+ if file_list[2].endswith('_thumb.jpg') and file_list[2].startswith('GF4_PMS_'):
+ tar_file.extract(file_list[2], thumbnail_path)
+ ThumbnailPath_PMS = thumbnail_path + "/" + file_list[2]
+ ThumbnailName_PMS = file_list[2]
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # 解压IRS缩略图
+ if file_list[0].endswith('_thumb.jpg') and file_list[0].startswith('GF4_IRS_'):
+
+ tar_file.extract(file_list[0], thumbnail_path)
+ ThumbnailPath_IRS = thumbnail_path + "/" + file_list[0]
+ ThumbnailName_IRS = file_list[0]
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # 解压PMS XML文件
+ if file_list[3].endswith('.xml') and file_list[3].startswith('GF4_PMS_'):
+ # 解压XML文件
+ tar_file.extract(file_list[3], xml_path)
+ xmlPath = xml_path + "/" + file_list[3]
+ xmlFileName = file_list[3]
+
+ # 获取文件流
+ meta_file = tar_file.extractfile(file_list[3])
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # 其他信息
+ CollectionCode = "GF4_PMS"
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率
+ # ProjectedCoordinates = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系
+ ProjectedCoordinates = "" # 投影坐标系
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖
+
+ # 中心经纬度
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度
+
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 构建可见光近红外(PMS)字典
+ gf4_pms_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ "boundaryGeomStr": boundaryGeomStr,
+ "Bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": ProjectedCoordinates,
+ 'CollectionCode': CollectionCode,
+ "ThumbnailName": ThumbnailName_PMS,
+ "ThumbnailPath": ThumbnailPath_PMS,
+ "xmlFileName": xmlFileName,
+ "xmlPath": xmlPath,
+ "DirectoryDepth": "month"}
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # 解压IRS XML文件
+ if file_list[1].endswith('.xml') and file_list[1].startswith('GF4_IRS_'):
+ # 解压XML文件
+ tar_file.extract(file_list[1], xml_path)
+ xmlPath = xml_path + "/" + file_list[1]
+ xmlFileName = file_list[1]
+
+ # 获取文件流
+ meta_file = tar_file.extractfile(file_list[1])
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # 其他信息
+ CollectionCode = "GF4_IRS"
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率
+ # ProjectedCoordinates = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系
+ ProjectedCoordinates = "" # 投影坐标系
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖
+
+ # 中心经纬度
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度
+
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+ # 构建中红外(IRS)字典
+ gf4_irs_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ "boundaryGeomStr": boundaryGeomStr,
+ "Bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": ProjectedCoordinates,
+ 'CollectionCode': CollectionCode,
+ "ThumbnailName": ThumbnailName_IRS,
+ "ThumbnailPath": ThumbnailPath_IRS,
+ "xmlFileName": xmlFileName,
+ "xmlPath": xmlPath,
+ "DirectoryDepth": "month"}
+ else:
+ return {"code": -1, "msg": "找不到指定文件..."}
+
+ # 关闭压缩文件
+ tar_file.close()
+ # 判断字典是否为空
+ if (not gf4_pms_dict) or (not gf4_irs_dict):
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return gf4_pms_dict, gf4_irs_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetH08Data(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve Himawari-8 metadata.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input HDF5 file
+ :return: metadata dictionary
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+ # Other information
+ with h5py.File(in_file, mode='r') as f:
+ start_time = f['start_time'][0]
+ end_time = f['end_time'][0]
+ band_id = f['band_id'][:]
+ bands = ','.join(str(i) for i in band_id)
+ ImageGSD = '1000, 500, 2000'
+
+ # Generate the thumbnail
+ gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+ in_datasets = gdal.Open(in_file)
+ meta_data = in_datasets.GetMetadata()
+ # Fetch the subdatasets
+ datasets = in_datasets.GetSubDatasets()
+ red_data = gdal.Open(datasets[7][0]).ReadAsArray()
+ gre_data = gdal.Open(datasets[6][0]).ReadAsArray()
+ blu_data = gdal.Open(datasets[5][0]).ReadAsArray()
+ img_data = np.array([red_data, gre_data, blu_data])
+ img_data = uint16to8(img_data)
+ # Convert the array to a PIL Image
+ img_data2 = np.transpose(img_data, (1, 2, 0))
+ img_data2 = img_data2[:, :, ::-1]
+ img = Image.fromarray(img_data2)
+ # Shrink the image
+ if img_data.shape[1] > img_data.shape[2]:
+ width = 512
+ height = int(width / img_data.shape[1] * img_data.shape[2])
+ else:
+ height = 512
+ width = int(height / img_data.shape[1] * img_data.shape[2])
+ img.thumbnail((width, height))
+ img.save(ThumbnailPath, "PNG")
+
+ # Free memory
+ del in_datasets
+ del img_data
+ del img_data2
+ del img
+
+ # Write the XML file
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ createXML(meta_data, xmlPath)
+
+ # Product date
+ date_created = meta_data['date_created']
+ # band_number = meta_data['band_number']
+
+ # Corner coordinates (partly hard-coded to the nominal Himawari-8 full-disk extent)
+ upper_left_latitude = meta_data['upper_left_latitude']
+ upper_left_longitude = int(meta_data['upper_left_longitude']) - 180
+ upper_right_latitude = meta_data['upper_left_latitude']
+ upper_right_longitude = 200 - 180
+ lower_right_latitude = -60
+ lower_right_longitude = 200 - 180
+ lower_left_latitude = -60
+ lower_left_longitude = str(int(meta_data['upper_left_longitude']) - 180)
+
+ boundaryGeomStr = f'POLYGON(({upper_left_longitude} {upper_left_latitude},' \
+ f'{upper_right_longitude} {upper_right_latitude},' \
+ f'{lower_right_longitude} {lower_right_latitude},' \
+ f'{lower_left_longitude} {lower_left_latitude},' \
+ f'{upper_left_longitude} {upper_left_latitude}))'
+
+ # Build the metadata dictionary
+ himawari8_dict = {"ProduceTime": date_created,
+ "StartTime": "",
+ "EndTime": "",
+ "CloudPercent": "",
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": "",
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "day"}
+
+ # Check that the dictionary is not empty
+ if not himawari8_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return himawari8_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetJPSSData(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve Joint Polar Satellite System (JPSS-1 / NOAA-20) metadata.
+ :param xml_path: XML output directory
+ :param thumbnail_path: thumbnail output directory
+ :param in_file: input HDF/NetCDF file
+ :return: metadata dictionary
+ """
+ try:
+ # 生成缩略图
+ in_path, basename = os.path.split(in_file)
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+ gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+ in_datasets = gdal.Open(in_file)
+ meta_data = in_datasets.GetMetadata()
+ # 取出子数据集
+ datasets = in_datasets.GetSubDatasets()
+ red_data = gdal.Open(datasets[0][0]).ReadAsArray()
+ nir_data = gdal.Open(datasets[3][0]).ReadAsArray()
+ swir_data = gdal.Open(datasets[9][0]).ReadAsArray()
+ img_data = np.array([red_data, nir_data, swir_data])
+ img_data = uint16to8(img_data)
+ # Array转Image
+ img_data2 = np.transpose(img_data, (1, 2, 0))
+ img_data2 = img_data2[:, :, ::-1]
+ img = Image.fromarray(img_data2)
+ # 压缩图片大小
+ if img_data.shape[1] > img_data.shape[2]:
+ width = 512
+ height = int(width / img_data.shape[1] * img_data.shape[2])
+ else:
+ height = 512
+ width = int(height / img_data.shape[1] * img_data.shape[2])
+ img.thumbnail((width, height))
+ img.save(ThumbnailPath, "PNG")
+
+ # 释放内存
+ del in_datasets
+ del img_data
+ del img_data2
+ del img
+
+ # 生成XML文件
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ createXML(meta_data, xmlPath)
+
+ # 产品日期
+ ProductionTime = meta_data['ProductionTime']
+ StartTime = meta_data['StartTime']
+ EndTime = meta_data['EndTime']
+
+ # 其他信息
+ ImageGSD = str(meta_data['LongName']).split(" ")[-1]
+ Bands = str(meta_data['title']).split(" ")[1]
+
+ # 中心经纬度
+ productUpperLeftLat = meta_data['NorthBoundingCoordinate'] # 左上纬度
+ productUpperLeftLong = meta_data['WestBoundingCoordinate'] # 左上经度
+ productUpperRightLat = meta_data['NorthBoundingCoordinate'] # 右上纬度
+ productUpperRightLong = meta_data['EastBoundingCoordinate'] # 右上经度
+ productLowerLeftLat = meta_data['SouthBoundingCoordinate'] # 左下纬度
+ productLowerLeftLong = meta_data['WestBoundingCoordinate'] # 左下经度
+ productLowerRightLat = meta_data['SouthBoundingCoordinate'] # 右下纬度
+ productLowerRightLong = meta_data['EastBoundingCoordinate'] # 右下纬度
+
+ # 边界几何
+ boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \
+ f'{productUpperRightLong} {productUpperRightLat},' \
+ f'{productLowerRightLong} {productLowerRightLat},' \
+ f'{productLowerLeftLong} {productLowerLeftLat},' \
+ f'{productUpperLeftLong} {productUpperLeftLat}))'
+
+ # 构建字典
+ jpss_dict = {"ProduceTime": ProductionTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": "",
+ # "TopLeftLatitude": productUpperLeftLat,
+ # "TopLeftLongitude": productUpperLeftLong,
+ # "TopRightLatitude": productUpperRightLat,
+ # "TopRightLongitude": productUpperRightLong,
+ # "BottomLeftLatitude": productLowerLeftLat,
+ # "BottomLeftLongitude": productLowerLeftLong,
+ # "BottomRightLatitude": productLowerRightLat,
+ # "BottomRightLongitude": productLowerRightLong,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": "",
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "day"}
+
+ # 判断字典是否为空
+ if not jpss_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ print(jpss_dict)
+ return jpss_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetSNPPData(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve Suomi National Polar-orbiting Partnership (SNPP) metadata.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input HDF/NetCDF file
+ :return: metadata dictionary
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ # 生成缩略图
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+ gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+ in_datasets = gdal.Open(in_file)
+ meta_data = in_datasets.GetMetadata()
+
+ # 取出子数据集
+ datasets = in_datasets.GetSubDatasets()
+ red_data = gdal.Open(datasets[0][0]).ReadAsArray()
+ gre_data = gdal.Open(datasets[3][0]).ReadAsArray()
+ blu_data = gdal.Open(datasets[9][0]).ReadAsArray()
+ img_data = np.array([red_data, gre_data, blu_data])
+ img_data = uint16to8(img_data)
+ # Array转Image
+ img_data2 = np.transpose(img_data, (1, 2, 0))
+ img_data2 = img_data2[:, :, ::-1]
+ img = Image.fromarray(img_data2)
+ # 压缩图片大小
+ if img_data.shape[1] > img_data.shape[2]:
+ width = 512
+ height = int(width / img_data.shape[1] * img_data.shape[2])
+ else:
+ height = 512
+ width = int(height / img_data.shape[1] * img_data.shape[2])
+ img.thumbnail((width, height))
+ img.save(ThumbnailPath, "PNG")
+
+ # 释放内存
+ del in_datasets
+ del img_data
+ del img_data2
+ del img
+
+ # 生成XML文件
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ createXML(meta_data, xmlPath)
+
+ # 产品日期
+ ProductionTime = meta_data['ProductionTime']
+ StartTime = meta_data['StartTime']
+ EndTime = meta_data['EndTime']
+
+ # 其他信息
+ ImageGSD = str(meta_data['LongName']).split(" ")[-1][:-1]
+ Bands = str(meta_data['title'])
+
+ # 中心经纬度
+ productUpperLeftLat = meta_data['NorthBoundingCoordinate'] # 左上纬度
+ productUpperLeftLong = meta_data['WestBoundingCoordinate'] # 左上经度
+ productUpperRightLat = meta_data['NorthBoundingCoordinate'] # 右上纬度
+ productUpperRightLong = meta_data['EastBoundingCoordinate'] # 右上经度
+ productLowerLeftLat = meta_data['SouthBoundingCoordinate'] # 左下纬度
+ productLowerLeftLong = meta_data['WestBoundingCoordinate'] # 左下经度
+ productLowerRightLat = meta_data['SouthBoundingCoordinate'] # 右下纬度
+ productLowerRightLong = meta_data['EastBoundingCoordinate'] # 右下纬度
+
+ boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \
+ f'{productUpperRightLong} {productUpperRightLat},' \
+ f'{productLowerRightLong} {productLowerRightLat},' \
+ f'{productLowerLeftLong} {productLowerLeftLat},' \
+ f'{productUpperLeftLong} {productUpperLeftLat}))'
+
+ # 构建字典
+ snpp_dict = {"ProductionTime": ProductionTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": "",
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": "",
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "day"}
+
+ # 判断字典是否为空
+ if not snpp_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return snpp_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetSentinel1Data(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve Sentinel-1 metadata.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input SAFE .zip archive
+ :return: metadata dictionary
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ with zipfile.ZipFile(in_file, mode='r') as zip_file:
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+ for member in zip_file.namelist():
+ if re.match(r'[0-9a-zA-Z\_]+.SAFE/annotation/s1a-iw-grd-vv[0-9a-z\-]+.xml', member):
+ # 输出xml文件
+ meta_data = zip_file.read(member)
+ with open(xmlPath, "wb") as fout:
+ fout.write(meta_data)
+ # 产品日期
+ meta_content = zip_file.open(member)
+ dom = minidom.parse(meta_content)
+ ProduceTime = dom.getElementsByTagName('qualityInformation')[
+ 0].getElementsByTagName('qualityDataList')[
+ 0].getElementsByTagName('qualityData')[
+ 0].getElementsByTagName('azimuthTime')[
+ 0].firstChild.data
+ StartTime = dom.getElementsByTagName('adsHeader')[0].getElementsByTagName('startTime')[
+ 0].firstChild.data
+ StopTime = dom.getElementsByTagName('adsHeader')[0].getElementsByTagName('stopTime')[
+ 0].firstChild.data
+ elif re.match(r'[0-9a-zA-Z\_]+.SAFE/preview/map-overlay.kml', member):
+ # 读取其他信息
+ meta_content = zip_file.open(member)
+ dom = minidom.parse(meta_content)
+ coordinates = dom.getElementsByTagName('coordinates')[0].firstChild.data
+
+ # 经纬度
+ lon_lat = re.split(r'\s', coordinates)
+ TopLeftLatitude = re.split(r'\,', lon_lat[0])[1] # 左上纬度
+ TopLeftLongitude = re.split(r'\,', lon_lat[0])[0] # 左上经度
+ TopRightLatitude = re.split(r'\,', lon_lat[1])[1] # 右上纬度
+ TopRightLongitude = re.split(r'\,', lon_lat[1])[0] # 右上经度
+ BottomRightLatitude = re.split(r'\,', lon_lat[2])[1] # 右下纬度
+ BottomRightLongitude = re.split(r'\,', lon_lat[2])[0] # 右下经度
+ BottomLeftLatitude = re.split(r'\,', lon_lat[3])[1] # 左下纬度
+ BottomLeftLongitude = re.split(r'\,', lon_lat[3])[0] # 左下经度
+
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ elif re.match(r'[0-9a-zA-Z\_]+.SAFE/preview/quick-look.png', member):
+ # 输出缩略图
+ thumb_data = zip_file.read(member)
+ with open(ThumbnailPath, "wb") as fout:
+ fout.write(thumb_data)
+ else:
+ continue
+ # 生成字典
+ S1_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "StopTime": StopTime,
+ "CloudPercent": "",
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": "Amplitude_VH,Intensity_VH,Amplitude_VV,Intensity_VV",
+ # "NumberBands": "",
+ "ImageGSD": "10",
+ "ProjectedCoordinates": '',
+ "CollectionCode": '',
+ "ThumbnailName": ThumbnailName,
+ "ThumbnailPath": ThumbnailPath,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ zip_file.close()
+ if not S1_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return S1_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetSentinel2Data(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve Sentinel-2 metadata.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input SAFE .zip archive
+ :return: metadata dictionary
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ with zipfile.ZipFile(in_file, 'r', zipfile.ZIP_DEFLATED) as zip_file:
+ extensions = ('_B02_60m.jp2', '_B03_60m.jp2', '_B04_60m.jp2', '.SAFE/MTD_MSIL2A.xml')
+ file_list = [file for file in zip_file.namelist() if file.endswith(extensions)]
+ file_list.sort()
+
+ # 生成缩略图
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+ # bgr_data = ['/vsizip/%s/%s' % (in_file, file) for file in file_list[:3]]
+ # out_vrt = '/vsimem/stacked.vrt' # 波段合成输出虚拟路径
+ # # 将多个源文件合成为一个VRT(virtual gdal dataset)文件
+ # out_dataset = gdal.BuildVRT(out_vrt, bgr_data, separate=True)
+ # # 将VRT文件转换为目标格式的图像
+ # gdal.Translate(ThumbnailPath,
+ # out_dataset,
+ # format='JPEG',
+ # outputType=gdal.GDT_Byte,
+ # widthPct=10,
+ # heightPct=10,
+ # creationOptions=["TILED=YES", "COMPRESS=LZW"])
+ # # 释放内存
+ # # gdal.GetDriverByName("VRT").Delete('/vsimem/stacked.vrt')
+ # gdal.Unlink('/vsimem/stacked.vrt')
+ # del out_dataset
+
+ rgb_list = []
+ for file in file_list[:3]:
+ sub_dataset = gdal.Open('/vsizip/%s/%s' % (in_file, file))
+ sub_array = sub_dataset.ReadAsArray()
+ rgb_list.append(sub_array)
+ img_data = np.array([rgb_list[2], rgb_list[1], rgb_list[0]])
+ img_data = uint16to8(img_data)
+
+ # Array转Image
+ img_data2 = np.transpose(img_data, (1, 2, 0))
+ img_data2 = img_data2[:, :, ::-1]
+ img = Image.fromarray(img_data2)
+ # 压缩图片大小
+ if img_data.shape[1] > img_data.shape[2]:
+ width = 512
+ height = int(width / img_data.shape[1] * img_data.shape[2])
+ else:
+ height = 512
+ width = int(height / img_data.shape[1] * img_data.shape[2])
+ img.thumbnail((width, height))
+ img.save(ThumbnailPath, "PNG")
+
+ # 释放内存
+ del rgb_list
+ del img_data
+ del img_data2
+ del img
+
+ # 解压多光谱XML文件
+ if file_list[3].endswith('.SAFE/MTD_MSIL2A.xml'):
+ # 生成XML文件
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ meta_data = zip_file.read(file_list[3])
+ with open(xmlPath, "wb") as fout:
+ fout.write(meta_data)
+
+ # 读取其他信息
+ meta_content = zip_file.open(file_list[3])
+ dom = minidom.parse(meta_content)
+ cloud_percent = dom.getElementsByTagName('n1:Quality_Indicators_Info')[
+ 0].getElementsByTagName('Cloud_Coverage_Assessment')[0].firstChild.data
+ ImageGSD = '10, 20, 60'
+ ProjectedCoordinates = dom.getElementsByTagName('n1:Geometric_Info')[
+ 0].getElementsByTagName('Coordinate_Reference_System')[
+ 0].getElementsByTagName('GEO_TABLES')[0].firstChild.data
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+ 0].getElementsByTagName('GENERATION_TIME')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+ 0].getElementsByTagName('PRODUCT_START_TIME')[0].firstChild.data
+ StopTime = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+ 0].getElementsByTagName('PRODUCT_STOP_TIME')[0].firstChild.data
+
+ # 经纬度
+ lon_lat = dom.getElementsByTagName('n1:Geometric_Info')[0].getElementsByTagName('Product_Footprint')[
+ 0].getElementsByTagName('Product_Footprint')[0].getElementsByTagName('Global_Footprint')[
+ 0].getElementsByTagName('EXT_POS_LIST')[0].firstChild.data
+ TopLeftLatitude = re.split(r'\s', lon_lat)[0] # 左上纬度
+ TopLeftLongitude = re.split(r'\s', lon_lat)[1] # 左上经度
+ TopRightLatitude = re.split(r'\s', lon_lat)[2] # 右上纬度
+ TopRightLongitude = re.split(r'\s', lon_lat)[3] # 右上经度
+ BottomRightLatitude = re.split(r'\s', lon_lat)[4] # 右下纬度
+ BottomRightLongitude = re.split(r'\s', lon_lat)[5] # 右下经度
+ BottomLeftLatitude = re.split(r'\s', lon_lat)[6] # 左下纬度
+ BottomLeftLongitude = re.split(r'\s', lon_lat)[7] # 左下经度
+
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 生成字典
+ S2_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "StopTime": StopTime,
+ "CloudPercent": cloud_percent,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": "1,2,3,4,5,6,7,8,9,10,11,12",
+ # "NumberBands": '12',
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": ProjectedCoordinates,
+ "CollectionCode": '',
+ "ThumbnailName": ThumbnailName,
+ "ThumbnailPath": ThumbnailPath,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ zip_file.close()
+ if not S2_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return S2_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetSentinel3OLData(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve Sentinel-3 OLCI (Ocean and Land Colour Instrument) metadata (still to be revised).
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input .zip archive
+ :return: metadata dictionary
+ """
+ try:
+ in_path, basename = os.path.split(in_file)
+ with zipfile.ZipFile(in_file, 'r', zipfile.ZIP_DEFLATED) as zip_file:
+ extensions = ('Oa03_radiance.nc', 'Oa05_radiance.nc', 'Oa08_radiance.nc', 'xfdumanifest.xml')
+ file_list = [file for file in zip_file.namelist() if file.endswith(extensions)]
+
+ # 生成缩略图
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+ rgb_list = []
+ for file in file_list[:3]:
+ sub_dataset = gdal.Open('/vsizip/%s/%s' % (in_file, file))
+ sub_array = sub_dataset.ReadAsArray()
+ rgb_list.append(sub_array)
+ img_data = np.array([rgb_list[2], rgb_list[1], rgb_list[0]])
+ img_data = uint16to8(img_data)
+
+ # Array转Image
+ img_data2 = np.transpose(img_data, (1, 2, 0))
+ img_data2 = img_data2[:, :, ::-1]
+ img = Image.fromarray(img_data2)
+ # 压缩图片大小
+ if img_data.shape[1] > img_data.shape[2]:
+ width = 512
+ height = int(width / img_data.shape[1] * img_data.shape[2])
+ else:
+ height = 512
+ width = int(height / img_data.shape[1] * img_data.shape[2])
+ img.thumbnail((width, height))
+ img.save(ThumbnailPath, "PNG")
+
+ # 释放内存
+ del rgb_list
+ del img_data
+ del img_data2
+ del img
+
+ # 解压XML文件
+ if file_list[3].endswith('xfdumanifest.xml'):
+ # 生成XML文件
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ meta_data = zip_file.read(file_list[3])
+ with open(xmlPath, "wb") as fout:
+ fout.write(meta_data)
+
+ # 读取其他信息
+ CollectionCode = "Sentinel3_OLCI_L1"
+ meta_content = zip_file.open(file_list[3])
+ dom = minidom.parse(meta_content)
+ ProjectedCoordinates = ""
+ CloudPercent = ""
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('sentinel3:creationTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('sentinel3:receivingStartTime')[0].firstChild.data
+ StopTime = dom.getElementsByTagName('sentinel3:receivingStopTime')[0].firstChild.data
+
+ # 经纬度
+ TopLeftLatitude = dom.getElementsByTagName('sentinel-safe:y')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('sentinel-safe:x')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('sentinel-safe:y')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('sentinel-safe:x')[2].firstChild.data # 右上经度
+ BottomRightLatitude = dom.getElementsByTagName('sentinel-safe:y')[2].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('sentinel-safe:x')[2].firstChild.data # 右下经度
+ BottomLeftLatitude = dom.getElementsByTagName('sentinel-safe:y')[2].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('sentinel-safe:x')[0].firstChild.data # 左下经度
+
+ # boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ # f'{TopRightLongitude} {TopRightLatitude},' \
+ # f'{BottomRightLongitude} {BottomRightLatitude},' \
+ # f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ # f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 生成字典
+ S3_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "StopTime": StopTime,
+ "CloudPercent": CloudPercent,
+ "boundaryGeomStr": "",
+ "bands": "Oa01,Oa02,Oa03,Oa04,Oa05,Oa06,Oa07,Oa08,Oa09,Oa10,Oa11,Oa12,Oa13,Oa14,Oa15,Oa16,"
+ "Oa17,Oa18,Oa19,Oa20,Oa21",
+ # "NumberBands": '21',
+ "ImageGSD": "270,294",
+ "ProjectedCoordinates": ProjectedCoordinates,
+ "CollectionCode": CollectionCode,
+ "ThumbnailName": ThumbnailName,
+ "ThumbnailPath": ThumbnailPath,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ zip_file.close()
+ if not S3_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return S3_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetHJ1Data(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve HJ-1 (Huanjing-1) metadata.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input archive
+ :return: metadata dictionary
+ """
+ try:
+ with tarfile.open(in_file, mode='r') as tar_file:
+ in_path, basename = os.path.split(in_file)
+ for member in tar_file.getnames():
+ if member.endswith("THUMB.JPG"):
+ # 解压缩略图
+ tar_file.extract(member, thumbnail_path)
+ ThumbnailPath = thumbnail_path + "/" + member
+ ThumbnailName = member.split('/')[1]
+ elif member.endswith(".XML"):
+ # 解压XML文件
+ tar_file.extract(member, xml_path)
+ xmlPath = xml_path + "/" + member
+ xmlFileName = member.split('/')[1]
+
+ # 获取文件流
+ meta_file = tar_file.extractfile(member)
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ productDate = dom.getElementsByTagName('productDate')[0].firstChild.data
+ imagingStartTime = dom.getElementsByTagName('imagingStartTime')[0].firstChild.data # 开始时间
+ imagingStopTime = dom.getElementsByTagName('imagingStopTime')[0].firstChild.data # 结束时间
+
+ # 其他信息
+ pixelSpacing = dom.getElementsByTagName('pixelSpacing')[0].firstChild.data # 分辨率
+ # earthModel = dom.getElementsByTagName('earthModel')[0].firstChild.data # 投影
+ mapProjection = dom.getElementsByTagName('mapProjection')[0].firstChild.data # 投影坐标系
+ # zone = dom.getElementsByTagName('zone')[0].firstChild.data # 带号
+ bands = dom.getElementsByTagName('bands')[0].firstChild.data # 波段
+
+ # 中心经纬度
+ TopLeftLatitude = dom.getElementsByTagName('productUpperLeftLat')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('productUpperLeftLong')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('productUpperRightLat')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('productUpperRightLong')[0].firstChild.data # 右上经度
+ BottomLeftLatitude = dom.getElementsByTagName('productLowerLeftLat')[0].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('productLowerLeftLong')[0].firstChild.data # 左下经度
+ BottomRightLatitude = dom.getElementsByTagName('productLowerRightLat')[0].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('productLowerRightLong')[0].firstChild.data # 右下纬度
+
+ # 边界几何
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+ else:
+ continue
+ # 构建字典
+ hj1_dict = {"ProductTime": productDate,
+ "StartTime": imagingStartTime,
+ "EndTime": imagingStopTime,
+ "CloudPercent": "",
+ # "TopLeftLatitude": TopLeftLatitude,
+ # "TopLeftLongitude": TopLeftLongitude,
+ # "TopRightLatitude": TopRightLatitude,
+ # "TopRightLongitude": TopRightLongitude,
+ # "BottomLeftLatitude": BottomLeftLatitude,
+ # "BottomLeftLongitude": BottomLeftLongitude,
+ # "BottomRightLatitude": BottomRightLatitude,
+ # "BottomRightLongitude": BottomRightLongitude,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": bands,
+ "ImageGSD": pixelSpacing,
+ "ProjectedCoordinates": mapProjection,
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ # 关闭压缩文件
+ tar_file.close()
+ # 判断字典是否为空
+ if not hj1_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return hj1_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetZY02CData(in_file, xml_path, thumbnail_path):
+ """
+ Retrieve ZY-02C (Ziyuan-2) metadata.
+ :param thumbnail_path: thumbnail output directory
+ :param xml_path: XML output directory
+ :param in_file: input archive
+ :return: metadata dictionaries (MUX and PAN)
+ """
+ try:
+ zy2_mux_dict, zy2_pan_dict = dict(), dict()
+ in_path, basename = os.path.split(in_file)
+ with tarfile.open(in_file, mode='r') as tar_file:
+ for member in tar_file.getnames():
+ if member.endswith("MUX_thumb.jpg"):
+ # 解压多光谱缩略图
+ tar_file.extract(member, thumbnail_path)
+ ThumbnailPath_MUX = thumbnail_path + "/" + member
+ ThumbnailName_MUX = member
+ elif member.endswith("PAN_thumb.jpg"):
+ # 解压全色缩略图
+ tar_file.extract(member, thumbnail_path)
+ ThumbnailPath_PAN = thumbnail_path + "/" + member
+ ThumbnailName_PAN = member
+
+ if member.endswith('MUX.xml'):
+ # 解压XML文件
+ tar_file.extract(member, xml_path)
+ xmlPath = xml_path + "/" + member
+ xmlFileName = member
+
+ # 获取文件流
+ meta_file = tar_file.extractfile(member)
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # 其他信息
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率
+ MapProjection = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系
+ EarthEllipsoid = dom.getElementsByTagName('EarthEllipsoid')[0].firstChild.data # 地理坐标系
+ ZoneNo = dom.getElementsByTagName('ZoneNo')[0].firstChild.data # 投影分带带号
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖
+
+ # Corner coordinates
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度
+
+ # 几何边界
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 构建多光谱字典
+ zy2_mux_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ # "TopLeftLatitude": TopLeftLatitude,
+ # "TopLeftLongitude": TopLeftLongitude,
+ # "TopRightLatitude": TopRightLatitude,
+ # "TopRightLongitude": TopRightLongitude,
+ # "BottomRightLatitude": BottomRightLatitude,
+ # "BottomRightLongitude": BottomRightLongitude,
+ # "BottomLeftLatitude": BottomLeftLatitude,
+ # "BottomLeftLongitude": BottomLeftLongitude,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": MapProjection,
+ 'CollectionCode': "",
+ "ThumbnailPath": ThumbnailPath_MUX,
+ "ThumbnailName": ThumbnailName_MUX,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "day"}
+ elif member.endswith('PAN.xml'):
+ # 解压XML文件
+ tar_file.extract(member, xml_path)
+ xmlPath = xml_path + "/" + member
+ xmlFileName = member
+
+ # 获取文件流
+ meta_file = tar_file.extractfile(member)
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # 其他信息
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率
+ MapProjection = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系
+ EarthEllipsoid = dom.getElementsByTagName('EarthEllipsoid')[0].firstChild.data # 地理坐标系
+ ZoneNo = dom.getElementsByTagName('ZoneNo')[0].firstChild.data # 投影分带带号
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖
+
+ # Corner coordinates
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度
+
+ # 几何边界
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 构建全色字典
+ zy2_pan_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ # "TopLeftLatitude": TopLeftLatitude,
+ # "TopLeftLongitude": TopLeftLongitude,
+ # "TopRightLatitude": TopRightLatitude,
+ # "TopRightLongitude": TopRightLongitude,
+ # "BottomRightLatitude": BottomRightLatitude,
+ # "BottomRightLongitude": BottomRightLongitude,
+ # "BottomLeftLatitude": BottomLeftLatitude,
+ # "BottomLeftLongitude": BottomLeftLongitude,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": MapProjection,
+ 'CollectionCode': "",
+ "ThumbnailPath": ThumbnailPath_PAN,
+ "ThumbnailName": ThumbnailName_PAN,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "day"}
+ else:
+ continue
+ # 关闭压缩文件
+ tar_file.close()
+ # 判断字典是否为空
+ if (not zy2_mux_dict) or (not zy2_pan_dict):
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return zy2_mux_dict, zy2_pan_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
+
+def GetZY3Data(in_file, xml_path, thumbnail_path):
+ """
+ 获取资源3(ZY-3)卫星元数据:
+ :param thumbnail_path:
+ :param xml_path:
+ :param in_file:
+ :return: 元数据字典
+ """
+ try:
+ zy3_dict = dict()
+ with tarfile.open(in_file, mode='r') as tar_file:
+ in_path, basename = os.path.split(in_file)
+ for member in tar_file.getnames():
+ if member.endswith("thumb.jpg"):
+ # 解压缩略图
+ tar_file.extract(member, thumbnail_path)
+ ThumbnailPath = thumbnail_path + "/" + member
+ ThumbnailName = member
+ if not member.endswith('.xml'):
+ continue
+ elif member.endswith('order.xml'):
+ continue
+ else:
+ # 解压XML文件
+ tar_file.extract(member, xml_path)
+ xmlPath = xml_path + "/" + member
+ xmlFileName = member
+
+ # 获取文件流
+ meta_file = tar_file.extractfile(member)
+ meta_content = meta_file.read()
+ dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+ # 产品日期
+ ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+ StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+ EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+ # 其他信息
+ ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率
+ MapProjection = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系
+ EarthEllipsoid = dom.getElementsByTagName('EarthEllipsoid')[0].firstChild.data # 地理坐标系
+ ZoneNo = dom.getElementsByTagName('ZoneNo')[0].firstChild.data # 投影分带带号
+ Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段
+ CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖
+
+ # Corner coordinates
+ TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度
+ TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度
+ TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度
+ TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度
+ BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度
+ BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度
+ BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度
+ BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度
+
+ # 边界几何
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+ # 构建字典
+ zy3_dict = {"ProduceTime": ProduceTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": CloudPercent,
+ # "TopLeftLatitude": TopLeftLatitude,
+ # "TopLeftLongitude": TopLeftLongitude,
+ # "TopRightLatitude": TopRightLatitude,
+ # "TopRightLongitude": TopRightLongitude,
+ # "BottomRightLatitude": BottomRightLatitude,
+ # "BottomRightLongitude": BottomRightLongitude,
+ # "BottomLeftLatitude": BottomLeftLatitude,
+ # "BottomLeftLongitude": BottomLeftLongitude,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": MapProjection,
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "month"}
+ # 关闭压缩文件
+ tar_file.close()
+ # 判断是否为空
+ if not zy3_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ return zy3_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
+
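+# The corner-to-WKT pattern above repeats in every parser. The helper below is an
+# illustrative sketch only (it is not called by the existing functions); it builds the
+# same closed POLYGON ring from four corner points given in longitude-latitude order:
+def build_boundary_wkt(tl_lon, tl_lat, tr_lon, tr_lat, br_lon, br_lat, bl_lon, bl_lat):
+ return (f'POLYGON(({tl_lon} {tl_lat},'
+ f'{tr_lon} {tr_lat},'
+ f'{br_lon} {br_lat},'
+ f'{bl_lon} {bl_lat},'
+ f'{tl_lon} {tl_lat}))')
+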
+
+if __name__ == '__main__':
+ HJ1FilePath = r"Y:\不同传感器数据\HJ-1\HJ1A-CCD2-450-80-20090501-L20000106616.tar.gz"
+ JPSSFilePath = r"Y:\不同传感器数据\JPSS\VJ102IMG.A2021159.0542.002.2021159094907.nc"
+ ZY2FilePath = r"Y:\不同传感器数据\ZY-2\ZY02C_PMS_E115.9_N36.2_20120422_L2C0000391981.tar.gz"
+ ZY3FilePath = r"Y:\不同传感器数据\ZY-3\ZY3_MUX_E83.3_N43.3_20120405_L2A0000301226.tar.gz"
+
+ S1FilePath = r'Y:\不同传感器数据\SENTINEL-1\S1A_IW_GRDH_1SDV_20210407T095634_20210407T095659_037343_046675_8E66.zip'
+ S2FilePath = r'Y:\不同传感器数据\SENTINEL-2\S2B_MSIL2A_20210804T024549_N0301_R132_T50SQF_20210804T053331.zip'
+ GF1PMSPath = r'Y:\不同传感器数据\GF-1\GF1_PMS2_E104.1_N36.6_20210308_L1A0005524847.tar.gz'
+ H08FilePath = r"Y:\不同传感器数据\葵花8\NC_H08_20210802_2010_R21_FLDK.06001_06001.nc"
+ SNPPFilePath = r"Y:\不同传感器数据\VIIRS\VNP02IMG.A2021182.0418.001.2021182100800.nc"
+
+ GF3MDJPath = r'Y:\不同传感器数据\GF-3\GF3_MDJ_SS_024986_E120.8_N35.6_20210509_L1A_VHVV_L10005638033.tar.gz'
+ GF4PMIPath = r'Y:\不同传感器数据\GF-4\GF4_PMI_E119.8_N35.3_20210908_L1A0000417337.tar.gz'
+ S3OLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \
+ r'\S3B_OL_1_EFR____20210910T022645_20210910T022945_20210911T064342_0179_056_374_2340_LN1_O_NT_002.zip'
+ S3SLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \
+ r'\S3A_SL_1_RBT____20210916T020956_20210916T021256_20210917T120953_0179_076_217_2340_LN2_O_NT_004.zip'
+ # Example output directories for the parsed XML files and thumbnails (placeholder paths)
+ xml_dir = r"Y:\xml"
+ thumbnail_dir = r"Y:\thumbnail"
+
+ # Parse HJ-1 metadata
+ hj1_dic = GetHJ1Data(HJ1FilePath, xml_dir, thumbnail_dir)
+ print(hj1_dic)
+ # Parse JPSS metadata
+ jpss_dic = GetJPSSData(JPSSFilePath, xml_dir, thumbnail_dir)
+ print(jpss_dic)
+ # Parse ZY-02C metadata
+ zy2_mux_dic, zy2_pan_dic = GetZY02CData(ZY2FilePath, xml_dir, thumbnail_dir)
+ print(zy2_mux_dic)
+ print(zy2_pan_dic)
+ # Parse ZY-3 metadata
+ zy3_dic = GetZY3Data(ZY3FilePath, xml_dir, thumbnail_dir)
+ print(zy3_dic)
+
+ # Parse GF-1 PMS metadata
+ pms_mss_dic, pms_pan_dic = GetGFPMSData(GF1PMSPath, xml_dir, thumbnail_dir)
+ print(pms_mss_dic)
+ print(pms_pan_dic)
+ # Parse Himawari-8 metadata
+ h8_dic = GetH08Data(H08FilePath, xml_dir, thumbnail_dir)
+ print(h8_dic)
+ # Parse Sentinel-2 metadata
+ s2_dic = GetSentinel2Data(S2FilePath, xml_dir, thumbnail_dir)
+ print(s2_dic)
+ # Parse Sentinel-1 metadata
+ s1_dic = GetSentinel1Data(S1FilePath, xml_dir, thumbnail_dir)
+ print(s1_dic)
+ # Parse SNPP metadata
+ snpp_dic = GetSNPPData(SNPPFilePath, xml_dir, thumbnail_dir)
+ print(snpp_dic)
+
+ # Parse GF-3 metadata
+ gf3_dic = GetGF3MDJData(GF3MDJPath, xml_dir, thumbnail_dir)
+ print(gf3_dic)
+ # Parse GF-4 metadata
+ gf4_pms_dic, gf4_irs_dic = GetGF4PMIData(GF4PMIPath, xml_dir, thumbnail_dir)
+ print(gf4_pms_dic)
+ print(gf4_irs_dic)
+
+ # Parse Sentinel-3 OLCI metadata
+ s3ol_dic = GetSentinel3OLData(S3OLFilePath, xml_dir, thumbnail_dir)
+ print(s3ol_dic)
+ # # Parse Sentinel-3 SLSTR metadata
+ # s3sl_dic = GetSentinel3SLData(S3SLFilePath, xml_dir, thumbnail_dir)
+ # print(s3sl_dic)
diff --git a/scan_data/__init__.py b/scan_data/__init__.py
new file mode 100644
index 0000000..09796cd
--- /dev/null
+++ b/scan_data/__init__.py
@@ -0,0 +1,5 @@
+"""
+Author : XinYi Song
+Time : 2021/11/19 14:05
+Desc:
+"""
diff --git a/scan_data/__pycache__/GetMetaInfo.cpython-39.pyc b/scan_data/__pycache__/GetMetaInfo.cpython-39.pyc
new file mode 100644
index 0000000..5e6cf27
Binary files /dev/null and b/scan_data/__pycache__/GetMetaInfo.cpython-39.pyc differ
diff --git a/scan_data/__pycache__/__init__.cpython-39.pyc b/scan_data/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..0af127e
Binary files /dev/null and b/scan_data/__pycache__/__init__.cpython-39.pyc differ
diff --git a/scan_data/__pycache__/example.cpython-39.pyc b/scan_data/__pycache__/example.cpython-39.pyc
new file mode 100644
index 0000000..d9cf58b
Binary files /dev/null and b/scan_data/__pycache__/example.cpython-39.pyc differ
diff --git a/scan_data/example.py b/scan_data/example.py
new file mode 100644
index 0000000..101bb41
--- /dev/null
+++ b/scan_data/example.py
@@ -0,0 +1,165 @@
+from xml.dom import minidom
+from osgeo import gdal
+from PIL import Image
+import numpy as np
+import os
+
+
+def uint16to8(bands, lower_percent=0.001, higher_percent=99.999):
+ """
+ 拉伸图像:图片16位转8位
+ :param bands: 输入栅格数据
+ :param lower_percent: 最低百分比
+ :param higher_percent: 最高百分比
+ :return:
+ """
+ out = np.zeros_like(bands, dtype=np.uint8)
+ n = bands.shape[0]
+ for i in range(n):
+ a = 0 # np.min(band)
+ b = 255 # np.max(band)
+ c = np.percentile(bands[i, :, :], lower_percent)
+ d = np.percentile(bands[i, :, :], higher_percent)
+ t = a + (bands[i, :, :] - c) * (b - a) / (d - c)
+ t[t < a] = a
+ t[t > b] = b
+ out[i, :, :] = t
+ return out
+
+
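+# A minimal usage sketch for uint16to8; illustrative only and not called anywhere in this
+# module. It stretches a synthetic 3-band uint16 array and checks the 8-bit output:
+def _demo_uint16to8():
+ demo = (np.random.rand(3, 64, 64) * 65535).astype(np.uint16)
+ stretched = uint16to8(demo)
+ print(stretched.dtype, stretched.min(), stretched.max()) # uint8, values inside 0-255
+
+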
+def createXML(metadata, xlm_file):
+ """
+ 创建xlm文件并写入字典
+ :param metadata: 元数据信息
+ :param xlm_file: xlm文件
+ :return:
+ """
+ # 创建一个空的文档
+ document = minidom.Document() # 创建DOM文档对象
+ # 创建一个根节点对象
+ root = document.createElement('ProductMetaData')
+ # 设置根节点的属性
+ # root.setAttribute('', '')
+ # 将根节点添加到文档对象中
+ document.appendChild(root)
+ # 字典转xml
+ for key in metadata:
+ # 创建父节点
+ node_name = document.createElement(key)
+ # 给父节点设置文本
+ node_name.appendChild(document.createTextNode(str(metadata[key])))
+ # 将各父节点添加到根节点
+ root.appendChild(node_name)
+ # Write the XML document to disk
+ with open(xlm_file, 'w', encoding='utf-8') as f:
+ document.writexml(f, indent='\t', newl='\n', addindent='\t', encoding='utf-8')
+
+
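+# A small round-trip sketch for createXML; illustrative only, and the output file name
+# below is a placeholder rather than a path used elsewhere in the project:
+def _demo_createXML():
+ sample = {"StartTime": "2021-06-08 05:42:00", "bands": "I1 I2 I3"}
+ createXML(sample, "demo_metadata.xml") # writes <ProductMetaData> with one child node per key
+
+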
+def GetJPSSData(in_file, xml_path, thumbnail_path):
+ """
+ 获取联合极轨卫星系统(JPSS-1)元数据:NOAA-20(Joint Polar Satellite System spacecraft)
+ :param xml_path:
+ :param thumbnail_path:
+ :param in_file:
+ :return: 元数据字典
+ """
+ try:
+ # 生成缩略图
+ in_path, basename = os.path.split(in_file)
+ ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+ ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+ gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+ in_datasets = gdal.Open(in_file)
+ meta_data = in_datasets.GetMetadata()
+ # 取出子数据集
+ datasets = in_datasets.GetSubDatasets()
+ red_data = gdal.Open(datasets[0][0]).ReadAsArray()
+ nir_data = gdal.Open(datasets[3][0]).ReadAsArray()
+ swir_data = gdal.Open(datasets[9][0]).ReadAsArray()
+ img_data = np.array([red_data, nir_data, swir_data])
+ img_data = uint16to8(img_data)
+ # Array转Image
+ img_data2 = np.transpose(img_data, (1, 2, 0))
+ img_data2 = img_data2[:, :, ::-1]
+ img = Image.fromarray(img_data2)
+ # Shrink the image so that its longer side is at most 512 pixels
+ if img_data.shape[1] > img_data.shape[2]:
+ height = 512
+ width = int(height / img_data.shape[1] * img_data.shape[2])
+ else:
+ width = 512
+ height = int(width / img_data.shape[2] * img_data.shape[1])
+ img.thumbnail((width, height))
+ img.save(ThumbnailPath, "JPEG")
+
+ # 释放内存
+ del in_datasets
+ del img_data
+ del img_data2
+ del img
+
+ # 生成XML文件
+ xmlFileName = os.path.splitext(basename)[0] + ".xml"
+ xmlPath = os.path.join(xml_path, xmlFileName)
+ createXML(meta_data, xmlPath)
+
+ # 产品日期
+ ProductionTime = meta_data['ProductionTime']
+ StartTime = meta_data['StartTime']
+ EndTime = meta_data['EndTime']
+
+ # 其他信息
+ ImageGSD = str(meta_data['LongName']).split(" ")[-1]
+ Bands = str(meta_data['title']).split(" ")[1]
+
+ # Corner coordinates (taken from the bounding-box metadata)
+ productUpperLeftLat = meta_data['NorthBoundingCoordinate'] # upper-left latitude
+ productUpperLeftLong = meta_data['WestBoundingCoordinate'] # upper-left longitude
+ productUpperRightLat = meta_data['NorthBoundingCoordinate'] # upper-right latitude
+ productUpperRightLong = meta_data['EastBoundingCoordinate'] # upper-right longitude
+ productLowerLeftLat = meta_data['SouthBoundingCoordinate'] # lower-left latitude
+ productLowerLeftLong = meta_data['WestBoundingCoordinate'] # lower-left longitude
+ productLowerRightLat = meta_data['SouthBoundingCoordinate'] # lower-right latitude
+ productLowerRightLong = meta_data['EastBoundingCoordinate'] # lower-right longitude
+
+ # 边界几何
+ boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \
+ f'{productUpperRightLong} {productUpperRightLat},' \
+ f'{productLowerRightLong} {productLowerRightLat},' \
+ f'{productLowerLeftLong} {productLowerLeftLat},' \
+ f'{productUpperLeftLong} {productUpperLeftLat}))'
+
+ # 构建字典
+ jpss_dict = {"ProduceTime": ProductionTime,
+ "StartTime": StartTime,
+ "EndTime": EndTime,
+ "CloudPercent": "",
+ # "TopLeftLatitude": productUpperLeftLat,
+ # "TopLeftLongitude": productUpperLeftLong,
+ # "TopRightLatitude": productUpperRightLat,
+ # "TopRightLongitude": productUpperRightLong,
+ # "BottomLeftLatitude": productLowerLeftLat,
+ # "BottomLeftLongitude": productLowerLeftLong,
+ # "BottomRightLatitude": productLowerRightLat,
+ # "BottomRightLongitude": productLowerRightLong,
+ "boundaryGeomStr": boundaryGeomStr,
+ "bands": Bands,
+ "ImageGSD": ImageGSD,
+ "ProjectedCoordinates": "",
+ "CollectionCode": "",
+ "ThumbnailPath": ThumbnailPath,
+ "ThumbnailName": ThumbnailName,
+ "xmlPath": xmlPath,
+ "xmlFileName": xmlFileName,
+ "DirectoryDepth": "day"}
+
+ # 判断字典是否为空
+ if not jpss_dict:
+ return {"code": -1, "msg": "没有满足条件的数据字典..."}
+ print(jpss_dict)
+ return jpss_dict
+ except Exception as e:
+ print(str(e))
+ return {"code": -1, "msg": str(e)}
\ No newline at end of file
diff --git a/util/__init__.py b/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/util/__pycache__/__init__.cpython-39.pyc b/util/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..a60cc7a
Binary files /dev/null and b/util/__pycache__/__init__.cpython-39.pyc differ
diff --git a/util/__pycache__/file_store_path.cpython-39.pyc b/util/__pycache__/file_store_path.cpython-39.pyc
new file mode 100644
index 0000000..c53f3ad
Binary files /dev/null and b/util/__pycache__/file_store_path.cpython-39.pyc differ
diff --git a/util/__pycache__/http_file_upload.cpython-39.pyc b/util/__pycache__/http_file_upload.cpython-39.pyc
new file mode 100644
index 0000000..6b6fa5a
Binary files /dev/null and b/util/__pycache__/http_file_upload.cpython-39.pyc differ
diff --git a/util/__pycache__/http_util.cpython-39.pyc b/util/__pycache__/http_util.cpython-39.pyc
new file mode 100644
index 0000000..d572780
Binary files /dev/null and b/util/__pycache__/http_util.cpython-39.pyc differ
diff --git a/util/__pycache__/remote_sensing_util.cpython-39.pyc b/util/__pycache__/remote_sensing_util.cpython-39.pyc
new file mode 100644
index 0000000..9adc1e8
Binary files /dev/null and b/util/__pycache__/remote_sensing_util.cpython-39.pyc differ
diff --git a/util/__pycache__/scan_file_util.cpython-39.pyc b/util/__pycache__/scan_file_util.cpython-39.pyc
new file mode 100644
index 0000000..25bb17f
Binary files /dev/null and b/util/__pycache__/scan_file_util.cpython-39.pyc differ
diff --git a/util/__pycache__/xml_util.cpython-39.pyc b/util/__pycache__/xml_util.cpython-39.pyc
new file mode 100644
index 0000000..6e35090
Binary files /dev/null and b/util/__pycache__/xml_util.cpython-39.pyc differ
diff --git a/util/copy_util.py b/util/copy_util.py
new file mode 100644
index 0000000..140470f
--- /dev/null
+++ b/util/copy_util.py
@@ -0,0 +1,42 @@
+"""
+Author : XinYi Song
+Time : 2021/10/13 10:13
+Desc: 复制文件
+"""
+import os
+from shutil import copy
+
+
+# 将文件复制到另一个文件夹中
+def copyToDir(from_path, to_path):
+ # 如果 to_path 目录不存在,则创建
+ if not os.path.isdir(to_path):
+ os.makedirs(to_path)
+ copy(from_path, to_path)
+
+
+# 将一个文件夹中所有的文件复制到另一个文件夹中
+def copyToDirAll(path, path_two):
+ """
+ :param path: 路径1
+ :param path_two: 路径2
+ :return:
+ """
+ if os.path.isdir(path) and os.path.isdir(path_two): # both arguments must be directories
+ entries = os.listdir(path) # list the entries of the source directory
+ for i in entries:
+ po = os.path.join(path, i) # source file path
+ po_two = os.path.join(path_two, i) # destination file path
+ with open(po, "rb") as f:
+ res_one = f.read()
+ with open(po_two, "wb") as out_file: # separate name, avoids shadowing the entry list
+ out_file.write(res_one)
+ print("{} copied successfully".format(i))
+ else:
+ print("not a directory")
+
+
+if __name__ == '__main__':
+ path1 = 'D:/img'
+ path_two1 = 'D:/image'
+ copyToDirAll(path1, path_two1)
diff --git a/util/file_store_path.py b/util/file_store_path.py
new file mode 100644
index 0000000..439bc99
--- /dev/null
+++ b/util/file_store_path.py
@@ -0,0 +1,84 @@
+"""
+Author : XinYi Song
+Time : 2021/11/4 9:27
+Desc:
+"""
+import datetime
+import os
+import time
+
+
+def file_store_path(time_stamp):
+ """
+ :param time_stamp: 时间戳类型时间
+ :return:
+ """
+ now = int(round(time_stamp * 1000))
+ t = time.localtime(now / 1000)
+ return os.path.join('E:/data/upload', str(t[0]), str(t[1]), str(t[2]))
+
+
+def file_store_path_year(data_str_time, upload_path):
+ """
+ 目录到年
+ :param upload_path:
+ :param data_str_time: 字符串类型
+ :return:
+ """
+ t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
+ return os.path.join(upload_path, str(t[0]))
+
+
+def file_store_path_month(data_str_time, upload_path):
+ """
+ 目录到月
+ :param upload_path:
+ :param data_str_time:
+ :return:
+ """
+ t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
+ return os.path.join(upload_path, str(t[0]), str(t[1]))
+
+
+def file_store_path_day(data_str_time, upload_path):
+ """
+ 目录到日
+ :param upload_path:
+ :param data_str_time: 字符串类型的时间
+ :return:
+ """
+ t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
+ return os.path.join(upload_path, str(t[0]), str(t[1]), str(t[2]))
+
+
+if __name__ == '__main__':
+ # time_stamp1 = time.time()
+ # print(time_stamp1)
+ str_time = '2020-06-08 09:33:07'
+ t = time.strptime(str_time, '%Y-%m-%d %H:%M:%S')
+ # path = os.path.join('../upload', str(t[0]), str(t[1]), str(t[2]))
+ # if not os.path.exists(path):
+ # os.makedirs(path)
+ # print(path)
+ # time_stamp = float(time_stamp1)
+ # now = int(round(time_stamp * 1000))
+ # t = time.localtime(now / 1000)
+ # print(t)
+
+ # list1 = ['张三', '李四']
+ # token_s = dms_login()
+ # dms_list = dms_sensing_data(token_s)
+ # 数据库返回值
+ # list2 = ['张三', '李四']
+
+ # d = [y for y in list2 if y not in list1]
+ # if d is None or len(d) == 0:
+ # print("d为空")
+ # else:
+ # print(d)
+ # file_dir = 'C:/Users/HP/Desktop/数管/'
+ # dir_list = os.listdir(file_dir)
+ # print(dir_list)
+ # timestring = '2016-12-21 10:22:56'
+ # print(time.mktime(time.strptime(timestring, '%Y-%m-%d %H:%M:%S'))) # 1482286976.0
+ print(t)
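+ # Illustrative check of the depth helpers; the upload root is an example value:
+ print(file_store_path_year(str_time, 'E:/data/upload')) # e.g. E:/data/upload\2020
+ print(file_store_path_month(str_time, 'E:/data/upload')) # e.g. E:/data/upload\2020\6
+ print(file_store_path_day(str_time, 'E:/data/upload')) # e.g. E:/data/upload\2020\6\8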
diff --git a/util/http_file_upload.py b/util/http_file_upload.py
new file mode 100644
index 0000000..23bb749
--- /dev/null
+++ b/util/http_file_upload.py
@@ -0,0 +1,86 @@
+"""
+Author : XinYi Song
+Time : 2021/11/3 14:29
+Desc:
+"""
+from util.file_store_path import file_store_path_day, file_store_path_year, file_store_path_month
+
+"""
+实现文件断点续传
+"""
+import sys
+import os
+from hashlib import md5
+
+FILE_DIR = os.path.dirname(__file__)
+
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+home = os.path.join(BASE_DIR, "E:/data/upload")
+
+
+# 定义一个函数,计算进度条
+def bar(num=1, sum=100):
+ rate = float(num) / float(sum)
+ rate_num = int(rate * 100)
+ temp = '\r%d %%' % rate_num
+ sys.stdout.write(temp)
+
+
+def md5_file(name):
+ m = md5()
+ # Read in binary mode and hash in chunks so large remote-sensing archives need not fit in memory
+ with open(name, 'rb') as a_file:
+ for chunk in iter(lambda: a_file.read(1024 * 1024), b''):
+ m.update(chunk)
+ return m.hexdigest()
+
+
+def upload_client(local_path, depth, dateTime):
+ global file_path
+ while True:
+ file_byte_size = os.stat(local_path).st_size # 获取文件的大小
+ file_name = os.path.basename(local_path) # 设置文件名
+ md5 = md5_file(local_path)
+
+ has_sent = 0
+ file_obj = open(local_path, 'rb') # 对文件进行读操作
+ file_obj.seek(has_sent) # 调整指针
+ if depth == 'year':
+ file_path = file_store_path_year(dateTime, home)
+ if not os.path.exists(file_path):
+ os.makedirs(file_path)
+ if depth == 'month':
+ file_path = file_store_path_month(dateTime, home)
+ if not os.path.exists(file_path):
+ os.makedirs(file_path)
+ if depth == 'day':
+ file_path = file_store_path_day(dateTime, home)
+ if not os.path.exists(file_path):
+ os.makedirs(file_path)
+ path = os.path.join(file_path, file_name)
+ has_received = 0
+
+ # Open the target file for writing; an existing file at this path is overwritten
+ f = open(path, 'wb')
+
+ while has_sent < file_byte_size:
+ # Read a chunk from the source file
+ data = file_obj.read(1024)
+ if not data:
+ break
+ # Write the chunk to the target file
+ f.write(data)
+ has_received += len(data)
+ has_sent += len(data)
+ bar(has_sent, file_byte_size) # progress bar
+ print("文件上传成功!")
+ file_obj.close()
+ f.close()
+ file_dict = {'fileName': file_name, 'md5': md5, 'file_size': file_byte_size, 'file_path': file_path, 'type': 'ok'}
+ return file_dict
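+
+
+if __name__ == '__main__':
+ # Usage sketch: the source file below is a placeholder, replace it with a real archive;
+ # 'day' stores the copy under <home>/<year>/<month>/<day> based on the timestamp string.
+ demo_file = 'D:/img/demo.tar.gz'
+ if os.path.exists(demo_file):
+ result = upload_client(demo_file, 'day', '2021-11-03 14:29:00')
+ print(result)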
diff --git a/util/http_util.py b/util/http_util.py
new file mode 100644
index 0000000..38dc858
--- /dev/null
+++ b/util/http_util.py
@@ -0,0 +1,158 @@
+# 导入requests包
+import json
+import os
+from typing import Dict
+
+import requests
+from requests_toolbelt import MultipartEncoder
+
+
+class httpUtil(object):
+
+ def __init__(self, *, url: str, params: Dict = None, data: Dict = None, file_path: str = None, token: str = None):
+ self.url = url
+ self.params = params
+ self.file_path = file_path
+ self.data = data
+ self.token = token
+
+ def get_no_param(self):
+ """
+ get请求, 无参数
+ :return:
+ """
+ # 发送请求,获取响应, res即返回的响应对象
+ res = requests.get(url=self.url)
+ return res
+
+ def get(self):
+ """
+ get请求
+ :return:
+ """
+ # 发送请求,获取响应, Res即返回的响应对象
+ try:
+ res = requests.get(url=self.url, params=self.params)
+ return res
+ except:
+ return -1
+
+ def get_herder(self):
+ """
+ get请求,带token
+ :return:
+ """
+ # 发送请求,获取响应, Res即返回的响应对象
+ try:
+ headers = {
+ "Authorization": self.token
+ }
+ res = requests.get(url=self.url, params=self.params, headers=headers)
+ return res
+ except:
+ return -1
+
+ def post_no_patam(self):
+ """
+ post请求
+ :return:
+ """
+ # 多行文本, 字符串格式,也可以单行(注意外层有引号,为字符串) data = '{"name": "hanzhichao", "age": 18}'
+ # data支持字典或字符串
+ data = json.dumps(self.data)
+ res = requests.post(url=self.url, data=data)
+ return res
+
+ def post_no_patam_herder(self):
+ """
+ post请求
+ :return:
+ """
+ # 多行文本, 字符串格式,也可以单行(注意外层有引号,为字符串) data = '{"name": "hanzhichao", "age": 18}'
+ # data支持字典或字符串
+ headers = {
+ "Content-Type": "application/json",
+ "Authorization": self.token
+ }
+ res = requests.post(url=self.url, data=json.dumps(self.data), headers=headers)
+ return res
+
+ def post_patam_herder(self):
+ """
+ post请求
+ :return:
+ """
+ # 多行文本, 字符串格式,也可以单行(注意外层有引号,为字符串) data = '{"name": "hanzhichao", "age": 18}'
+ # data支持字典或字符串
+ headers = {
+ "Authorization": self.token
+ }
+ res = requests.post(url=self.url, params=self.params, headers=headers)
+ return res
+
+ def post(self):
+ """
+ post请求
+ :return:
+ """
+ # 多行文本, 字符串格式,也可以单行(注意外层有引号,为字符串) data = '{"name": "hanzhichao", "age": 18}'
+ # data支持字典或字符串
+ res = requests.post(url=self.url, data=json.dumps(self.data), params=self.params)
+ return res
+
+ def post_param(self):
+ """
+ post请求
+ :return:
+ """
+ # 多行文本, 字符串格式,也可以单行(注意外层有引号,为字符串) data = '{"name": "hanzhichao", "age": 18}'
+ # data支持字典或字符串
+ res = requests.post(url=self.url, params=self.params)
+ return res
+
+ def post_file(self):
+ """
+ 发送文件
+ """
+ filepath, filename = os.path.split(self.file_path)
+ payload = {
+ 'file': (filename, open(self.file_path, 'rb'))
+ }
+ m = MultipartEncoder(payload)
+ headers = {
+ "Content-Type": m.content_type,
+ "other-keys": "other-values"
+ }
+ res = requests.post(url=self.url, data=m, params=self.params, headers=headers)
+ return res
+
+
+if __name__ == '__main__':
+ res = httpUtil(url='http://192.168.2.105:8820/api/login',
+ params={"userName": "client1", "password": "sxy1998"}).post_param()
+ print(res.json()['data'])
+
+ token_s = res.json()['data']
+ # res1 = httpUtil(url='http://192.168.2.105:8820/api/data-storage-task-record/get/collection-code/revision',
+ # params={"collectionCode": "GF4-0001", "revision": 1}, token=token_s).get_herder()
+ # print(res1.json())
+ #
+ # res2 = httpUtil(url='http://192.168.2.105:8820/api/remote-sensing-data/get/collection-code',
+ # params={"collectionCode": "GF4-0001"}, token=token_s).get_herder()
+ # print(res2.json())
+
+ res3 = httpUtil(url='http://192.168.2.105:8820/api/dic-remote-sensing-data/get/code',
+ params={"code": "GF4-0001"}, token=token_s).get_herder()
+ print(res3.json())
+ # res = httpUtil(url='http://192.168.2.105:8820/api/data-storage-task-record/add',
+ # data={"clientCode": "client1", "collectionCode": "GF4-0001", "storageFileList": "file_total_name",
+ # "storageFileSizeList": "file_total_name", "remarks": ""}, token=token_s).post_no_patam_herder()
+ # print(res.json())
+ # res = httpUtil(url='http://192.168.2.105:8820/api/data-storage-task-record/end',
+ # params={"taskCode": 6856963234793680896}, token=token_s).post_patam_herder()
+ # print(res.json())
+ # header = {"Authorization": token_s}
+ # res = requests.post(url='http://192.168.2.105:8820/api/data-storage-task-record/end',
+ # params={"taskCode": 6856963234793680896}, headers=header).json()
+ # print(res)
diff --git a/util/json_util.py b/util/json_util.py
new file mode 100644
index 0000000..bf85f98
--- /dev/null
+++ b/util/json_util.py
@@ -0,0 +1,44 @@
+import json
+import os
+
+
+def write_info(file_name, file_info):
+ dir = os.path.dirname(file_name)
+ if not os.path.exists(dir):
+ os.makedirs(dir)
+ with open('{}.json'.format(file_name), 'w', encoding='UTF-8') as fp:
+ json.dump(file_info, fp, indent=4, sort_keys=False)
+
+
+def read_json(file_path):
+ with open(file_path, 'r') as f:
+ data = json.load(f)
+ return data
+
+
+report_data = {"project_no": "628740635893760", "img_path": "2000.png",
+ "create_time": "2021-06-10T11:17:12.202000+00:00", "labels": [
+ {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
+ "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
+ "point": [{"x": 114.04715127701375, "y": 53.04518664047151}, {"x": 196.2671905697446, "y": 53.04518664047151},
+ {"x": 196.2671905697446, "y": 149.4106090373281},
+ {"x": 114.04715127701375, "y": 149.4106090373281}]},
+ {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
+ "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
+ "point": [{"x": 284.67583497053045, "y": 64.53831041257367}, {"x": 401.3752455795678, "y": 64.53831041257367},
+ {"x": 401.3752455795678, "y": 266.1100196463654},
+ {"x": 284.67583497053045, "y": 266.1100196463654}]},
+ {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
+ "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
+ "point": [{"x": 501.2770137524558, "y": 148.52652259332024}, {"x": 623.2809430255403, "y": 148.52652259332024},
+ {"x": 623.2809430255403, "y": 320.0392927308448},
+ {"x": 501.2770137524558, "y": 320.0392927308448}]}]}
+
+if __name__ == "__main__":
+ path = os.path.abspath(os.path.dirname(__file__))
+ print(path)
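+ # Round-trip sketch using the sample report_data above; note that running it writes
+ # report_demo.json next to this module:
+ write_info(os.path.join(path, 'report_demo'), report_data)
+ print(read_json(os.path.join(path, 'report_demo.json'))['project_no'])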
+ # write_info('', dict(report_data))
+ # read_json('d://report.json')
+ # s = '/group1/628740635893760/ori/images/7 (1).png'
+ # s1 = s.replace('images', 'labels')
+ # print(s[2])
diff --git a/util/md5_util.py b/util/md5_util.py
new file mode 100644
index 0000000..a9e0cb4
--- /dev/null
+++ b/util/md5_util.py
@@ -0,0 +1,16 @@
+import hashlib
+
+
+class Md5Util(object):
+
+ def __init__(self, *, salt: str, password: str):
+ self.salt = salt
+ self.password = password
+
+ def md5(self):
+ # 实例化对象, 加盐
+ obj = hashlib.md5(self.salt.encode('utf-8'))
+ # 加密密码
+ obj.update(self.password.encode('utf-8'))
+ # 提取密码,返回
+ return obj.hexdigest()
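+
+
+if __name__ == '__main__':
+ # Usage sketch with throw-away example values for the salt and password:
+ print(Md5Util(salt='demo-salt', password='demo-password').md5())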
diff --git a/util/remote_sensing_util.py b/util/remote_sensing_util.py
new file mode 100644
index 0000000..18440ff
--- /dev/null
+++ b/util/remote_sensing_util.py
@@ -0,0 +1,91 @@
+"""
+Author : XinYi Song
+Time : 2021/11/4 16:59
+Desc:
+"""
+import rasterio
+import requests
+
+from util.xml_util import xml_to_dict, dict_to_xml
+
+
+def gf4_pmi_001(file_name, xml_name):
+ """
+
+ :param file_name: 扫描文件传过来的遥感数据源文件的路径
+ :param xmlPath: 解析出的xml文件存储的路径
+ :param ThumbnailPath: 解析出的缩略图文件存储的路径
+ :return:
+ """
+ file_path = 'E:/sensing/GF4_PMI_001/'
+ with rasterio.open(file_path+file_name, 'r') as ds:
+ # 存放xml,缩略图的文件夹,由根文件夹+数据集代码命名的文件夹组成
+ print('该栅格数据的基本数据集信息:')
+ CollectionCode = 'GF4_PMI_001' # 数据集代码
+ DataFormat = ds.driver # DataFormat 数据格式
+ NumberBands = ds.count # NumberBands 波段数目
+ ImageWidth = ds.width # ImageWidth 影像宽度
+ ImageHeight = ds.height # ImageHeight 影像高度
+ GeographicScope = ds.bounds # GeographicScope 地理范围
+ ReflectionParameter = ds.transform # ReflectionParameter affine transform parameters (six-parameter model)
+ ProjectionDefinition = ds.crs # ProjectionDefinition 投影定义
+ # print(CRS.from_epsg(4326))
+ # 获取第一个波段数据,跟GDAL一样索引从1开始
+ # 直接获得numpy.ndarray类型的二维数组表示,如果read()函数不加参数,则得到所有波段(第一个维度是波段)
+ band1 = ds.read(1)
+ FirstBindMax = band1.max() # FirstBindMax 第一波段的最大值
+
+ FirstBindMin = band1.min() # FirstBindMin 第一波段的最小值
+ FirstBindAverage = band1.mean() # FirstBindAverage 第一波段的平均值
+ # 根据地理坐标得到行列号
+ x, y = (ds.bounds.left + 300, ds.bounds.top - 300) # 距离左上角东300米,南300米的投影坐标
+ row, col = ds.index(x, y) # 对应的行列号
+ print(f'(投影坐标{x}, {y})对应的行列号是({row}, {col})')
+ ProjectedCoordinates = x, y # ProjectedCoordinates 投影坐标
+ RowNumber = row, col # RowNumber 对应的行列号
+ # 根据行列号得到地理坐标
+ x, y = ds.xy(row, col) # 中心点的坐标
+ print(f'行列号({row}, {col})对应的中心投影坐标是({x}, {y})') #
+ CenterProjectionCoordinates = x, y # CenterProjectionCoordinates 中心投影坐标
+ # 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
+ # 传入xml文件路径,解析xml文件
+ # xml_name 存储后的xml文件的路径+文件名
+ xml_dict = xml_to_dict(file_path+xml_name)
+ StartTime = xml_dict['ProductMetaData']['StartTime'] # 开始采集时间
+ EndTime = xml_dict['ProductMetaData']['EndTime'] # 结束采集时间
+ CloudPercent = xml_dict['ProductMetaData']['CloudPercent'] # 云覆盖量百分比
+ TopLeftLatitude = xml_dict['ProductMetaData']['TopLeftLatitude'] # 左上纬度
+ TopLeftLongitude = xml_dict['ProductMetaData']['TopLeftLongitude'] # 左上经度
+ TopRightLatitude = xml_dict['ProductMetaData']['TopRightLatitude'] # 右上纬度
+ TopRightLongitude = xml_dict['ProductMetaData']['TopRightLongitude'] # 右上经度
+ BottomRightLatitude = xml_dict['ProductMetaData']['BottomRightLatitude'] # 右下纬度
+ BottomRightLongitude = xml_dict['ProductMetaData']['BottomRightLongitude'] # 右下经度
+ BottomLeftLatitude = xml_dict['ProductMetaData']['BottomLeftLatitude'] # 左下纬度
+ BottomLeftLongitude = xml_dict['ProductMetaData']['BottomLeftLongitude'] # 左下经度
+ # WKT uses longitude latitude (X Y) order, as in the other parsers
+ boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+ f'{TopRightLongitude} {TopRightLatitude},' \
+ f'{BottomRightLongitude} {BottomRightLatitude},' \
+ f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+ f'{TopLeftLongitude} {TopLeftLatitude}))'
+ # ThumbnailPath 存储后的缩略图的路径+文件名 ThumbnailName 缩略图文件名称
+ # xmlPath 存储后的xml文件路径+文件名 xmlFileName xml文件名称
+ sensing_dict = {'StartTime': StartTime, 'EndTime': EndTime, 'CloudPercent': CloudPercent,
+ 'boundaryGeomStr': boundaryGeomStr, 'DataFormat': DataFormat, 'NumberBands': NumberBands,
+ 'ImageWidth': ImageWidth, 'ImageHeight': ImageHeight, 'GeographicScope': GeographicScope,
+ #'ReflectionParameter': ReflectionParameter, 'ProjectionDefinition': ProjectionDefinition,
+ #'FirstBindMax': FirstBindMax, 'FirstBindMin': FirstBindMin,
+ 'FirstBindAverage': FirstBindAverage,
+ 'ProjectedCoordinates': ProjectedCoordinates, 'RowNumber': RowNumber,
+ #'CenterProjectionCoordinates': CenterProjectionCoordinates,
+ 'CollectionCode': CollectionCode,
+ "ThumbnailPath": file_path+"GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg",
+ "ThumbnailName": "GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg",
+ "xmlPath": "", "xmlFileName": "",
+ 'DirectoryDepth': 'day'}
+ return sensing_dict
+
+
+if __name__ == '__main__':
+ # gf4_pmi_001 prepends its hard-coded collection directory, so only file names are passed here
+ file_name = 'GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.tiff'
+ xml_name = 'GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
+ gf4_pmi_001(file_name, xml_name)
diff --git a/util/scan_file_util.py b/util/scan_file_util.py
new file mode 100644
index 0000000..3080cea
--- /dev/null
+++ b/util/scan_file_util.py
@@ -0,0 +1,1195 @@
+"""
+Author : XinYi Song
+Time : 2021/11/3 9:00
+Desc:
+"""
+import os
+import time
+
+import requests
+
+from application.settings import Config
+from common.tools.dms import dms_login, dms_task_record, dms_sensing_data
+from scan_data.GetMetaInfo import GetGFPMSData, GetGF3MDJData, GetH08Data, GetSentinel1Data, GetSentinel2Data, \
+ GetSentinel3OLData, GetHJ1Data, GetZY3Data, GetSNPPData
+from scan_data.example import GetJPSSData
+from util.http_file_upload import upload_client
+from util.http_util import httpUtil
+from util.remote_sensing_util import gf4_pmi_001
+from util.xml_util import dict_to_xml
+from apscheduler.schedulers.blocking import BlockingScheduler
+
+sched = BlockingScheduler()
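+# The scan_* jobs below are meant to be registered on this scheduler, for example
+# sched.add_job(scan_VJ102_dir, 'interval', minutes=30) followed by sched.start();
+# the job and interval named in this comment are illustrative examples only.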
+
+
+def list_dir(file_dir):
+ """
+ 通过 listdir 得到的是仅当前路径下的文件名,不包括子目录中的文件,如果需要得到所有文件需要递归
+ """
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = '' # 解析出的xml文件保存的路径
+ ThumbnailPath = '' # 解析出的缩略图保存的路径
+ for cur_file in d:
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ file_name = 'GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.tiff'
+ xml_path = 'GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
+ # 解析遥感数据文件(demo)
+ gf4_sensing = gf4_pmi_001(file_name, xml_path)
+ file_name = os.path.basename(path).split('.')[0]
+ xml_path = 'D:/file/work/pythonyuanma/dms_management/xml/' + file_name + '.xml'
+ # 将遥感数据写入xml中,并保存本地文件夹中
+ dict_to_xml(gf4_sensing, xml_path)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(xml_path, 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/GF4_PMI_001'} # 参阅浏览器上传的选项
+ # 上传生成的xml文件到文件服务器
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(gf4_sensing['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/GF4_PMI_001'} # 参阅浏览器上传的选项
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = gf4_sensing['CollectionCode']
+ DirectoryDepth = gf4_sensing['DirectoryDepth']
+ StartTime = gf4_sensing['StartTime']
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(gf4_sensing['StartTime'], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(gf4_sensing['EndTime'], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": "GF4_PMI_001", "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": gf4_sensing['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": gf4_sensing['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": "GF4_PMI_001", "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_VJ102_dir():
+ """
+ 解析JPSS-VJ102元数据
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描VJ102IMG数据集')
+ collectionCode = 'VJ102IMG'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'VJ102IMG' in cur_file and os.path.splitext(cur_file)[1] == '.nc':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ print(path)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ JPSSData_dict = GetJPSSData(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(JPSSData_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # 上传生成的xml文件到文件服务器
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(JPSSData_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = JPSSData_dict['CollectionCode']
+ DirectoryDepth = JPSSData_dict['DirectoryDepth']
+ StartTime = JPSSData_dict['StartTime']
+ uc = upload_client(path, DirectoryDepth, StartTime[0:19])
+
+ StartTime = time.mktime(time.strptime(JPSSData_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(JPSSData_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": JPSSData_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": JPSSData_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_VJ103_dir():
+ """
+ 解析JPSS-VJ103元数据
+ :param file_dir:
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描VJ103IMG数据集')
+ collectionCode = 'VJ103IMG'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'VJ103IMG' in cur_file and os.path.splitext(cur_file)[1] == '.nc':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ JPSSData_dict = GetJPSSData(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(JPSSData_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # 上传生成的xml文件到文件服务器
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(JPSSData_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = JPSSData_dict['CollectionCode']
+ DirectoryDepth = JPSSData_dict['DirectoryDepth']
+ StartTime = JPSSData_dict['StartTime']
+ uc = upload_client(path, DirectoryDepth, StartTime[0:19])
+
+ StartTime = time.mktime(time.strptime(JPSSData_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(JPSSData_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": JPSSData_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": JPSSData_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_GF1_PMS2_dir():
+ """
+ 获取高分 PMS卫星元数据
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描GF1_PMS2_001数据集')
+ collectionCode = 'GF1_PMS2_001'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'GF1_PMS2' in cur_file[0:8] and os.path.splitext(cur_file)[1] == '.gz':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ GFPMS_dict = GetGFPMSData(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ print(GFPMS_dict['xmlPath'])
+ files = {'file': open(GFPMS_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # 上传生成的xml文件到文件服务器
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GFPMS_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = GFPMS_dict['CollectionCode']
+ DirectoryDepth = GFPMS_dict['DirectoryDepth']
+ StartTime = GFPMS_dict['StartTime']
+ uc = upload_client(path, DirectoryDepth, StartTime[0:19])
+
+ StartTime = time.mktime(time.strptime(GFPMS_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(GFPMS_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": GFPMS_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": GFPMS_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_GF3MDJ_dir():
+ """
+ 获取高分3号MDJ(GF-3 MDJ)卫星元数据
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描GF3_MDJ_SS数据集')
+ collectionCode = 'GF3_MDJ_SS'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'GF3_MDJ' in cur_file[0:7] and os.path.splitext(cur_file)[1] == '.gz':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ GF3_MDJ_SS_dict = GetGF3MDJData(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GF3_MDJ_SS_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # 上传生成的xml文件到文件服务器
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GF3_MDJ_SS_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = GF3_MDJ_SS_dict['CollectionCode']
+ DirectoryDepth = GF3_MDJ_SS_dict['DirectoryDepth']
+ StartTime = GF3_MDJ_SS_dict['StartTime']
+ uc = upload_client(path, DirectoryDepth, StartTime[0:19])
+
+ StartTime = time.mktime(time.strptime(GF3_MDJ_SS_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(GF3_MDJ_SS_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": GF3_MDJ_SS_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": GF3_MDJ_SS_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_H08_dir():
+ """
+ 获取高分3号MDJ(GF-3 MDJ)卫星元数据
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描H08数据集')
+ collectionCode = 'NC_H08'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'NC_H08' in cur_file[0:6] and os.path.splitext(cur_file)[1] == '.nc':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ GetH08_dict = GetH08Data(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetH08_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # 上传生成的xml文件到文件服务器
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetH08_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = GetH08_dict['CollectionCode']
+ DirectoryDepth = GetH08_dict['DirectoryDepth']
+ StartTime = GetH08_dict['ProduceTime'][0:10] + ' ' + GetH08_dict['ProduceTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(GetH08_dict['ProduceTime'][0:10] + ' ' + GetH08_dict['ProduceTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(GetH08_dict['ProduceTime'][0:10] + ' '+ GetH08_dict['ProduceTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": GetH08_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": GetH08_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_Sentinel1_dir():
+ """
+ Retrieve Sentinel-1 satellite metadata
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描Sentinel1数据集')
+ collectionCode = 'S1A_IW_GRDH'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'S1A_IW_GRDH' in cur_file[0:11] and os.path.splitext(cur_file)[1] == '.zip':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ Sentinel1_dict = GetSentinel1Data(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(Sentinel1_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated xml file to the file server
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(Sentinel1_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = Sentinel1_dict['CollectionCode']
+ DirectoryDepth = Sentinel1_dict['DirectoryDepth']
+ StartTime = Sentinel1_dict['ProduceTime'][0:10] + ' ' + Sentinel1_dict['ProduceTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(Sentinel1_dict['StartTime'][0:10] + ' ' + Sentinel1_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(Sentinel1_dict['StopTime'][0:10] + ' ' + Sentinel1_dict['StopTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": Sentinel1_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": Sentinel1_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_Sentinel2_dir():
+ """
+ Retrieve Sentinel-2 satellite metadata
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描Sentinel2数据集')
+ collectionCode = 'S2B'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'S2B' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.zip':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ Sentinel2_dict = GetSentinel2Data(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(Sentinel2_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated xml file to the file server
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(Sentinel2_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = Sentinel2_dict['CollectionCode']
+ DirectoryDepth = Sentinel2_dict['DirectoryDepth']
+ StartTime = Sentinel2_dict['ProduceTime'][0:10] + ' ' + Sentinel2_dict['ProduceTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(Sentinel2_dict['StartTime'][0:10] + ' ' + Sentinel2_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(Sentinel2_dict['StopTime'][0:10] + ' ' + Sentinel2_dict['StopTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": Sentinel2_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": Sentinel2_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_Sentinel3OL_dir():
+ """
+ Retrieve Sentinel-3 OLCI satellite metadata
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描Sentinel3数据集')
+ collectionCode = 'Sentinel3_OLCI'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'S3B' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.zip':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ print(path)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ Sentinel3OL_dict = GetSentinel3OLData(path, xmlPath, ThumbnailPath)
+ print(Sentinel3OL_dict)
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(Sentinel3OL_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated xml file to the file server
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(Sentinel3OL_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = Sentinel3OL_dict['CollectionCode']
+ DirectoryDepth = Sentinel3OL_dict['DirectoryDepth']
+ StartTime = Sentinel3OL_dict['StartTime'][0:10] + ' ' + Sentinel3OL_dict['StartTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(Sentinel3OL_dict['StartTime'][0:10] + ' ' + Sentinel3OL_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(Sentinel3OL_dict['StopTime'][0:10] + ' ' + Sentinel3OL_dict['StopTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": Sentinel3OL_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": Sentinel3OL_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_HJ1_dir():
+ """
+ Retrieve HJ-1 (Huanjing-1) satellite metadata
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描环境1号数据集')
+ collectionCode = 'HJ-1'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'HJ1' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.gz':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ GetHJ1Data_dict = GetHJ1Data(path, xmlPath, ThumbnailPath)
+
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetHJ1Data_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated xml file to the file server
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetHJ1Data_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = GetHJ1Data_dict['CollectionCode']
+ DirectoryDepth = GetHJ1Data_dict['DirectoryDepth']
+ StartTime = GetHJ1Data_dict['ProductTime'][0:10] + ' ' + GetHJ1Data_dict['ProductTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(GetHJ1Data_dict['StartTime'][0:10] + ' ' + GetHJ1Data_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(GetHJ1Data_dict['EndTime'][0:10] + ' ' + GetHJ1Data_dict['EndTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": GetHJ1Data_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": GetHJ1Data_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_ZY3_dir():
+ """
+ Retrieve ZY-3 (Ziyuan-3) satellite metadata
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描资源3号数据集')
+ collectionCode = 'ZY-3'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'ZY3' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.gz':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ GetZY3Data_dict = GetZY3Data(path, xmlPath, ThumbnailPath)
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetZY3Data_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated xml file to the file server
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetZY3Data_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = GetZY3Data_dict['CollectionCode']
+ DirectoryDepth = GetZY3Data_dict['DirectoryDepth']
+ StartTime = GetZY3Data_dict['ProduceTime'][0:10] + ' ' + GetZY3Data_dict['ProduceTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(GetZY3Data_dict['StartTime'][0:10] + ' ' + GetZY3Data_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(GetZY3Data_dict['EndTime'][0:10] + ' ' + GetZY3Data_dict['EndTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": GetZY3Data_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": GetZY3Data_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
+def scan_SNPP_dir():
+ """
+ Retrieve SNPP VNP02IMG satellite metadata
+ :return:
+ """
+ file_dir = 'E:/数管'
+ print('开始扫描VNP02IMG数据集')
+ collectionCode = 'VNP02IMG'
+ # 用户登录
+ token_s = dms_login()
+ # 判断定时任务是否在进行
+ task = dms_task_record(token_s, collectionCode)
+ # 如果不是空说明正在进行
+ if task is not None and len(task) > 0:
+ return
+ fileNameList = []
+ dms_list = dms_sensing_data(token_s, collectionCode)
+ for dms in dms_list:
+ fileNameList.append(dms['fileName'])
+ dir_list = os.listdir(file_dir)
+ # 判断扫描出的文件和已有的文件,将多出的文件进行解析
+ d = [y for y in dir_list if y not in fileNameList]
+ if d is None or len(d) == 0:
+ print('没有多余的遥感数据文件,终止程序')
+ return
+ file_total_size = ""
+ file_total_name = ""
+ xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径
+ ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径
+ for cur_file in d:
+ if 'VNP02IMG' in cur_file[0:8] and os.path.splitext(cur_file)[1] == '.nc':
+
+ # 获取文件的绝对路径
+ path = os.path.join(file_dir, cur_file)
+ if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径
+ # 解析遥感数据文件(demo)
+ GetSNPPData_dict = GetSNPPData(path, xmlPath, ThumbnailPath)
+ # 配置文件服务器参数
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetSNPPData_dict['xmlPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated xml file to the file server
+ xml = requests.post(url, data=options, files=files)
+
+ url = Config.DFS_UPLOAD_URL
+ files = {'file': open(GetSNPPData_dict['ThumbnailPath'], 'rb')}
+ options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
+ # Upload the generated thumbnail to the file server
+ ThumbnailName = requests.post(url, data=options, files=files)
+
+ CollectionCode = GetSNPPData_dict['CollectionCode']
+ DirectoryDepth = GetSNPPData_dict['DirectoryDepth']
+ StartTime = GetSNPPData_dict['ProductionTime'][0:10] + ' ' + GetSNPPData_dict['ProductionTime'][11:19]
+ uc = upload_client(path, DirectoryDepth, StartTime)
+
+ StartTime = time.mktime(time.strptime(GetSNPPData_dict['StartTime'][0:10] + ' ' + GetSNPPData_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+ EndTime = time.mktime(time.strptime(GetSNPPData_dict['EndTime'][0:10] + ' ' + GetSNPPData_dict['EndTime'][11:19], '%Y-%m-%d %H:%M:%S'))
+
+ # 入库遥感数据
+ res_data = httpUtil(url=Config.RESING_DATA_URL,
+ data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
+ "shootingTimeEndTs": EndTime,
+ "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+ "fileSize": uc['file_size'], "cloudCoverage": GetSNPPData_dict['CloudPercent'],
+ "metaInformationFile": xml.json()['path'],
+ "thumbnailFile": ThumbnailName.json()['path'],
+ "remarks": "", "boundaryGeomStr": GetSNPPData_dict['boundaryGeomStr']},
+ token=token_s).post_no_patam_herder()
+ print(res_data.json()['data'])
+
+ file_total_size = file_total_size + str(uc['file_size'])
+ file_total_size = file_total_size + ","
+
+ file_total_name = file_total_name + uc['fileName']
+ file_total_name = file_total_name + ","
+ # print("========"+suffix)
+ print("{0} : is file!".format(cur_file))
+ if os.path.isdir(path):
+ print("{0} : is dir!".format(cur_file))
+ list_dir(path) # 递归子目录
+ if uc['type'] == 'ok':
+ continue
+ # 添加遥感数据归档任务
+ res = httpUtil(url=Config.DATA_TASK_URL,
+ data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name,
+ "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+ task_code = res.json()['data']
+
+ # 结束遥感数据归档任务
+ header = {"Authorization": token_s}
+ res = requests.post(url=Config.DATA_END_TASK_URL,
+ params={"taskCode": task_code}, headers=header).json()
+
+
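+# The scan_* functions above all follow the same pipeline: log in, skip when an archive
+# task for the collection is already running, diff the scan directory against the DMS
+# inventory, parse each new file, upload the xml/thumbnail, register the record and then
+# open and close an archive task. The helper below is only a sketch of how that shared
+# pattern could be parameterised and is not wired into the scheduler; `parser` stands for
+# one of the Get*Data functions used above and is assumed to return the same dict keys.
+def scan_dataset_dir(collectionCode, prefix, suffix, parser, file_dir='E:/数管'):
+ token_s = dms_login()
+ # skip when an archive task for this collection is already in progress
+ task = dms_task_record(token_s, collectionCode)
+ if task is not None and len(task) > 0:
+ return
+ known = [dms['fileName'] for dms in dms_sensing_data(token_s, collectionCode)]
+ new_files = [f for f in os.listdir(file_dir) if f not in known and f.startswith(prefix) and f.endswith(suffix)]
+ for cur_file in new_files:
+ path = os.path.join(file_dir, cur_file)
+ if not os.path.isfile(path):
+ continue
+ meta = parser(path, Config.XML_PATH, Config.THUMBNAIL_PATH)
+ # ...from here the flow is identical to the functions above: upload meta['xmlPath'] and
+ # meta['ThumbnailPath'] to Config.DFS_UPLOAD_URL, call upload_client, register the record
+ # via Config.RESING_DATA_URL and finish with DATA_TASK_URL / DATA_END_TASK_URL.
+ print(meta)
+
+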
+if __name__ == '__main__':
+ # file_dir = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_001/sensingdata'
+ # list_dir(file_dir)
+ # file_dir = 'E:/数管'
+ #
+ # scan_VJ102_dir()
+ #
+ # scan_VJ103_dir()
+
+ # scan_GF1_PMS2_dir()
+ # scan_GF3MDJ_dir()
+ # scan_H08_dir()
+ # scan_Sentinel1_dir()
+ # scan_Sentinel2_dir()
+ # scan_Sentinel3OL_dir()
+ # scan_HJ1_dir()
+ # scan_ZY3_dir()
+ scan_SNPP_dir()
\ No newline at end of file
diff --git a/util/snow_ari.py b/util/snow_ari.py
new file mode 100644
index 0000000..bfa380e
--- /dev/null
+++ b/util/snow_ari.py
@@ -0,0 +1,103 @@
+import sys
+import time
+import logging
+
+
+class MySnow(object):
+
+ def __init__(self, datacenter_id, worker_id):
+ # 初始毫秒级时间戳(2021-06-09)
+ self.initial_time_stamp = int(time.mktime(time.strptime('2021-06-09 00:00:00', "%Y-%m-%d %H:%M:%S")) * 1000)
+ # 机器 ID 所占的位数
+ self.worker_id_bits = 5
+ # 数据表示 ID 所占的位数
+ self.datacenter_id_bits = 5
+ # 支持的最大机器 ID,结果是 31(这个位移算法可以很快的计算出几位二进制数所能表示的最大十进制数)
+ # 2**5-1 0b11111
+ self.max_worker_id = -1 ^ (-1 << self.worker_id_bits)
+ # 支持最大标识 ID,结果是 31
+ self.max_datacenter_id = -1 ^ (-1 << self.datacenter_id_bits)
+ # 序列号 ID所占的位数
+ self.sequence_bits = 12
+ # 机器 ID 偏移量(12)
+ self.workerid_offset = self.sequence_bits
+ # 数据中心 ID 偏移量(12 + 5)
+ self.datacenterid_offset = self.sequence_bits + self.datacenter_id_bits
+ # 时间戳偏移量(12 + 5 + 5)
+ self.timestamp_offset = self.sequence_bits + self.datacenter_id_bits + self.worker_id_bits
+ # 生成序列的掩码,这里为 4095(0b111111111111 = 0xfff = 4095)
+ self.sequence_mask = -1 ^ (-1 << self.sequence_bits)
+
+ # 初始化日志
+ self.logger = logging.getLogger('snowflake')
+
+ # 数据中心 ID(0 ~ 31)
+ if datacenter_id > self.max_datacenter_id or datacenter_id < 0:
+ err_msg = 'datacenter_id 不能大于 %d 或小于 0' % self.max_datacenter_id
+ self.logger.error(err_msg)
+ sys.exit()
+ self.datacenter_id = datacenter_id
+ # 工作节点 ID(0 ~ 31)
+ if worker_id > self.max_worker_id or worker_id < 0:
+ err_msg = 'worker_id 不能大于 %d 或小于 0' % self.max_worker_id
+ self.logger.error(err_msg)
+ sys.exit()
+ self.worker_id = worker_id
+ # 毫秒内序列(0 ~ 4095)
+ self.sequence = 0
+ # 上次生成 ID 的时间戳
+ self.last_timestamp = -1
+
+ def _gen_timestamp(self):
+ """
+ 生成整数毫秒级时间戳
+ :return: 整数毫秒级时间戳
+ """
+ return int(time.time() * 1000)
+
+ def next_id(self):
+ """
+ 获得下一个ID (用同步锁保证线程安全)
+ :return: snowflake_id
+ """
+ timestamp = self._gen_timestamp()
+ # If the current time is earlier than the timestamp of the last generated ID, the
+ # system clock has moved backwards; raise instead of risking duplicate IDs
+ if timestamp < self.last_timestamp:
+ self.logger.error('clock is moving backwards. Rejecting requests until {}'.format(self.last_timestamp))
+ raise Exception('clock moved backwards, rejecting requests until {}'.format(self.last_timestamp))
+ # 如果是同一时间生成的,则进行毫秒内序列
+ if timestamp == self.last_timestamp:
+ self.sequence = (self.sequence + 1) & self.sequence_mask
+ # sequence 等于 0 说明毫秒内序列已经增长到最大值
+ if self.sequence == 0:
+ # 阻塞到下一个毫秒,获得新的时间戳
+ timestamp = self._til_next_millis(self.last_timestamp)
+ else:
+ # 时间戳改变,毫秒内序列重置
+ self.sequence = 0
+
+ # 上次生成 ID 的时间戳
+ self.last_timestamp = timestamp
+
+ # 移位并通过或运算拼到一起组成 64 位的 ID
+ new_id = ((timestamp - self.initial_time_stamp) << self.timestamp_offset) | \
+ (self.datacenter_id << self.datacenterid_offset) | \
+ (self.worker_id << self.workerid_offset) | \
+ self.sequence
+ return new_id
+
+ def _til_next_millis(self, last_timestamp):
+ """
+ 阻塞到下一个毫秒,直到获得新的时间戳
+ :param last_timestamp: 上次生成 ID 的毫秒级时间戳
+ :return: 当前毫秒级时间戳
+ """
+ timestamp = self._gen_timestamp()
+ while timestamp <= last_timestamp:
+ timestamp = self._gen_timestamp()
+ return timestamp
+
+
+if __name__ == '__main__':
+ mysnow = MySnow(1, 2)
+ id = mysnow.next_id()
+ print(id)
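+ # Illustration only: the 64-bit ID can be unpacked again with the offsets and masks
+ # defined in MySnow, which makes the layout (timestamp | datacenter | worker | sequence) visible.
+ timestamp_ms = (id >> mysnow.timestamp_offset) + mysnow.initial_time_stamp
+ datacenter_id = (id >> mysnow.datacenterid_offset) & mysnow.max_datacenter_id
+ worker_id = (id >> mysnow.workerid_offset) & mysnow.max_worker_id
+ sequence = id & mysnow.sequence_mask
+ print(timestamp_ms, datacenter_id, worker_id, sequence)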
diff --git a/util/xml_util.py b/util/xml_util.py
new file mode 100644
index 0000000..5f09c01
--- /dev/null
+++ b/util/xml_util.py
@@ -0,0 +1,50 @@
+"""
+Author : XinYi Song
+Time : 2021/10/12 11:13
+Desc: XML utility helpers
+"""
+from xml.dom.minidom import parseString
+import dict2xml
+import xmltodict
+import json
+import os
+
+
+# Normalize the input: accepts a path to an xml file, an xml string, a json string, or a dict
+def init_data(file_Str_Dict):
+ if isinstance(file_Str_Dict, str) and os.path.isfile(file_Str_Dict):
+ with open(file_Str_Dict) as fp:
+ data = fp.read()
+ return data
+ elif isinstance(file_Str_Dict, (str, dict)):
+ data = file_Str_Dict
+ return data
+
+
+# Read an xml file (or xml string) and convert it to a dict
+def xml_to_dict(file_Str_Dict):
+ data = init_data(file_Str_Dict)
+
+ data_orderedD = xmltodict.parse(data)
+ data_json = json.dumps(data_orderedD, indent=4)
+ data_dict = json.loads(data_json)
+
+ return data_dict
+
+
+# Write a dict to xml and save it to a file
+def dict_to_xml(dict_in, xml_out):
+ xml_str = dict2xml.dict2xml(dict_in)
+ # Prepend an xml declaration; dict_in is assumed to have a single top-level key
+ # that becomes the document root
+ xml_raw = '<?xml version="1.0" encoding="utf-8"?>' + '\n' + xml_str + '\n'
+ dom = parseString(xml_raw.replace('\n', ''))
+ pretty = dom.toprettyxml(indent=" ", newl="\n", encoding="utf-8")
+ with open(xml_out, 'w', encoding='utf-8') as f:
+ f.write(pretty.decode("utf-8"))
+
+
+if __name__ == '__main__':
+ d = xml_to_dict("E:/xmltest/demo.xml")
+ print(d)
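+ # xml_to_dict also accepts a raw xml string (init_data falls through to its second
+ # branch); a small illustrative round trip, with an assumed output path:
+ sample = {'Demo': {'SatelliteID': 'GF1', 'CloudPercent': '0'}}
+ dict_to_xml(sample, 'E:/xmltest/demo_out.xml')
+ print(xml_to_dict('<Demo><SatelliteID>GF1</SatelliteID></Demo>'))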
+ # a = {'ProductMetaData': {'SatelliteID': 'GF1', 'SensorID': 'PMS1', 'ReceiveTime': '2013-11-05 05:52:11', 'OrbitID': '2850', 'ProduceType': 'STANDARD', 'SceneID': '154627', 'ProductID': '107383', 'ProductLevel': 'LEVEL1A', 'ProductQuality': None, 'ProductQualityReport': None, 'ProductFormat': 'GEOTIFF', 'ProduceTime': '2013-11-05 17:27:10', 'Bands': '1,2,3,4', 'ScenePath': '65', 'SceneRow': '177', 'SatPath': '65', 'SatRow': '177', 'SceneCount': '1', 'SceneShift': '1', 'StartTime': '2013-11-05 13:52:08', 'EndTime': '2013-11-05 13:52:14', 'CenterTime': '2013-11-05 13:52:11', 'ImageGSD': '8', 'WidthInPixels': '4548', 'HeightInPixels': '4500', 'WidthInMeters': None, 'HeightInMeters': None, 'CloudPercent': '0', 'QualityInfo': None, 'PixelBits': None, 'ValidPixelBits': None, 'RollViewingAngle': '0', 'PitchViewingAngle': '0', 'RollSatelliteAngle': '9.56992e-05', 'PitchSatelliteAngle': '0.000105642', 'YawSatelliteAngle': '2.91257', 'SolarAzimuth': '169.08', 'SolarZenith': '33.6357', 'SatelliteAzimuth': '81.7259', 'SatelliteZenith': '88.1627', 'GainMode': 'G0,G4,G0,G0', 'IntegrationTime': '0.00115931', 'IntegrationLevel': 'S5,S3,S4,S3', 'MapProjection': None, 'EarthEllipsoid': None, 'ZoneNo': None, 'ResamplingKernel': None, 'HeightMode': None, 'MtfCorrection': 'LAB', 'RelativeCorrectionData': None, 'TopLeftLatitude': '40.1695', 'TopLeftLongitude': '78.2523', 'TopRightLatitude': '40.0948', 'TopRightLongitude': '78.6629', 'BottomRightLatitude': '39.7655', 'BottomRightLongitude': '78.5609', 'BottomLeftLatitude': '39.84', 'BottomLeftLongitude': '78.1522', 'TopLeftMapX': None, 'TopLeftMapY': None, 'TopRightMapX': None, 'TopRightMapY': None, 'BottomRightMapX': None, 'BottomRightMapY': None, 'BottomLeftMapX': None, 'BottomLeftMapY': None}}
+ # dict_to_xml(a, './res.xml')
\ No newline at end of file
diff --git a/util/zxby.py b/util/zxby.py
new file mode 100644
index 0000000..261eaf7
--- /dev/null
+++ b/util/zxby.py
@@ -0,0 +1,78 @@
+"""
+Author : XinYi Song
+Time : 2021/10/9 9:43
+Desc:
+"""
+import os, sys, subprocess, tempfile, time
+
+# 创建临时文件夹,返回临时文件夹路径
+TempFile = tempfile.mkdtemp(suffix='_test', prefix='python_')
+# 文件名
+FileNum = int(time.time() * 1000)
+# python编译器位置
+EXEC = sys.executable
+
+
+# 获取python版本
+def get_version():
+ v = sys.version_info
+ version = "python %s.%s" % (v.major, v.minor)
+ return version
+
+
+# 获得py文件名
+def get_pyname():
+ global FileNum
+ return 'test_%d' % FileNum
+
+
+# 接收代码写入文件
+def write_file(pyname, code):
+ fpath = os.path.join(TempFile, '%s.py' % pyname)
+ with open(fpath, 'w', encoding='utf-8') as f:
+ f.write(code)
+ print('file path: %s' % fpath)
+ return fpath
+
+
+# Decode subprocess output: try utf-8 first, fall back to gbk
+def decode(s):
+ try:
+ return s.decode('utf-8')
+ except UnicodeDecodeError:
+ return s.decode('gbk')
+
+
+# Main entry: write the submitted code to a temp .py file, run it in a subprocess and return the result
+def main(code):
+ r = dict()
+ r["version"] = get_version()
+ pyname = get_pyname()
+ fpath = write_file(pyname, code)
+ try:
+ # subprocess.check_output waits for the child process to finish and returns its stdout
+ # stderr=subprocess.STDOUT redirects the child's stderr into the captured output
+ outdata = decode(subprocess.check_output([EXEC, fpath], stderr=subprocess.STDOUT, timeout=5))
+ except subprocess.CalledProcessError as e:
+ # e.output是错误信息标准输出
+ # 错误返回的数据
+ r["code"] = 'Error'
+ r["output"] = decode(e.output)
+ return r
+ else:
+ # 成功返回的数据
+ r['output'] = outdata
+ return r
+ finally:
+ # Remove the generated script (the mkdtemp directory is not cleaned up automatically)
+ try:
+ os.remove(fpath)
+ except Exception as e:
+ exit(1)
+
+
+# if __name__ == '__main__':
+# code = "print(11);print(22)"
+# print(main(code))
+